Dataset schema:

| column | dtype | range / classes |
|---|---|---|
| hexsha | stringlengths | 40 .. 40 |
| size | int64 | 2 .. 1.02M |
| ext | stringclasses | 10 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 4 .. 245 |
| max_stars_repo_name | stringlengths | 6 .. 130 |
| max_stars_repo_head_hexsha | stringlengths | 40 .. 40 |
| max_stars_repo_licenses | sequencelengths | 1 .. 10 |
| max_stars_count | int64 | 1 .. 191k |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24 .. 24 |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24 .. 24 |
| max_issues_repo_path | stringlengths | 4 .. 245 |
| max_issues_repo_name | stringlengths | 6 .. 130 |
| max_issues_repo_head_hexsha | stringlengths | 40 .. 40 |
| max_issues_repo_licenses | sequencelengths | 1 .. 10 |
| max_issues_count | int64 | 1 .. 67k |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24 .. 24 |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24 .. 24 |
| max_forks_repo_path | stringlengths | 4 .. 245 |
| max_forks_repo_name | stringlengths | 6 .. 130 |
| max_forks_repo_head_hexsha | stringlengths | 40 .. 40 |
| max_forks_repo_licenses | sequencelengths | 1 .. 10 |
| max_forks_count | int64 | 1 .. 105k |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24 .. 24 |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24 .. 24 |
| content | stringlengths | 2 .. 1.02M |
| avg_line_length | float64 | 1 .. 958k |
| max_line_length | int64 | 1 .. 987k |
| alphanum_fraction | float64 | 0 .. 1 |
| content_no_comment | stringlengths | 0 .. 1.01M |
| is_comment_constant_removed | bool | 2 classes |
| is_sharp_comment_removed | bool | 1 class |

hexsha: f70001f658d4dfaa72dd4f0d1b3176492f6658bb | size: 6,442 | ext: py | lang: Python
max_stars: spider/openwrt.py @ CNDB/CNDB (2e3a41111f604cf2f4f22a7c9370bb3f753e3e88) | licenses: ["BSD-3-Clause"] | stars: null | events: null .. null
max_issues: spider/openwrt.py @ CNDB/CNDB (2e3a41111f604cf2f4f22a7c9370bb3f753e3e88) | licenses: ["BSD-3-Clause"] | issues: null | events: null .. null
max_forks: spider/openwrt.py @ CNDB/CNDB (2e3a41111f604cf2f4f22a7c9370bb3f753e3e88) | licenses: ["BSD-3-Clause"] | forks: null | events: null .. null
content:
```python
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
#*** <License> ************************************************************#
# This module is part of the repository CNDB.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
#
#*** </License> ***********************************************************#

from _TFL.pyk import pyk
from rsclib.HTML_Parse import tag, Page_Tree
from rsclib.autosuper import autosuper
from spider.common import Interface, Inet4, Inet6, unroutable
from spider.common import WLAN_Config
from spider.luci import Version_Mixin

class Status (Page_Tree, Version_Mixin) :
    url          = 'cgi-bin/luci/freifunk/status/status'
    retries      = 2
    timeout      = 10
    html_charset = 'utf-8' # force utf-8 encoding

    wl_names = dict \
        ( ssid    = 'ssid'
        , _bsiid  = 'bssid'
        , channel = 'channel'
        , mode    = 'mode'
        )

    def parse (self) :
        root = self.tree.getroot ()
        self.wlans  = []
        self.routes = {}
        for div in root.findall (".//%s" % tag ("div")) :
            id = div.get ('id')
            if id == 'cbi-wireless' :
                wlan_div = div
            elif id == 'cbi-routes' :
                route_div = div
            self.try_get_version (div)
        for d in self.tbl_iter (wlan_div) :
            for k, newkey in pyk.iteritems (self.wl_names) :
                if k in d :
                    d [newkey] = d [k]
            wl = WLAN_Config (** d)
            self.wlans.append (wl)
        for d in self.tbl_iter (route_div) :
            iface = d.get ('iface')
            gw    = d.get ('gateway')
            if iface and gw :
                self.routes [iface] = gw
        self.set_version (root)
    # end def parse

    def tbl_iter (self, div) :
        tbl = div.find (".//%s" % tag ("table"))
        assert tbl.get ('class') == 'cbi-section-table'
        d = {}
        for tr in tbl :
            if 'cbi-section-table-row' not in tr.get ('class').split () :
                continue
            for input in tr.findall (".//%s" % tag ('input')) :
                name = input.get ('id').split ('.') [-1]
                val  = input.get ('value')
                d [name] = val
            if not d :
                continue
            yield d
    # end def tbl_iter

# end class Status

class Table_Iter (Page_Tree) :

    def table_iter (self) :
        root = self.tree.getroot ()
        for div in root.findall (".//%s" % tag ("div")) :
            if div.get ('id') == 'maincontent' :
                break
        tbl = div.find (".//%s" % tag ("table"))
        if tbl is None :
            return
        for tr in tbl :
            if tr [0].tag == tag ('th') :
                continue
            yield (self.tree.get_text (x) for x in tr)
    # end def table_iter

# end class Table_Iter

class OLSR_Connections (Table_Iter) :
    url          = 'cgi-bin/luci/freifunk/olsr/'
    retries      = 2
    timeout      = 10
    html_charset = 'utf-8' # force utf-8 encoding

    def parse (self) :
        self.neighbors = {}
        for l in self.table_iter () :
            neighbor, ip, lq, nlq, etx = l
            lq, nlq, etx = (float (x) for x in (lq, nlq, etx))
            self.neighbors [neighbor] = [ip, lq, nlq, etx]
    # end def parse

# end class OLSR_Connections

class OLSR_Routes (Table_Iter) :
    url          = 'cgi-bin/luci/freifunk/olsr/routes'
    retries      = 2
    timeout      = 10
    html_charset = 'utf-8' # force utf-8 encoding

    def parse (self) :
        self.iface_by_gw = {}
        for l in self.table_iter () :
            announced, gw, iface, metric, etx = l
            if gw in self.iface_by_gw :
                assert iface == self.iface_by_gw [gw]
            else :
                self.iface_by_gw [gw] = iface
    # end def parse

# end class OLSR_Routes

class OpenWRT (autosuper) :

    def __init__ (self, site, request) :
        self.site    = site
        self.request = request
        if 'interfaces' in self.request or 'ips' in self.request :
            st    = Status           (site = site)
            conn  = OLSR_Connections (site = site)
            route = OLSR_Routes      (site = site)
            self.version = st.version
            assert len (st.wlans) <= 1
            interfaces = {}
            ips        = {}
            count = 0
            for gw, ifname in pyk.iteritems (route.iface_by_gw) :
                ip, lq, nlq, etx = conn.neighbors [gw]
                i4 = Inet4 (ip, None, None, iface = ifname)
                ips [i4] = 1
                is_wlan = True
                if lq == nlq == etx == 1.0 :
                    is_wlan = False
                if ifname in interfaces :
                    iface = interfaces [ifname]
                    if not iface.is_wlan and is_wlan :
                        iface.is_wlan   = True
                        iface.wlan_info = st.wlans [0]
                else :
                    iface = Interface (count, ifname, None)
                    iface.is_wlan = is_wlan
                    if is_wlan :
                        iface.wlan_info = st.wlans [0]
                    count += 1
                    interfaces [ifname] = iface
                if i4 not in iface.inet4 :
                    iface.append_inet4 (i4)
            wl_if = None
            for iface in pyk.itervalues (interfaces) :
                if iface.is_wlan :
                    if wl_if :
                        m = "Duplicate wlan: %s/%s" % (iface.name, wl_if.name)
                        raise ValueError (m)
                    wl_if = iface
            # check own ip
            n  = 'unknown'
            i4 = Inet4 (self.request ['ip'], None, None, iface = n)
            if i4 not in ips :
                assert n not in interfaces
                iface = interfaces [n] = Interface (count, n, None)
                iface.append_inet4 (i4)
                iface.is_wlan = False
                if not wl_if and st.wlans :
                    iface.is_wlan   = True
                    iface.wlan_info = st.wlans [0]
                ips [i4] = True
            self.request ['ips']        = ips
            self.request ['interfaces'] = interfaces
            self.request ['version']    = st.version
    # end def __init__

# end class OpenWRT
```
avg_line_length: 34.449198 | max_line_length: 78 | alphanum_fraction: 0.472369
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f7000273e22d5a0f2d5b40c38a0ed8511d1b8995 | size: 2,250 | ext: py | lang: Python
max_stars: utils/compare.py @ adcrn/knest (a274dc9ddb642cc30f837e225f000bf33430eb43) | licenses: ["BSD-3-Clause"] | stars: 8 | events: 2018-03-15T23:42:51.000Z .. 2020-03-10T06:21:03.000Z
max_issues: utils/compare.py @ deekerno/knest (a274dc9ddb642cc30f837e225f000bf33430eb43) | licenses: ["BSD-3-Clause"] | issues: 12 | events: 2018-03-15T19:11:02.000Z .. 2018-10-30T10:02:45.000Z
max_forks: utils/compare.py @ adcrn/knest (a274dc9ddb642cc30f837e225f000bf33430eb43) | licenses: ["BSD-3-Clause"] | forks: null | events: null .. null
content:
```python
# UCF Senior Design 2017-18
# Group 38

from PIL import Image
import cv2
import imagehash
import math
import numpy as np

DIFF_THRES = 20
LIMIT = 2
RESIZE = 1000


def calc_hash(img):
    """
    Calculate the wavelet hash of the image
        img: (ndarray) image file
    """
    # resize image if height > 1000
    img = resize(img)
    return imagehash.whash(Image.fromarray(img))


def compare(hash1, hash2):
    """
    Calculate the difference between two images
        hash1: (array) first wavelet hash
        hash2: (array) second wavelet hash
    """
    return hash1 - hash2


def limit(img, std_hash, count):
    """
    Determine whether image should be removed from image dictionary in main.py
        img: (ndarray) image file
        std_hash: (array) wavelet hash of comparison standard
        count: (int) global count of images similar to comparison standard
    """
    # calculate hash for given image
    cmp_hash = calc_hash(img)

    # compare to standard
    diff = compare(std_hash, cmp_hash)

    # image is similar to standard
    if diff <= DIFF_THRES:
        # if there are 3 similar images already, remove image
        if count >= LIMIT:
            return 'remove'

    # non-similar image found
    else:
        # update comparison standard
        return 'update_std'

    # else continue reading images with same standard
    return 'continue'


def resize(img):
    """
    Resize an image
        img: (ndarray) RGB color image
    """
    # get dimensions of image
    width = np.shape(img)[1]
    height = np.shape(img)[0]

    # if height of image is greater than 1000, resize it to 1000
    if width > RESIZE:
        # keep resize proportional
        scale = RESIZE / width
        resized_img = cv2.resize(
            img, (RESIZE, math.floor(height / scale)), cv2.INTER_AREA)
        # return resized image
        return resized_img

    # if height of image is less than 1000, return image unresized
    return img


def set_standard(images, filename):
    """
    Set new comparison standard and update information
        images: (dictionary) dictionary containing all the image data
        filename: (String) name of the image file
    """
    return filename, calc_hash(images[filename]), 0
```
avg_line_length: 24.725275 | max_line_length: 78 | alphanum_fraction: 0.646667
is_comment_constant_removed: true | is_sharp_comment_removed: true
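
A minimal usage sketch for the `utils/compare.py` helpers above (not part of the record; the file names are hypothetical, and it assumes OpenCV, Pillow, and `imagehash` are installed with the repo root on `sys.path`):

```python
import cv2
from utils.compare import calc_hash, compare, limit

# Hash two images and measure their perceptual (wavelet-hash) distance.
img_a = cv2.imread("bird_001.jpg")  # hypothetical input files
img_b = cv2.imread("bird_002.jpg")
diff = compare(calc_hash(img_a), calc_hash(img_b))
print(diff)  # small values mean visually similar images

# Decide whether img_b should be kept, dropped, or become the new standard.
action = limit(img_b, calc_hash(img_a), count=0)
print(action)  # one of 'remove', 'update_std', 'continue'
```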

hexsha: f70002926d1d600b4b068459c9dd40ebf3aef47d | size: 757 | ext: py | lang: Python
max_stars: sdk/python/kfp/__main__.py @ ConverJens/pipelines (a1d453af214ec9eebad73fb05845dd3499d60d00) | licenses: ["Apache-2.0"] | stars: 6 | events: 2020-05-19T02:35:11.000Z .. 2020-05-29T17:58:42.000Z
max_issues: sdk/python/kfp/__main__.py @ ConverJens/pipelines (a1d453af214ec9eebad73fb05845dd3499d60d00) | licenses: ["Apache-2.0"] | issues: 1,932 | events: 2021-01-25T11:23:37.000Z .. 2022-03-31T17:10:18.000Z
max_forks: sdk/python/kfp/__main__.py @ ConverJens/pipelines (a1d453af214ec9eebad73fb05845dd3499d60d00) | licenses: ["Apache-2.0"] | forks: 11 | events: 2020-05-19T22:26:41.000Z .. 2021-01-25T09:56:21.000Z
content:
```python
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .cli.cli import main

# TODO(hongyes): add more commands:
# kfp compile (migrate from dsl-compile)
# kfp experiment (manage experiments)

if __name__ == '__main__':
    main()
```
avg_line_length: 32.913043 | max_line_length: 74 | alphanum_fraction: 0.749009
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f7000327daf9ff11a381ce6d5de401ff007d1323 | size: 1,094 | ext: py | lang: Python
max_stars: TestProject/app/view/RoomItem.py @ ChinSing00/ChatChat (48654e2e125298c00a558449353e38d0cec06d03) | licenses: ["MIT"] | stars: null | events: null .. null
max_issues: TestProject/app/view/RoomItem.py @ ChinSing00/ChatChat (48654e2e125298c00a558449353e38d0cec06d03) | licenses: ["MIT"] | issues: null | events: null .. null
max_forks: TestProject/app/view/RoomItem.py @ ChinSing00/ChatChat (48654e2e125298c00a558449353e38d0cec06d03) | licenses: ["MIT"] | forks: null | events: null .. null
content:
```python
import time

from PyQt5 import QtGui, QtCore
from ui.room_item import Ui_Form
from PyQt5.QtWidgets import QWidget


class Room_Item(QWidget, Ui_Form):
    def __init__(self, parent=None, room_data=None):
        super(Room_Item, self).__init__(parent)
        self.setupUi(self)
        self.data = room_data
        self.setRoomInfo()

    def setRoomInfo(self):
        self.room_name.setText('{}({})'.format(self.data['naturalName'], self.data['roomName']))
        self.description.setText("<a style='color:#BCBCBC'>{}</a>".format(self.data['description']))

        timeStamp = int(self.data['creationDate']) / 1000
        timeArray = time.localtime(timeStamp)
        otherStyleTime = time.strftime("%Y-%m-%d", timeArray)
        self.create_time.setText("<a style='color:#BCBCBC'>{}</a>".format(otherStyleTime))

        members = len(self.data['owners']) + len(self.data['admins']) + len(self.data['members'])
        memberCounter = "<a style='color:#BCBCBC'>{}/{}</a>".format(members, ('∞' if self.data['maxUsers']==0 else self.data['maxUsers']))
        self.member.setText(memberCounter)
```
avg_line_length: 45.583333 | max_line_length: 138 | alphanum_fraction: 0.659049
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f7000371f0315cd55c0b14b33e7e8e56697cfc2e | size: 10,498 | ext: py | lang: Python
max_stars: src/winforms/toga_winforms/app.py @ holg/toga (9dd766e749c6cf29cdb1127c7637150381ac396d) | licenses: ["BSD-3-Clause"] | stars: 1 | events: 2020-07-16T00:46:24.000Z .. 2020-07-16T00:46:24.000Z
max_issues: src/winforms/toga_winforms/app.py @ holg/toga (9dd766e749c6cf29cdb1127c7637150381ac396d) | licenses: ["BSD-3-Clause"] | issues: null | events: null .. null
max_forks: src/winforms/toga_winforms/app.py @ holg/toga (9dd766e749c6cf29cdb1127c7637150381ac396d) | licenses: ["BSD-3-Clause"] | forks: null | events: null .. null
content:
```python
import asyncio
import re
import sys
import traceback

import toga
from toga import Key

from .keys import toga_to_winforms_key
from .libs import Threading, WinForms, shcore, user32, win_version
from .libs.proactor import WinformsProactorEventLoop
from .window import Window


class MainWindow(Window):
    def winforms_FormClosing(self, sender, event):
        if not self.interface.app._impl._is_exiting:
            event.Cancel = not self.interface.app.exit()


class App:
    _MAIN_WINDOW_CLASS = MainWindow

    def __init__(self, interface):
        self.interface = interface
        self.interface._impl = self

        # Winforms app exit is tightly bound to the close of the MainWindow.
        # The FormClosing message on MainWindow calls app.exit(), which
        # will then trigger the "on_exit" handler (which might abort the
        # close). However, if app.exit() succeeds, it will request the
        # Main Window to close... which calls app.exit().
        # So - we have a flag that is only ever sent once a request has been
        # made to exit the native app. This flag can be used to shortcut any
        # window-level close handling.
        self._is_exiting = False

        self.loop = WinformsProactorEventLoop()
        asyncio.set_event_loop(self.loop)

    def create(self):
        self.native = WinForms.Application
        self.app_context = WinForms.ApplicationContext()

        # Check the version of windows and make sure we are setting the DPI mode
        # with the most up to date API
        # Windows Versioning Check Sources : https://www.lifewire.com/windows-version-numbers-2625171
        # and https://docs.microsoft.com/en-us/windows/release-information/
        if win_version.Major >= 6:  # Checks for Windows Vista or later
            # Represents Windows 8.1 up to Windows 10 before Build 1703 which should use
            # SetProcessDpiAwareness(True)
            if ((win_version.Major == 6 and win_version.Minor == 3) or
                    (win_version.Major == 10 and win_version.Build < 15063)):
                shcore.SetProcessDpiAwareness(True)
            # Represents Windows 10 Build 1703 and beyond which should use
            # SetProcessDpiAwarenessContext(-2)
            elif win_version.Major == 10 and win_version.Build >= 15063:
                user32.SetProcessDpiAwarenessContext(-2)
            # Any other version of windows should use SetProcessDPIAware()
            else:
                user32.SetProcessDPIAware()

        self.native.EnableVisualStyles()
        self.native.SetCompatibleTextRenderingDefault(False)

        self.interface.commands.add(
            toga.Command(
                lambda _: self.interface.about(),
                'About {}'.format(self.interface.name),
                group=toga.Group.HELP
            ),
            toga.Command(None, 'Preferences', group=toga.Group.FILE),
            # Quit should always be the last item, in a section on it's own
            toga.Command(
                lambda _: self.interface.exit(),
                'Exit ' + self.interface.name,
                shortcut=Key.MOD_1 + 'q',
                group=toga.Group.FILE,
                section=sys.maxsize
            ),
            toga.Command(
                lambda _: self.interface.visit_homepage(),
                'Visit homepage',
                enabled=self.interface.home_page is not None,
                group=toga.Group.HELP
            )
        )
        self._create_app_commands()

        # Call user code to populate the main window
        self.interface.startup()
        self.create_menus()
        self.interface.icon.bind(self.interface.factory)
        self.interface.main_window._impl.set_app(self)

    def create_menus(self):
        self._menu_items = {}
        self._menu_groups = {}

        toga.Group.FILE.order = 0
        menubar = WinForms.MenuStrip()
        submenu = None
        for cmd in self.interface.commands:
            if cmd == toga.GROUP_BREAK:
                submenu = None
            elif cmd == toga.SECTION_BREAK:
                submenu.DropDownItems.Add('-')
            else:
                submenu = self._submenu(cmd.group, menubar)
                item = WinForms.ToolStripMenuItem(cmd.label)
                if cmd.action:
                    item.Click += cmd._impl.as_handler()
                item.Enabled = cmd.enabled
                if cmd.shortcut is not None:
                    shortcut_keys = toga_to_winforms_key(cmd.shortcut)
                    item.ShortcutKeys = shortcut_keys
                    item.ShowShortcutKeys = True
                cmd._impl.native.append(item)
                self._menu_items[item] = cmd
                submenu.DropDownItems.Add(item)

        self.interface.main_window._impl.native.Controls.Add(menubar)
        self.interface.main_window._impl.native.MainMenuStrip = menubar
        self.interface.main_window.content.refresh()

    def _submenu(self, group, menubar):
        try:
            return self._menu_groups[group]
        except KeyError:
            if group is None:
                submenu = menubar
            else:
                parent_menu = self._submenu(group.parent, menubar)
                submenu = WinForms.ToolStripMenuItem(group.label)
                # Top level menus are added in a different way to submenus
                if group.parent is None:
                    parent_menu.Items.Add(submenu)
                else:
                    parent_menu.DropDownItems.Add(submenu)
            self._menu_groups[group] = submenu
        return submenu

    def _create_app_commands(self):
        # No extra menus
        pass

    def open_document(self, fileURL):
        '''Add a new document to this app.'''
        print("STUB: If you want to handle opening documents, implement App.open_document(fileURL)")

    def winforms_thread_exception(self, sender, winforms_exc):
        # The PythonException returned by Winforms doesn't give us
        # easy access to the underlying Python stacktrace; so we
        # reconstruct it from the string message.
        # The Python message is helpfully included in square brackets,
        # as the context for the first line in the .net stack trace.
        # So, look for the closing bracket and the start of the Python.net
        # stack trace. Then, reconstruct the line breaks internal to the
        # remaining string.
        print("Traceback (most recent call last):")
        py_exc = winforms_exc.get_Exception()
        full_stack_trace = py_exc.StackTrace
        regex = re.compile(
            r"^\[(?:'(.*?)', )*(?:'(.*?)')\] (?:.*?) Python\.Runtime",
            re.DOTALL | re.UNICODE
        )
        stacktrace_relevant_lines = regex.findall(full_stack_trace)
        if len(stacktrace_relevant_lines) == 0:
            self.print_stack_trace(full_stack_trace)
        else:
            for lines in stacktrace_relevant_lines:
                for line in lines:
                    self.print_stack_trace(line)
        print(py_exc.Message)

    @classmethod
    def print_stack_trace(cls, stack_trace_line):
        for level in stack_trace_line.split("', '"):
            for line in level.split("\\n"):
                if line:
                    print(line)

    def run_app(self):
        try:
            self.create()
            self.native.ThreadException += self.winforms_thread_exception
            self.loop.run_forever(self.app_context)
        except:  # NOQA
            traceback.print_exc()

    def main_loop(self):
        thread = Threading.Thread(Threading.ThreadStart(self.run_app))
        thread.SetApartmentState(Threading.ApartmentState.STA)
        thread.Start()
        thread.Join()

    def show_about_dialog(self):
        message_parts = []
        if self.interface.name is not None:
            if self.interface.version is not None:
                message_parts.append(
                    "{name} v{version}".format(
                        name=self.interface.name,
                        version=self.interface.version,
                    )
                )
            else:
                message_parts.append(
                    "{name}".format(name=self.interface.name)
                )
        elif self.interface.version is not None:
            message_parts.append(
                "v{version}".format(version=self.interface.version)
            )

        if self.interface.author is not None:
            message_parts.append(
                "Author: {author}".format(author=self.interface.author)
            )
        if self.interface.description is not None:
            message_parts.append(
                "\n{description}".format(
                    description=self.interface.description
                )
            )
        self.interface.main_window.info_dialog(
            'About {}'.format(self.interface.name),
            "\n".join(message_parts)
        )

    def exit(self):
        self._is_exiting = True
        self.native.Exit()

    def set_main_window(self, window):
        self.app_context.MainForm = window._impl.native

    def set_on_exit(self, value):
        pass

    def current_window(self):
        self.interface.factory.not_implemented('App.current_window()')

    def enter_full_screen(self, windows):
        self.interface.factory.not_implemented('App.enter_full_screen()')

    def exit_full_screen(self, windows):
        self.interface.factory.not_implemented('App.exit_full_screen()')

    def set_cursor(self, value):
        self.interface.factory.not_implemented('App.set_cursor()')

    def show_cursor(self):
        self.interface.factory.not_implemented('App.show_cursor()')

    def hide_cursor(self):
        self.interface.factory.not_implemented('App.hide_cursor()')

    def add_background_task(self, handler):
        self.loop.call_soon(handler, self)


class DocumentApp(App):
    def _create_app_commands(self):
        self.interface.commands.add(
            toga.Command(
                lambda w: self.open_file,
                label='Open...',
                shortcut=Key.MOD_1 + 'o',
                group=toga.Group.FILE,
                section=0
            ),
        )

    def open_document(self, fileURL):
        """Open a new document in this app.

        Args:
            fileURL (str): The URL/path to the file to add as a document.
        """
        self.interface.factory.not_implemented('DocumentApp.open_document()')
```
avg_line_length: 35.952055 | max_line_length: 101 | alphanum_fraction: 0.597638
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f7000456815408e3a0899443a0df077b039855c4 | size: 1,731 | ext: py | lang: Python
max_stars: __init__.py @ luoxiangyong/qgissprp (4698462743e11eac486af4b60046b99ae2abc1b0) | licenses: ["BSD-2-Clause"] | stars: null | events: null .. null
max_issues: __init__.py @ luoxiangyong/qgissprp (4698462743e11eac486af4b60046b99ae2abc1b0) | licenses: ["BSD-2-Clause"] | issues: null | events: null .. null
max_forks: __init__.py @ luoxiangyong/qgissprp (4698462743e11eac486af4b60046b99ae2abc1b0) | licenses: ["BSD-2-Clause"] | forks: null | events: null .. null
content:
```python
# -*- coding: utf-8 -*-
"""
/***************************************************************************
 SimplePhotogrammetryRoutePlanner
                                 A QGIS plugin
 A imple photogrammetry route planner.
 Generated by Plugin Builder: http://g-sherman.github.io/Qgis-Plugin-Builder/
                             -------------------
        begin                : 2021-04-24
        copyright            : (C) 2021 by Xiangyong Luo
        email                : [email protected]
        git sha              : $Format:%H$
 ***************************************************************************/

/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify *
 *   it under the terms of the GNU General Public License as published by *
 *   the Free Software Foundation; either version 2 of the License, or    *
 *   (at your option) any later version.                                  *
 *                                                                         *
 ***************************************************************************/
 This script initializes the plugin, making it known to QGIS.
"""

__version__ = "0.4.0"


# noinspection PyPep8Naming
def classFactory(iface):  # pylint: disable=invalid-name
    """Load SimplePhotogrammetryRoutePlanner class from file SimplePhotogrammetryRoutePlanner.

    :param iface: A QGIS interface instance.
    :type iface: QgsInterface
    """
    #
    from .SimplePhotogrammetryRoutePlanner import SimplePhotogrammetryRoutePlanner
    return SimplePhotogrammetryRoutePlanner(iface)
```
avg_line_length: 46.783784 | max_line_length: 94 | alphanum_fraction: 0.458117
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f70004a44b39e2f1be17fb0ebfe7da0897c5e85d | size: 671 | ext: py | lang: Python
max_stars: eslearn/utils/lc_featureSelection_variance.py @ dongmengshi/easylearn (df528aaa69c3cf61f5459a04671642eb49421dfb) | licenses: ["MIT"] | stars: 19 | events: 2020-02-29T06:00:18.000Z .. 2022-01-24T01:30:14.000Z
max_issues: eslearn/utils/lc_featureSelection_variance.py @ dongmengshi/easylearn (df528aaa69c3cf61f5459a04671642eb49421dfb) | licenses: ["MIT"] | issues: 7 | events: 2020-04-02T03:05:21.000Z .. 2020-11-11T11:45:05.000Z
max_forks: eslearn/utils/lc_featureSelection_variance.py @ dongmengshi/easylearn (df528aaa69c3cf61f5459a04671642eb49421dfb) | licenses: ["MIT"] | forks: 11 | events: 2020-03-03T03:02:15.000Z .. 2020-11-11T14:09:55.000Z
content:
```python
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 24 14:38:20 2018
dimension reduction with VarianceThreshold using sklearn.
Feature selector that removes all low-variance features.
@author: lenovo
"""
from sklearn.feature_selection import VarianceThreshold
import numpy as np

#
np.random.seed(1)
X = np.random.randn(100, 10)
X = np.hstack([X, np.zeros([100, 5])])

#
def featureSelection_variance(X, thrd):
    sel = VarianceThreshold(threshold=thrd)
    X_selected = sel.fit_transform(X)
    mask = sel.get_support()
    return X_selected, mask

X = [[0, 2, 0, 3], [0, 1, 4, 3], [0, 1, 1, 3]]
selector = VarianceThreshold()
selector.fit_transform(X)
selector.variances_
```
avg_line_length: 23.964286 | max_line_length: 57 | alphanum_fraction: 0.709389
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f70004bcd049386ad073e2d45bf8fe56d0639a36 | size: 3,382 | ext: py | lang: Python
max_stars: mnist/my_multi_tune3.py @ silent567/examples (e9de12549125ecd93a4924f6b8e2bbf66d7635d9) | licenses: ["BSD-3-Clause"] | stars: null | events: null .. null
max_issues: mnist/my_multi_tune3.py @ silent567/examples (e9de12549125ecd93a4924f6b8e2bbf66d7635d9) | licenses: ["BSD-3-Clause"] | issues: null | events: null .. null
max_forks: mnist/my_multi_tune3.py @ silent567/examples (e9de12549125ecd93a4924f6b8e2bbf66d7635d9) | licenses: ["BSD-3-Clause"] | forks: null | events: null .. null
content:
```python
#!/usr/bin/env python
# coding=utf-8

from my_multi_main3 import main
import numpy as np
import argparse
import time

parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
                    help='input batch size for training (default: 64)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
                    help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
                    help='number of epochs to train (default: 10)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
                    help='learning rate (default: 0.01)')
parser.add_argument('--momentum', type=float, default=0.5, metavar='M',
                    help='SGD momentum (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
                    help='disables CUDA training')
parser.add_argument('--seed', type=int, default=1, metavar='S',
                    help='random seed (default: 1)')
parser.add_argument('--log-interval', type=int, default=10, metavar='N',
                    help='how many batches to wait before logging training status')
parser.add_argument('--save-model', action='store_true', default=False,
                    help='For Saving the current Model')
parser.add_argument('--norm-flag', type=bool, default=False,
                    help='Triggering the Layer Normalization flag for attention scores')
parser.add_argument('--gamma', type=float, default=None,
                    help='Controlling the sparisty of gfusedmax/sparsemax, the smaller, the more sparse')
parser.add_argument('--lam', type=float, default=1.0,
                    help='Lambda: Controlling the smoothness of gfusedmax, the larger, the smoother')
parser.add_argument('--max-type', type=str, default='softmax', choices=['softmax', 'sparsemax', 'gfusedmax'],
                    help='mapping function in attention')
parser.add_argument('--optim-type', type=str, default='SGD', choices=['SGD', 'Adam'],
                    help='mapping function in attention')
parser.add_argument('--head-cnt', type=int, default=2, metavar='S', choices=[1, 2, 4, 5, 10],
                    help='Number of heads for attention (default: 1)')

args = parser.parse_args()

hyperparameter_choices = {
    'lr': list(10**np.arange(-4, -1, 0.5)),
    'norm_flag': [True, False],
    'gamma': list(10**np.arange(-1, 3, 0.5)) + [None, ],
    'lam': list(10**np.arange(-2, 2, 0.5)),
    'max_type': ['softmax', 'sparsemax', 'gfusedmax'],
    # 'max_type':['sparsemax'],
    'optim_type': ['SGD', 'Adam'],
    'head_cnt': [1, 2, 4, 5, 10, 20]
}

param_num = 25
record = np.zeros([param_num, len(hyperparameter_choices) + 1])
record_name = 'record3_multi_%s.csv' % time.strftime('%Y-%m-%d_%H-%M-%S', time.localtime())
for n in range(param_num):
    for param_index, (k, v) in enumerate(hyperparameter_choices.items()):
        print(param_index, k)
        value_index = np.random.choice(len(v))
        if isinstance(v[value_index], str) or isinstance(v[value_index], bool) or v[value_index] is None:
            record[n, param_index] = value_index
        else:
            record[n, param_index] = v[value_index]
        setattr(args, k, v[value_index])
    record[n, -1] = main(args)
    np.savetxt(record_name, record, delimiter=',')
```
avg_line_length: 47.633803 | max_line_length: 106 | alphanum_fraction: 0.642815
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f70004db8d93803fe1fd484a52ec6add2822ccb6 | size: 1,050 | ext: py | lang: Python
max_stars: spiker/data/hdf5.py @ duguyue100/spiker (09437be393d7adf132f8ee2682e5b5b009c793a1) | licenses: ["MIT"] | stars: 1 | events: 2021-01-13T10:46:44.000Z .. 2021-01-13T10:46:44.000Z
max_issues: spiker/data/hdf5.py @ duguyue100/spiker (09437be393d7adf132f8ee2682e5b5b009c793a1) | licenses: ["MIT"] | issues: null | events: null .. null
max_forks: spiker/data/hdf5.py @ duguyue100/spiker (09437be393d7adf132f8ee2682e5b5b009c793a1) | licenses: ["MIT"] | forks: null | events: null .. null
content:
"""HDF5 related files. This file contains a set of functions that related to read and write HDF5 files. Author: Yuhuang Hu Email : [email protected] """ from __future__ import print_function, absolute_import import h5py from spiker import log logger = log.get_logger("data-hdf5", log.DEBUG) def init_hdf5(file_path, mode="w", cam_type="davis"): """Init HDF5 file object. # Parameters file_path : str absolute path for the HDF5 file. mode : str w : for writing r : for reading cam_type : str davis : for DAVIS camera dvs : for DVS camera # Returns dataset : h5py.File The file object of the given dataset """ if mode == "w": dataset = h5py.File(file_path, mode=mode) dataset.create_group("dvs") dataset.create_group("extra") if cam_type == "davis": dataset.create_group("aps") dataset.create_group("imu") elif mode == "r": dataset = h5py.File(file_path, mode=mode) return dataset
avg_line_length: 22.826087 | max_line_length: 68 | alphanum_fraction: 0.629524
is_comment_constant_removed: true | is_sharp_comment_removed: true
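
A short usage sketch for the `spiker/data/hdf5.py` record above (not part of the record; the target path is hypothetical):

```python
from spiker.data.hdf5 import init_hdf5

# Writing: creates dvs/extra groups, plus aps/imu for a DAVIS camera.
dataset = init_hdf5("/tmp/recording.h5", mode="w", cam_type="davis")
print(sorted(dataset.keys()))  # ['aps', 'dvs', 'extra', 'imu']
dataset.close()

# Reading: simply reopens the file object read-only.
dataset = init_hdf5("/tmp/recording.h5", mode="r")
```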

hexsha: f70006680091e477a9da34fc8c775b99d72def25 | size: 951 | ext: py | lang: Python
max_stars: thirdparty/org/apache/arrow/flatbuf/FloatingPoint.py @ mrocklin/pygdf (2de9407427da9497ebdf8951a12857be0fab31bb) | licenses: ["Apache-2.0"] | stars: 5 | events: 2018-10-17T20:28:42.000Z .. 2022-02-15T17:33:01.000Z
max_issues: thirdparty/org/apache/arrow/flatbuf/FloatingPoint.py @ mrocklin/pygdf (2de9407427da9497ebdf8951a12857be0fab31bb) | licenses: ["Apache-2.0"] | issues: 19 | events: 2018-07-18T07:15:44.000Z .. 2021-02-22T17:00:18.000Z
max_forks: thirdparty/org/apache/arrow/flatbuf/FloatingPoint.py @ mrocklin/pygdf (2de9407427da9497ebdf8951a12857be0fab31bb) | licenses: ["Apache-2.0"] | forks: 2 | events: 2020-05-01T09:54:34.000Z .. 2021-04-17T10:57:07.000Z
content:
```python
# automatically generated by the FlatBuffers compiler, do not modify

# namespace: flatbuf

import flatbuffers

class FloatingPoint(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAsFloatingPoint(cls, buf, offset):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = FloatingPoint()
        x.Init(buf, n + offset)
        return x

    # FloatingPoint
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

    # FloatingPoint
    def Precision(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int16Flags, o + self._tab.Pos)
        return 0

def FloatingPointStart(builder): builder.StartObject(1)
def FloatingPointAddPrecision(builder, precision): builder.PrependInt16Slot(0, precision, 0)
def FloatingPointEnd(builder): return builder.EndObject()
```
avg_line_length: 30.677419 | max_line_length: 92 | alphanum_fraction: 0.698212
is_comment_constant_removed: true | is_sharp_comment_removed: true
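
A hedged round-trip sketch for the generated `FloatingPoint` accessors above, using the stock `flatbuffers` builder API (not part of the record; the import assumes the `thirdparty` directory is on `sys.path`, and precision value 1 corresponds to SINGLE in Arrow's Precision enum):

```python
import flatbuffers
from org.apache.arrow.flatbuf import FloatingPoint

# Serialize a FloatingPoint table with precision = 1.
builder = flatbuffers.Builder(0)
FloatingPoint.FloatingPointStart(builder)
FloatingPoint.FloatingPointAddPrecision(builder, 1)
fp = FloatingPoint.FloatingPointEnd(builder)
builder.Finish(fp)

# Deserialize it again from the finished buffer.
buf = builder.Output()
decoded = FloatingPoint.FloatingPoint.GetRootAsFloatingPoint(buf, 0)
print(decoded.Precision())  # 1
```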

hexsha: f70006c8c5153a3a1bb1f109dc563a53c20f0e43 | size: 162 | ext: py | lang: Python
max_stars: .history/Classiles/scynced_lights_20210615191535.py @ minefarmer/Coding101-OOP (d5655977559e3bd1acf6a4f185a6121cc3b05ce4) | licenses: ["Unlicense"] | stars: null | events: null .. null
max_issues: .history/Classiles/scynced_lights_20210615191535.py @ minefarmer/Coding101-OOP (d5655977559e3bd1acf6a4f185a6121cc3b05ce4) | licenses: ["Unlicense"] | issues: null | events: null .. null
max_forks: .history/Classiles/scynced_lights_20210615191535.py @ minefarmer/Coding101-OOP (d5655977559e3bd1acf6a4f185a6121cc3b05ce4) | licenses: ["Unlicense"] | forks: null | events: null .. null
content:
"""[Scynced Lights] Class attributes are "shared" Instance attributes are not shared. """ def sub(x, y): f class Light: pass a = Light() b = Ligth()
avg_line_length: 10.125 | max_line_length: 35 | alphanum_fraction: 0.62963
is_comment_constant_removed: true | is_sharp_comment_removed: true

hexsha: f70006d0df161d84dd7ec30a6d7506b5802d1f0c | size: 9,378 | ext: py | lang: Python
max_stars: pyspider/libs/counter.py @ willworks/pyspider (9fc2ffa57324d1a42ef767289faa3a04f4d20f2e) | licenses: ["Apache-2.0"] | stars: 1 | events: 2015-11-08T07:33:31.000Z .. 2015-11-08T07:33:31.000Z
max_issues: pyspider/libs/counter.py @ willworks/pyspider (9fc2ffa57324d1a42ef767289faa3a04f4d20f2e) | licenses: ["Apache-2.0"] | issues: null | events: null .. null
max_forks: pyspider/libs/counter.py @ willworks/pyspider (9fc2ffa57324d1a42ef767289faa3a04f4d20f2e) | licenses: ["Apache-2.0"] | forks: null | events: null .. null
content:
```python
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
#         http://binux.me
# Created on 2012-11-14 17:09:50

from __future__ import unicode_literals, division, absolute_import

import time
import logging
from collections import deque
try:
    from UserDict import DictMixin
except ImportError:
    from collections import Mapping as DictMixin
import six
from six import iteritems
from six.moves import cPickle


class BaseCounter(object):
    def __init__(self):
        raise NotImplementedError

    def event(self, value=1):
        """Fire a event."""
        raise NotImplementedError

    def value(self, value):
        """Set counter value."""
        raise NotImplementedError

    @property
    def avg(self):
        """Get average value"""
        raise NotImplementedError

    @property
    def sum(self):
        """Get sum of counter"""
        raise NotImplementedError

    def empty(self):
        """Clear counter"""
        raise NotImplementedError


class TotalCounter(BaseCounter):
    """Total counter"""

    def __init__(self):
        self.cnt = 0

    def event(self, value=1):
        self.cnt += value

    def value(self, value):
        self.cnt = value

    @property
    def avg(self):
        return self.cnt

    @property
    def sum(self):
        return self.cnt

    def empty(self):
        return self.cnt == 0


class AverageWindowCounter(BaseCounter):
    """
    Record last N(window) value
    """

    def __init__(self, window_size=300):
        self.window_size = window_size
        self.values = deque(maxlen=window_size)

    def event(self, value=1):
        self.values.append(value)

    value = event

    @property
    def avg(self):
        return self.sum / len(self.values)

    @property
    def sum(self):
        return sum(self.values)

    def empty(self):
        if not self.values:
            return True


class TimebaseAverageWindowCounter(BaseCounter):
    """
    Record last window_size * window_interval seconds values.

    records will trim evert window_interval seconds
    """

    def __init__(self, window_size=30, window_interval=10):
        self.max_window_size = window_size
        self.window_size = 0
        self.window_interval = window_interval
        self.values = deque(maxlen=window_size)
        self.times = deque(maxlen=window_size)

        self.cache_value = 0
        self.cache_start = None
        self._first_data_time = None

    def event(self, value=1):
        now = time.time()
        if self._first_data_time is None:
            self._first_data_time = now

        if self.cache_start is None:
            self.cache_value = value
            self.cache_start = now
        elif now - self.cache_start > self.window_interval:
            self.values.append(self.cache_value)
            self.times.append(self.cache_start)
            self.on_append(self.cache_value, self.cache_start)
            self.cache_value = value
            self.cache_start = now
        else:
            self.cache_value += value
        return self

    def value(self, value):
        self.cache_value = value

    def _trim_window(self):
        now = time.time()
        if self.cache_start and now - self.cache_start > self.window_interval:
            self.values.append(self.cache_value)
            self.times.append(self.cache_start)
            self.on_append(self.cache_value, self.cache_start)
            self.cache_value = 0
            self.cache_start = None

        if self.window_size != self.max_window_size and self._first_data_time is not None:
            time_passed = now - self._first_data_time
            self.window_size = min(self.max_window_size, time_passed / self.window_interval)

        window_limit = now - self.window_size * self.window_interval
        while self.times and self.times[0] < window_limit:
            self.times.popleft()
            self.values.popleft()

    @property
    def avg(self):
        sum = float(self.sum)
        if not self.window_size:
            return 0
        return sum / self.window_size / self.window_interval

    @property
    def sum(self):
        self._trim_window()
        return sum(self.values) + self.cache_value

    def empty(self):
        self._trim_window()
        if not self.values and not self.cache_start:
            return True

    def on_append(self, value, time):
        pass


class CounterValue(DictMixin):
    """
    A dict like value item for CounterManager.
    """

    def __init__(self, manager, keys):
        self.manager = manager
        self._keys = keys

    def __getitem__(self, key):
        if key == '__value__':
            key = self._keys
            return self.manager.counters[key]
        else:
            key = self._keys + (key, )

        available_keys = []
        for _key in self.manager.counters:
            if _key[:len(key)] == key:
                available_keys.append(_key)

        if len(available_keys) == 0:
            raise KeyError
        elif len(available_keys) == 1:
            if available_keys[0] == key:
                return self.manager.counters[key]
            else:
                return CounterValue(self.manager, key)
        else:
            return CounterValue(self.manager, key)

    def __len__(self):
        return len(self.keys())

    def __iter__(self):
        return iter(self.keys())

    def __contains__(self, key):
        return key in self.keys()

    def keys(self):
        result = set()
        for key in self.manager.counters:
            if key[:len(self._keys)] == self._keys:
                key = key[len(self._keys):]
                result.add(key[0] if key else '__value__')
        return result

    def to_dict(self, get_value=None):
        """Dump counters as a dict"""
        result = {}
        for key, value in iteritems(self):
            if isinstance(value, BaseCounter):
                if get_value is not None:
                    value = getattr(value, get_value)
                result[key] = value
            else:
                result[key] = value.to_dict(get_value)
        return result


class CounterManager(DictMixin):
    """
    A dict like counter manager.

    When using a tuple as event key, say: ('foo', 'bar'), You can visite
    counter with manager['foo']['bar']. Or get all counters which first
    element is 'foo' by manager['foo'].

    It's useful for a group of counters.
    """

    def __init__(self, cls=TimebaseAverageWindowCounter):
        """init manager with Counter cls"""
        self.cls = cls
        self.counters = {}

    def event(self, key, value=1):
        """Fire a event of a counter by counter key"""
        if isinstance(key, six.string_types):
            key = (key, )
        assert isinstance(key, tuple), "event key type error"
        if key not in self.counters:
            self.counters[key] = self.cls()
        self.counters[key].event(value)
        return self

    def value(self, key, value=1):
        """Set value of a counter by counter key"""
        if isinstance(key, six.string_types):
            key = (key, )
        assert isinstance(key, tuple), "event key type error"
        if key not in self.counters:
            self.counters[key] = self.cls()
        self.counters[key].value(value)
        return self

    def trim(self):
        """Clear not used counters"""
        for key, value in list(iteritems(self.counters)):
            if value.empty():
                del self.counters[key]

    def __getitem__(self, key):
        key = (key, )
        available_keys = []
        for _key in self.counters:
            if _key[:len(key)] == key:
                available_keys.append(_key)

        if len(available_keys) == 0:
            raise KeyError
        elif len(available_keys) == 1:
            if available_keys[0] == key:
                return self.counters[key]
            else:
                return CounterValue(self, key)
        else:
            return CounterValue(self, key)

    def __iter__(self):
        return iter(self.keys())

    def __len__(self):
        return len(self.keys())

    def keys(self):
        result = set()
        for key in self.counters:
            result.add(key[0] if key else ())
        return result

    def to_dict(self, get_value=None):
        """Dump counters as a dict"""
        self.trim()
        result = {}
        for key, value in iteritems(self):
            if isinstance(value, BaseCounter):
                if get_value is not None:
                    value = getattr(value, get_value)
                result[key] = value
            else:
                result[key] = value.to_dict(get_value)
        return result

    def dump(self, filename):
        """Dump counters to file"""
        try:
            with open(filename, 'wb') as fp:
                cPickle.dump(self.counters, fp)
        except:
            logging.error("can't dump counter to file: %s" % filename)
            return False
        return True

    def load(self, filename):
        """Load counters to file"""
        try:
            with open(filename) as fp:
                self.counters = cPickle.load(fp)
        except:
            logging.debug("can't load counter from file: %s" % filename)
            return False
        return True
```
avg_line_length: 27.341108 | max_line_length: 92 | alphanum_fraction: 0.579335
true
true
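A minimal usage sketch for the counter hierarchy in the record above. It assumes the module is importable as counter.py with six installed (the DictMixin fallback targets older Pythons; on 3.10+ collections.Mapping is gone, so collections.abc.Mapping would be needed); the key names here are made up for illustration.

from counter import CounterManager, TotalCounter

mgr = CounterManager(cls=TotalCounter)
mgr.event(('project', 'success'), 1)   # tuple keys form a hierarchy
mgr.event(('project', 'failed'), 2)
mgr.event('pending')                   # a bare string becomes a 1-tuple

# Nested lookup goes through CounterValue; leaf counters expose .sum / .avg
print(mgr['project']['success'].sum)   # -> 1
print(mgr.to_dict(get_value='sum'))    # -> {'project': {'success': 1, 'failed': 2}, 'pending': 1}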
f700088372c0eeaff049211c5fe92cdccb5fa804
6,706
py
Python
src/transformers/models/vit/feature_extraction_vit.py
djroxx2000/transformers
77770ec79883343d32051cfb6a04f64523cd8df1
[ "Apache-2.0" ]
723
2020-07-16T13:02:25.000Z
2022-03-31T21:03:55.000Z
src/transformers/models/vit/feature_extraction_vit.py
4nalog/transformers
76cadb7943c8492ec481f4f3925e9e8793a32c9d
[ "Apache-2.0" ]
170
2020-07-16T14:39:11.000Z
2022-03-31T13:02:11.000Z
src/transformers/models/vit/feature_extraction_vit.py
4nalog/transformers
76cadb7943c8492ec481f4f3925e9e8793a32c9d
[ "Apache-2.0" ]
131
2020-07-16T14:38:16.000Z
2022-03-29T19:43:18.000Z
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Feature extractor class for ViT."""

from typing import List, Optional, Union

import numpy as np
from PIL import Image

from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from ...file_utils import TensorType
from ...image_utils import IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ImageFeatureExtractionMixin, is_torch_tensor
from ...utils import logging


logger = logging.get_logger(__name__)


class ViTFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):
    r"""
    Constructs a ViT feature extractor.

    This feature extractor inherits from :class:`~transformers.FeatureExtractionMixin` which contains most of the main
    methods. Users should refer to this superclass for more information regarding those methods.

    Args:
        do_resize (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether to resize the input to a certain :obj:`size`.
        size (:obj:`int` or :obj:`Tuple(int)`, `optional`, defaults to 224):
            Resize the input to the given size. If a tuple is provided, it should be (width, height). If only an
            integer is provided, then the input will be resized to (size, size). Only has an effect if :obj:`do_resize`
            is set to :obj:`True`.
        resample (:obj:`int`, `optional`, defaults to :obj:`PIL.Image.BILINEAR`):
            An optional resampling filter. This can be one of :obj:`PIL.Image.NEAREST`, :obj:`PIL.Image.BOX`,
            :obj:`PIL.Image.BILINEAR`, :obj:`PIL.Image.HAMMING`, :obj:`PIL.Image.BICUBIC` or :obj:`PIL.Image.LANCZOS`.
            Only has an effect if :obj:`do_resize` is set to :obj:`True`.
        do_normalize (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether or not to normalize the input with mean and standard deviation.
        image_mean (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
            The sequence of means for each channel, to be used when normalizing images.
        image_std (:obj:`List[int]`, defaults to :obj:`[0.5, 0.5, 0.5]`):
            The sequence of standard deviations for each channel, to be used when normalizing images.
    """

    model_input_names = ["pixel_values"]

    def __init__(
        self,
        do_resize=True,
        size=224,
        resample=Image.BILINEAR,
        do_normalize=True,
        image_mean=None,
        image_std=None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.do_resize = do_resize
        self.size = size
        self.resample = resample
        self.do_normalize = do_normalize
        self.image_mean = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN
        self.image_std = image_std if image_std is not None else IMAGENET_STANDARD_STD

    def __call__(
        self,
        images: Union[
            Image.Image, np.ndarray, "torch.Tensor", List[Image.Image], List[np.ndarray], List["torch.Tensor"]  # noqa
        ],
        return_tensors: Optional[Union[str, TensorType]] = None,
        **kwargs
    ) -> BatchFeature:
        """
        Main method to prepare for the model one or several image(s).

        .. warning::

           NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to
           pass PIL images.

        Args:
            images (:obj:`PIL.Image.Image`, :obj:`np.ndarray`, :obj:`torch.Tensor`, :obj:`List[PIL.Image.Image]`, :obj:`List[np.ndarray]`, :obj:`List[torch.Tensor]`):
                The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
                tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
                number of channels, H and W are image height and width.
            return_tensors (:obj:`str` or :class:`~transformers.file_utils.TensorType`, `optional`, defaults to :obj:`'np'`):
                If set, will return tensors of a particular framework. Acceptable values are:

                * :obj:`'tf'`: Return TensorFlow :obj:`tf.constant` objects.
                * :obj:`'pt'`: Return PyTorch :obj:`torch.Tensor` objects.
                * :obj:`'np'`: Return NumPy :obj:`np.ndarray` objects.
                * :obj:`'jax'`: Return JAX :obj:`jnp.ndarray` objects.

        Returns:
            :class:`~transformers.BatchFeature`: A :class:`~transformers.BatchFeature` with the following fields:

            - **pixel_values** -- Pixel values to be fed to a model, of shape (batch_size, num_channels, height,
              width).
        """
        # Input type checking for clearer error
        valid_images = False

        # Check that images has a valid type
        if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images):
            valid_images = True
        elif isinstance(images, (list, tuple)):
            if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]):
                valid_images = True

        if not valid_images:
            raise ValueError(
                "Images must of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example),"
                "`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)."
            )

        is_batched = bool(
            isinstance(images, (list, tuple))
            and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]))
        )

        if not is_batched:
            images = [images]

        # transformations (resizing + normalization)
        if self.do_resize and self.size is not None:
            images = [self.resize(image=image, size=self.size, resample=self.resample) for image in images]
        if self.do_normalize:
            images = [self.normalize(image=image, mean=self.image_mean, std=self.image_std) for image in images]

        # return as BatchFeature
        data = {"pixel_values": images}
        encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)

        return encoded_inputs
45.310811
166
0.646287
from typing import List, Optional, Union

import numpy as np
from PIL import Image

from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from ...file_utils import TensorType
from ...image_utils import IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ImageFeatureExtractionMixin, is_torch_tensor
from ...utils import logging


logger = logging.get_logger(__name__)


class ViTFeatureExtractor(FeatureExtractionMixin, ImageFeatureExtractionMixin):

    model_input_names = ["pixel_values"]

    def __init__(
        self,
        do_resize=True,
        size=224,
        resample=Image.BILINEAR,
        do_normalize=True,
        image_mean=None,
        image_std=None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.do_resize = do_resize
        self.size = size
        self.resample = resample
        self.do_normalize = do_normalize
        self.image_mean = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN
        self.image_std = image_std if image_std is not None else IMAGENET_STANDARD_STD

    def __call__(
        self,
        images: Union[
            Image.Image, np.ndarray, "torch.Tensor", List[Image.Image], List[np.ndarray], List["torch.Tensor"]
        ],
        return_tensors: Optional[Union[str, TensorType]] = None,
        **kwargs
    ) -> BatchFeature:
        valid_images = False

        if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images):
            valid_images = True
        elif isinstance(images, (list, tuple)):
            if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]):
                valid_images = True

        if not valid_images:
            raise ValueError(
                "Images must of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example),"
                "`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)."
            )

        is_batched = bool(
            isinstance(images, (list, tuple))
            and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]))
        )

        if not is_batched:
            images = [images]

        if self.do_resize and self.size is not None:
            images = [self.resize(image=image, size=self.size, resample=self.resample) for image in images]
        if self.do_normalize:
            images = [self.normalize(image=image, mean=self.image_mean, std=self.image_std) for image in images]

        data = {"pixel_values": images}
        encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)

        return encoded_inputs
true
true
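A short usage sketch for the ViTFeatureExtractor record above, assuming transformers of this vintage, Pillow, and torch are installed; the input file name is a placeholder.

from PIL import Image
from transformers import ViTFeatureExtractor

feature_extractor = ViTFeatureExtractor(size=224, do_normalize=True)
image = Image.open("cat.png").convert("RGB")  # placeholder image path

inputs = feature_extractor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # -> torch.Size([1, 3, 224, 224])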
f700096cbce5db1538215892bb1dcc76b6c37987
734
py
Python
hier/project-euler/euler-067-hackerrank/euler067.py
EliahKagan/old-practice-snapshot
1b53897eac6902f8d867c8f154ce2a489abb8133
[ "0BSD" ]
null
null
null
hier/project-euler/euler-067-hackerrank/euler067.py
EliahKagan/old-practice-snapshot
1b53897eac6902f8d867c8f154ce2a489abb8133
[ "0BSD" ]
null
null
null
hier/project-euler/euler-067-hackerrank/euler067.py
EliahKagan/old-practice-snapshot
1b53897eac6902f8d867c8f154ce2a489abb8133
[ "0BSD" ]
null
null
null
#!/usr/bin/env python3

UNKNOWN = -1


def read_val():
    return int(input())


def read_row():
    return list(map(int, input().split()))


def read_grid():
    return [read_row() for _ in range(read_val())]


def make_blank_row(i):
    return [UNKNOWN] * i


def make_blank_grid(n):
    return [make_blank_row(i) for i in range(1, n + 1)]


def compute_max_path_sum(grid):
    memo = make_blank_grid(len(grid))

    def dfs(i, j):
        if i == len(grid):
            return 0
        if memo[i][j] == UNKNOWN:
            memo[i][j] = grid[i][j] + max(dfs(i + 1, j), dfs(i + 1, j + 1))
        return memo[i][j]

    return dfs(0, 0)


for t in range(read_val()):
    print(compute_max_path_sum(read_grid()))
20.388889
75
0.564033
UNKNOWN = -1


def read_val():
    return int(input())


def read_row():
    return list(map(int, input().split()))


def read_grid():
    return [read_row() for _ in range(read_val())]


def make_blank_row(i):
    return [UNKNOWN] * i


def make_blank_grid(n):
    return [make_blank_row(i) for i in range(1, n + 1)]


def compute_max_path_sum(grid):
    memo = make_blank_grid(len(grid))

    def dfs(i, j):
        if i == len(grid):
            return 0
        if memo[i][j] == UNKNOWN:
            memo[i][j] = grid[i][j] + max(dfs(i + 1, j), dfs(i + 1, j + 1))
        return memo[i][j]

    return dfs(0, 0)


for t in range(read_val()):
    print(compute_max_path_sum(read_grid()))
true
true
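The euler067 record above memoises a top-down maximum-path search over a triangle. A self-contained worked example; the triangle is the Project Euler problem 18 sample, not data from the record.

UNKNOWN = -1

def compute_max_path_sum(grid):
    # memo[i][j] caches the best sum reachable from cell (i, j) downward
    memo = [[UNKNOWN] * i for i in range(1, len(grid) + 1)]

    def dfs(i, j):
        if i == len(grid):
            return 0
        if memo[i][j] == UNKNOWN:
            memo[i][j] = grid[i][j] + max(dfs(i + 1, j), dfs(i + 1, j + 1))
        return memo[i][j]

    return dfs(0, 0)

triangle = [[3], [7, 4], [2, 4, 6], [8, 5, 9, 3]]
print(compute_max_path_sum(triangle))  # -> 23, via the path 3 -> 7 -> 4 -> 9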
f7000a85ea7a735edb59575f149fc6ff7ce4b461
1,866
py
Python
tools/apps/find_blender.py
SeijiEmery/unity_tools
cb401e6979b95c081a2ab3f944fc6e4419ccfd0e
[ "MIT" ]
null
null
null
tools/apps/find_blender.py
SeijiEmery/unity_tools
cb401e6979b95c081a2ab3f944fc6e4419ccfd0e
[ "MIT" ]
null
null
null
tools/apps/find_blender.py
SeijiEmery/unity_tools
cb401e6979b95c081a2ab3f944fc6e4419ccfd0e
[ "MIT" ]
null
null
null
import platform

# print(platform.system())
operating_system = platform.system().lower()

if operating_system == 'darwin':
    from .blender_utils_macos import get_installed_blender_versions
    operating_system_name = 'macos'
elif operating_system == 'linux':
    from .blender_utils_linux import get_installed_blender_versions
    operating_system_name = 'linux'
elif operating_system == 'windows':
    from .blender_utils_windows import get_installed_blender_versions
    operating_system_name = 'windows'
else:
    raise Exception("Unimplemented for OS {}".format(operating_system))

from .blender_utils_web import get_blender_version_download_links


def find_blender(version):
    # TODO: add fuzzy version matching, ie. '>=2.80', '~2.80', '<2.80', etc.
    installed_versions = get_installed_blender_versions()
    if version in installed_versions:
        return installed_versions[version]
    else:
        print("blender version '{}' not found; found {} version(s):".format(version, len(installed_versions)))
        for v, path in installed_versions.items():
            print("    {}: {}".format(v, path))
        print("searching web archive...")
        versions = get_blender_version_download_links(version, operating_system_name)
        print("found {} download(s) for blender version '{}', platform '{}':".format(len(versions), version, operating_system_name))
        for url in versions:
            print("    {}".format(url))


if __name__ == '__main__':
    for version, exec_path in get_installed_blender_versions().items():
        print("found blender {version}: {path}".format(version=version, path=exec_path))

    blender = find_blender('2.80')
    if blender:
        print("Found blender: '{}'".format(blender))
    else:
        print("No matching blender version installed :(")
40.565217
132
0.681136
import platform

operating_system = platform.system().lower()

if operating_system == 'darwin':
    from .blender_utils_macos import get_installed_blender_versions
    operating_system_name = 'macos'
elif operating_system == 'linux':
    from .blender_utils_linux import get_installed_blender_versions
    operating_system_name = 'linux'
elif operating_system == 'windows':
    from .blender_utils_windows import get_installed_blender_versions
    operating_system_name = 'windows'
else:
    raise Exception("Unimplemented for OS {}".format(operating_system))

from .blender_utils_web import get_blender_version_download_links


def find_blender(version):
    installed_versions = get_installed_blender_versions()
    if version in installed_versions:
        return installed_versions[version]
    else:
        print("blender version '{}' not found; found {} version(s):".format(version, len(installed_versions)))
        for v, path in installed_versions.items():
            print("    {}: {}".format(v, path))
        print("searching web archive...")
        versions = get_blender_version_download_links(version, operating_system_name)
        print("found {} download(s) for blender version '{}', platform '{}':".format(len(versions), version, operating_system_name))
        for url in versions:
            print("    {}".format(url))


if __name__ == '__main__':
    for version, exec_path in get_installed_blender_versions().items():
        print("found blender {version}: {path}".format(version=version, path=exec_path))

    blender = find_blender('2.80')
    if blender:
        print("Found blender: '{}'".format(blender))
    else:
        print("No matching blender version installed :(")
true
true
f7000b2945cb3703ec7fbc7ccf8cd64d39f12e81
8,196
py
Python
codes/data/image_corruptor.py
neonbjb/DL-Art-School
a6f0f854b987ac724e258af8b042ea4459a571bc
[ "Apache-2.0" ]
12
2020-12-13T12:45:03.000Z
2022-03-29T09:58:15.000Z
codes/data/image_corruptor.py
neonbjb/DL-Art-School
a6f0f854b987ac724e258af8b042ea4459a571bc
[ "Apache-2.0" ]
1
2020-12-31T01:12:45.000Z
2021-03-31T11:43:52.000Z
codes/data/image_corruptor.py
neonbjb/DL-Art-School
a6f0f854b987ac724e258af8b042ea4459a571bc
[ "Apache-2.0" ]
3
2020-12-14T06:04:04.000Z
2020-12-26T19:11:41.000Z
import functools
import random
from math import cos, pi

import cv2
import kornia
import numpy as np
import torch
from kornia.augmentation import ColorJitter

from data.util import read_img
from PIL import Image
from io import BytesIO

# Get a rough visualization of the above distribution. (Y-axis is meaningless, just spreads data)
from utils.util import opt_get

'''
if __name__ == '__main__':
    import numpy as np
    import matplotlib.pyplot as plt
    data = np.asarray([get_rand() for _ in range(5000)])
    plt.plot(data, np.random.uniform(size=(5000,)), 'x')
    plt.show()
'''


def kornia_color_jitter_numpy(img, setting):
    if setting * 255 > 1:
        # I'm using Kornia's ColorJitter, which requires pytorch arrays in b,c,h,w format.
        img = torch.from_numpy(img).permute(2,0,1).unsqueeze(0)
        img = ColorJitter(setting, setting, setting, setting)(img)
        img = img.squeeze(0).permute(1,2,0).numpy()
    return img


# Performs image corruption on a list of images from a configurable set of corruption
# options.
class ImageCorruptor:
    def __init__(self, opt):
        self.opt = opt
        self.reset_random()
        self.blur_scale = opt['corruption_blur_scale'] if 'corruption_blur_scale' in opt.keys() else 1
        self.fixed_corruptions = opt['fixed_corruptions'] if 'fixed_corruptions' in opt.keys() else []
        self.num_corrupts = opt['num_corrupts_per_image'] if 'num_corrupts_per_image' in opt.keys() else 0
        self.cosine_bias = opt_get(opt, ['cosine_bias'], True)
        if self.num_corrupts == 0:
            return
        else:
            self.random_corruptions = opt['random_corruptions'] if 'random_corruptions' in opt.keys() else []

    def reset_random(self):
        if 'random_seed' in self.opt.keys():
            self.rand = random.Random(self.opt['random_seed'])
        else:
            self.rand = random.Random()

    # Feeds a random uniform through a cosine distribution to slightly bias corruptions towards "uncorrupted".
    # Return is on [0,1] with a bias towards 0.
    def get_rand(self):
        r = self.rand.random()
        if self.cosine_bias:
            return 1 - cos(r * pi / 2)
        else:
            return r

    def corrupt_images(self, imgs, return_entropy=False):
        if self.num_corrupts == 0 and not self.fixed_corruptions:
            if return_entropy:
                return imgs, []
            else:
                return imgs

        if self.num_corrupts == 0:
            augmentations = []
        else:
            augmentations = random.choices(self.random_corruptions, k=self.num_corrupts)

        # Sources of entropy
        corrupted_imgs = []
        entropy = []
        undo_fns = []
        applied_augs = augmentations + self.fixed_corruptions
        for img in imgs:
            for aug in augmentations:
                r = self.get_rand()
                img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
                if undo_fn is not None:
                    undo_fns.append(undo_fn)
            for aug in self.fixed_corruptions:
                r = self.get_rand()
                img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
                entropy.append(r)
                if undo_fn is not None:
                    undo_fns.append(undo_fn)
            # Apply undo_fns after all corruptions are finished, in same order.
            for ufn in undo_fns:
                img = ufn(img)
            corrupted_imgs.append(img)

        if return_entropy:
            return corrupted_imgs, entropy
        else:
            return corrupted_imgs

    def apply_corruption(self, img, aug, rand_val, applied_augmentations):
        undo_fn = None
        if 'color_quantization' in aug:
            # Color quantization
            quant_div = 2 ** (int(rand_val * 10 / 3) + 2)
            img = img * 255
            img = (img // quant_div) * quant_div
            img = img / 255
        elif 'color_jitter' in aug:
            lo_end = 0
            hi_end = .2
            setting = rand_val * (hi_end - lo_end) + lo_end
            img = kornia_color_jitter_numpy(img, setting)
        elif 'gaussian_blur' in aug:
            img = cv2.GaussianBlur(img, (0,0), self.blur_scale*rand_val*1.5)
        elif 'motion_blur' in aug:
            # Motion blur
            intensity = self.blur_scale*rand_val * 3 + 1
            angle = random.randint(0,360)
            k = np.zeros((intensity, intensity), dtype=np.float32)
            k[(intensity - 1) // 2, :] = np.ones(intensity, dtype=np.float32)
            k = cv2.warpAffine(k, cv2.getRotationMatrix2D((intensity / 2 - 0.5, intensity / 2 - 0.5), angle, 1.0),
                               (intensity, intensity))
            k = k * (1.0 / np.sum(k))
            img = cv2.filter2D(img, -1, k)
        elif 'block_noise' in aug:
            # Large distortion blocks in part of an img, such as is used to mask out a face.
            pass
        elif 'lq_resampling' in aug:
            # Random mode interpolation HR->LR->HR
            if 'lq_resampling4x' == aug:
                scale = 4
            else:
                if rand_val < .3:
                    scale = 1
                elif rand_val < .7:
                    scale = 2
                else:
                    scale = 4
            if scale > 1:
                interpolation_modes = [cv2.INTER_NEAREST, cv2.INTER_CUBIC, cv2.INTER_LINEAR, cv2.INTER_LANCZOS4]
                mode = random.randint(0,4) % len(interpolation_modes)
                # Downsample first, then upsample using the random mode.
                img = cv2.resize(img, dsize=(img.shape[1]//scale, img.shape[0]//scale), interpolation=mode)
                def lq_resampling_undo_fn(scale, img):
                    return cv2.resize(img, dsize=(img.shape[1]*scale, img.shape[0]*scale), interpolation=cv2.INTER_LINEAR)
                undo_fn = functools.partial(lq_resampling_undo_fn, scale)
        elif 'color_shift' in aug:
            # Color shift
            pass
        elif 'interlacing' in aug:
            # Interlacing distortion
            pass
        elif 'chromatic_aberration' in aug:
            # Chromatic aberration
            pass
        elif 'noise' in aug:
            # Random noise
            if 'noise-5' == aug:
                noise_intensity = 5 / 255.0
            else:
                noise_intensity = (rand_val*6) / 255.0
            img += np.random.rand(*img.shape) * noise_intensity
        elif 'jpeg' in aug:
            if 'noise' not in applied_augmentations and 'noise-5' not in applied_augmentations:
                if aug == 'jpeg':
                    lo=10
                    range=20
                elif aug == 'jpeg-low':
                    lo=15
                    range=10
                elif aug == 'jpeg-medium':
                    lo=23
                    range=25
                elif aug == 'jpeg-broad':
                    lo=15
                    range=60
                elif aug == 'jpeg-normal':
                    lo=47
                    range=35
                else:
                    raise NotImplementedError("specified jpeg corruption doesn't exist")
                # JPEG compression
                qf = (int((1-rand_val)*range) + lo)
                # Use PIL to perform a mock compression to a data buffer, then swap back to cv2.
                img = (img * 255).astype(np.uint8)
                img = Image.fromarray(img)
                buffer = BytesIO()
                img.save(buffer, "JPEG", quality=qf, optimize=True)
                buffer.seek(0)
                jpeg_img_bytes = np.asarray(bytearray(buffer.read()), dtype="uint8")
                img = read_img("buffer", jpeg_img_bytes, rgb=True)
        elif 'saturation' in aug:
            # Lightening / saturation
            saturation = rand_val * .3
            img = np.clip(img + saturation, a_max=1, a_min=0)
        elif 'greyscale' in aug:
            img = np.tile(np.mean(img, axis=2, keepdims=True), [1,1,3])
        elif 'none' not in aug:
            raise NotImplementedError("Augmentation doesn't exist")

        return img, undo_fn
39.028571
122
0.554173
import functools
import random
from math import cos, pi

import cv2
import kornia
import numpy as np
import torch
from kornia.augmentation import ColorJitter

from data.util import read_img
from PIL import Image
from io import BytesIO

from utils.util import opt_get


def kornia_color_jitter_numpy(img, setting):
    if setting * 255 > 1:
        img = torch.from_numpy(img).permute(2,0,1).unsqueeze(0)
        img = ColorJitter(setting, setting, setting, setting)(img)
        img = img.squeeze(0).permute(1,2,0).numpy()
    return img


class ImageCorruptor:
    def __init__(self, opt):
        self.opt = opt
        self.reset_random()
        self.blur_scale = opt['corruption_blur_scale'] if 'corruption_blur_scale' in opt.keys() else 1
        self.fixed_corruptions = opt['fixed_corruptions'] if 'fixed_corruptions' in opt.keys() else []
        self.num_corrupts = opt['num_corrupts_per_image'] if 'num_corrupts_per_image' in opt.keys() else 0
        self.cosine_bias = opt_get(opt, ['cosine_bias'], True)
        if self.num_corrupts == 0:
            return
        else:
            self.random_corruptions = opt['random_corruptions'] if 'random_corruptions' in opt.keys() else []

    def reset_random(self):
        if 'random_seed' in self.opt.keys():
            self.rand = random.Random(self.opt['random_seed'])
        else:
            self.rand = random.Random()

    def get_rand(self):
        r = self.rand.random()
        if self.cosine_bias:
            return 1 - cos(r * pi / 2)
        else:
            return r

    def corrupt_images(self, imgs, return_entropy=False):
        if self.num_corrupts == 0 and not self.fixed_corruptions:
            if return_entropy:
                return imgs, []
            else:
                return imgs

        if self.num_corrupts == 0:
            augmentations = []
        else:
            augmentations = random.choices(self.random_corruptions, k=self.num_corrupts)

        corrupted_imgs = []
        entropy = []
        undo_fns = []
        applied_augs = augmentations + self.fixed_corruptions
        for img in imgs:
            for aug in augmentations:
                r = self.get_rand()
                img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
                if undo_fn is not None:
                    undo_fns.append(undo_fn)
            for aug in self.fixed_corruptions:
                r = self.get_rand()
                img, undo_fn = self.apply_corruption(img, aug, r, applied_augs)
                entropy.append(r)
                if undo_fn is not None:
                    undo_fns.append(undo_fn)
            for ufn in undo_fns:
                img = ufn(img)
            corrupted_imgs.append(img)

        if return_entropy:
            return corrupted_imgs, entropy
        else:
            return corrupted_imgs

    def apply_corruption(self, img, aug, rand_val, applied_augmentations):
        undo_fn = None
        if 'color_quantization' in aug:
            quant_div = 2 ** (int(rand_val * 10 / 3) + 2)
            img = img * 255
            img = (img // quant_div) * quant_div
            img = img / 255
        elif 'color_jitter' in aug:
            lo_end = 0
            hi_end = .2
            setting = rand_val * (hi_end - lo_end) + lo_end
            img = kornia_color_jitter_numpy(img, setting)
        elif 'gaussian_blur' in aug:
            img = cv2.GaussianBlur(img, (0,0), self.blur_scale*rand_val*1.5)
        elif 'motion_blur' in aug:
            intensity = self.blur_scale*rand_val * 3 + 1
            angle = random.randint(0,360)
            k = np.zeros((intensity, intensity), dtype=np.float32)
            k[(intensity - 1) // 2, :] = np.ones(intensity, dtype=np.float32)
            k = cv2.warpAffine(k, cv2.getRotationMatrix2D((intensity / 2 - 0.5, intensity / 2 - 0.5), angle, 1.0),
                               (intensity, intensity))
            k = k * (1.0 / np.sum(k))
            img = cv2.filter2D(img, -1, k)
        elif 'block_noise' in aug:
            pass
        elif 'lq_resampling' in aug:
            if 'lq_resampling4x' == aug:
                scale = 4
            else:
                if rand_val < .3:
                    scale = 1
                elif rand_val < .7:
                    scale = 2
                else:
                    scale = 4
            if scale > 1:
                interpolation_modes = [cv2.INTER_NEAREST, cv2.INTER_CUBIC, cv2.INTER_LINEAR, cv2.INTER_LANCZOS4]
                mode = random.randint(0,4) % len(interpolation_modes)
                img = cv2.resize(img, dsize=(img.shape[1]//scale, img.shape[0]//scale), interpolation=mode)
                def lq_resampling_undo_fn(scale, img):
                    return cv2.resize(img, dsize=(img.shape[1]*scale, img.shape[0]*scale), interpolation=cv2.INTER_LINEAR)
                undo_fn = functools.partial(lq_resampling_undo_fn, scale)
        elif 'color_shift' in aug:
            pass
        elif 'interlacing' in aug:
            pass
        elif 'chromatic_aberration' in aug:
            pass
        elif 'noise' in aug:
            if 'noise-5' == aug:
                noise_intensity = 5 / 255.0
            else:
                noise_intensity = (rand_val*6) / 255.0
            img += np.random.rand(*img.shape) * noise_intensity
        elif 'jpeg' in aug:
            if 'noise' not in applied_augmentations and 'noise-5' not in applied_augmentations:
                if aug == 'jpeg':
                    lo=10
                    range=20
                elif aug == 'jpeg-low':
                    lo=15
                    range=10
                elif aug == 'jpeg-medium':
                    lo=23
                    range=25
                elif aug == 'jpeg-broad':
                    lo=15
                    range=60
                elif aug == 'jpeg-normal':
                    lo=47
                    range=35
                else:
                    raise NotImplementedError("specified jpeg corruption doesn't exist")
                # JPEG compression
                qf = (int((1-rand_val)*range) + lo)
                # Use PIL to perform a mock compression to a data buffer, then swap back to cv2.
                img = (img * 255).astype(np.uint8)
                img = Image.fromarray(img)
                buffer = BytesIO()
                img.save(buffer, "JPEG", quality=qf, optimize=True)
                buffer.seek(0)
                jpeg_img_bytes = np.asarray(bytearray(buffer.read()), dtype="uint8")
                img = read_img("buffer", jpeg_img_bytes, rgb=True)
        elif 'saturation' in aug:
            # Lightening / saturation
            saturation = rand_val * .3
            img = np.clip(img + saturation, a_max=1, a_min=0)
        elif 'greyscale' in aug:
            img = np.tile(np.mean(img, axis=2, keepdims=True), [1,1,3])
        elif 'none' not in aug:
            raise NotImplementedError("Augmentation doesn't exist")

        return img, undo_fn
true
true
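A hedged usage sketch for the ImageCorruptor record above. The option keys come from its __init__; the import path follows the repo layout (codes/data/image_corruptor.py, run from inside codes/), and the input image is a synthetic stand-in.

import numpy as np
from data.image_corruptor import ImageCorruptor  # path per the repo layout

opt = {
    'fixed_corruptions': ['jpeg-medium'],                      # always applied
    'num_corrupts_per_image': 1,                               # plus one random pick
    'random_corruptions': ['gaussian_blur', 'noise', 'none'],
    'random_seed': 1234,                                       # makes get_rand() reproducible
}
corruptor = ImageCorruptor(opt)

imgs = [np.random.rand(64, 64, 3)]  # float HWC images in [0, 1]
corrupted, entropy = corruptor.corrupt_images(imgs, return_entropy=True)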
f7000b6751e6ca87c8cdd1ca6b7921d866ec80c7
159
py
Python
tests/basics/bytes_format_modulo.py
geowor01/micropython
7fb13eeef4a85f21cae36f1d502bcc53880e1815
[ "MIT" ]
7
2019-10-18T13:41:39.000Z
2022-03-15T17:27:57.000Z
tests/basics/bytes_format_modulo.py
geowor01/micropython
7fb13eeef4a85f21cae36f1d502bcc53880e1815
[ "MIT" ]
null
null
null
tests/basics/bytes_format_modulo.py
geowor01/micropython
7fb13eeef4a85f21cae36f1d502bcc53880e1815
[ "MIT" ]
2
2020-06-23T09:10:15.000Z
2020-12-22T06:42:14.000Z
# This test requires CPython3.5

print(b"%%" % ())
print(b"=%d=" % 1)
print(b"=%d=%d=" % (1, 2))
print(b"=%s=" % b"str")
print(b"=%r=" % b"str")

print("PASS")
17.666667
31
0.503145
print(b"%%" % ()) print(b"=%d=" % 1) print(b"=%d=%d=" % (1, 2)) print(b"=%s=" % b"str") print(b"=%r=" % b"str") print("PASS")
true
true
f7000bbc055be36dc38b5ab214bad87b6d24f064
2,102
py
Python
tests/test_JpegCompression.py
tt195361/TfDataAugmentation
0deb987ae5a37816d88eec302bc42db7479ea8df
[ "MIT" ]
null
null
null
tests/test_JpegCompression.py
tt195361/TfDataAugmentation
0deb987ae5a37816d88eec302bc42db7479ea8df
[ "MIT" ]
null
null
null
tests/test_JpegCompression.py
tt195361/TfDataAugmentation
0deb987ae5a37816d88eec302bc42db7479ea8df
[ "MIT" ]
null
null
null
#
# test_JpegCompression.py
#

import pytest
import albumentations as A

from .context import TfDataAugmentation as Tfda
from . import test_utils
from .test_utils import TestResult


@pytest.mark.parametrize(
    "quality_lower, quality_upper, expected, message", [
        # quality_lower
        (-1, 100, TestResult.Error, "quality_lower < min => Error"),
        (0, 100, TestResult.OK, "quality_lower == min => OK"),
        (100, 100, TestResult.OK, "quality_lower == max => OK"),
        (101, 100, TestResult.Error, "quality_lower >= max => Error"),

        # quality_upper
        (0, -1, TestResult.Error, "quality_upper < min => Error"),
        (0, 0, TestResult.OK, "quality_upper == min => OK"),
        (0, 100, TestResult.OK, "quality_upper == max => OK"),
        (0, 101, TestResult.Error, "quality_upper > max => Error"),

        # Relation
        (50, 50, TestResult.OK, "quality_lower == quality_upper => OK"),
        (51, 50, TestResult.Error, "quality_lower > quality_upper => Error"),
    ])
def test_hue_shift_limit_value(
        quality_lower, quality_upper, expected, message):
    try:
        Tfda.JpegCompression(
            quality_lower=quality_lower,
            quality_upper=quality_upper)
        actual = TestResult.OK
    except ValueError:
        actual = TestResult.Error

    assert expected == actual, message


def test_call():
    quality_lower = 50
    quality_upper = 100

    tgt_jpeg = Tfda.JpegCompression(
        quality_lower=quality_lower,
        quality_upper=quality_upper,
        p=1.0)
    tgt_transform = \
        test_utils.make_tgt_transform(tgt_jpeg)

    image = test_utils.make_test_image()

    tgt_result = tgt_transform(image=image)
    actual_image = tgt_result['image']

    image_np = image.numpy()
    quality = float(tgt_jpeg.get_param('quality'))
    expected_image = A.image_compression(
        image_np, quality, image_type='.jpg')

    test_utils.partial_assert_array(
        expected_image, actual_image, 0.6, "image", eps=0.1)
28.794521
60
0.621313
import pytest
import albumentations as A

from .context import TfDataAugmentation as Tfda
from . import test_utils
from .test_utils import TestResult


@pytest.mark.parametrize(
    "quality_lower, quality_upper, expected, message", [
        (-1, 100, TestResult.Error, "quality_lower < min => Error"),
        (0, 100, TestResult.OK, "quality_lower == min => OK"),
        (100, 100, TestResult.OK, "quality_lower == max => OK"),
        (101, 100, TestResult.Error, "quality_lower >= max => Error"),
        (0, -1, TestResult.Error, "quality_upper < min => Error"),
        (0, 0, TestResult.OK, "quality_upper == min => OK"),
        (0, 100, TestResult.OK, "quality_upper == max => OK"),
        (0, 101, TestResult.Error, "quality_upper > max => Error"),
        (50, 50, TestResult.OK, "quality_lower == quality_upper => OK"),
        (51, 50, TestResult.Error, "quality_lower > quality_upper => Error"),
    ])
def test_hue_shift_limit_value(
        quality_lower, quality_upper, expected, message):
    try:
        Tfda.JpegCompression(
            quality_lower=quality_lower,
            quality_upper=quality_upper)
        actual = TestResult.OK
    except ValueError:
        actual = TestResult.Error

    assert expected == actual, message


def test_call():
    quality_lower = 50
    quality_upper = 100

    tgt_jpeg = Tfda.JpegCompression(
        quality_lower=quality_lower,
        quality_upper=quality_upper,
        p=1.0)
    tgt_transform = \
        test_utils.make_tgt_transform(tgt_jpeg)

    image = test_utils.make_test_image()

    tgt_result = tgt_transform(image=image)
    actual_image = tgt_result['image']

    image_np = image.numpy()
    quality = float(tgt_jpeg.get_param('quality'))
    expected_image = A.image_compression(
        image_np, quality, image_type='.jpg')

    test_utils.partial_assert_array(
        expected_image, actual_image, 0.6, "image", eps=0.1)
true
true
f7000bc963cc817a5a5dca6aba86f5ea6dde667e
3,008
py
Python
tests/test_background_swap.py
pclucas14/continuum
3b9b0fc3c2f21dcaeafbccfa29987cefe55f37a0
[ "MIT" ]
4
2020-04-15T14:31:42.000Z
2020-04-24T17:07:34.000Z
tests/test_background_swap.py
pclucas14/continuum
3b9b0fc3c2f21dcaeafbccfa29987cefe55f37a0
[ "MIT" ]
18
2020-04-15T14:57:27.000Z
2020-05-02T14:05:36.000Z
tests/test_background_swap.py
arthurdouillard/continual_loader
09034db1371e9646ca660fd4d4df73e61bf77067
[ "MIT" ]
1
2020-04-15T15:50:28.000Z
2020-04-15T15:50:28.000Z
import os

from torch.utils.data import DataLoader

from continuum.datasets import CIFAR10, InMemoryDataset
from continuum.datasets import MNIST
import torchvision

from continuum.scenarios import TransformationIncremental
import pytest
import numpy as np

from continuum.transforms.bg_swap import BackgroundSwap

DATA_PATH = os.environ.get("CONTINUUM_DATA_PATH")

# Uncomment for debugging via image output
# import matplotlib.pyplot as plt


def test_bg_swap_fast():
    """
    Fast test for background swap.
    """
    bg_x = np.ones(shape=[2, 5, 5, 3]) * -1
    bg_y = np.random.rand(2)

    fg = np.random.normal(loc=.5, scale=.1, size=[5, 5])
    bg = InMemoryDataset(bg_x, bg_y)

    bg_swap = BackgroundSwap(bg, input_dim=(5, 5), normalize_bg=None)

    spliced_1_channel = bg_swap(fg)[:, :, 0]

    assert np.array_equal((spliced_1_channel <= -1), (fg <= .5))


@pytest.mark.slow
def test_background_swap_numpy():
    """
    Test background swap on a single ndarray input.
    """
    mnist = MNIST(DATA_PATH, download=True, train=True)
    cifar = CIFAR10(DATA_PATH, download=True, train=True)

    bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))

    im = mnist.get_data()[0][0]
    im = bg_swap(im)

    # Uncomment for debugging
    # plt.imshow(im, interpolation='nearest')
    # plt.show()


@pytest.mark.slow
def test_background_swap_torch():
    """
    Test background swap on a single tensor input.
    """
    cifar = CIFAR10(DATA_PATH, download=True, train=True)

    mnist = torchvision.datasets.MNIST(DATA_PATH, train=True, download=True,
                                       transform=torchvision.transforms.Compose([
                                           torchvision.transforms.ToTensor()
                                       ]))

    bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))
    im = mnist[0][0]

    im = bg_swap(im)

    # Uncomment for debugging
    # plt.imshow(im.permute(1, 2, 0), interpolation='nearest')
    # plt.show()


@pytest.mark.slow
def test_background_tranformation():
    """
    Example code using TransformationIncremental to create a setting with 3 tasks.
    """
    cifar = CIFAR10(DATA_PATH, train=True)
    mnist = MNIST(DATA_PATH, download=False, train=True)
    nb_task = 3
    list_trsf = []
    for i in range(nb_task):
        list_trsf.append([torchvision.transforms.ToTensor(), BackgroundSwap(cifar, bg_label=i, input_dim=(28, 28)),
                          torchvision.transforms.ToPILImage()])
    scenario = TransformationIncremental(mnist, base_transformations=[torchvision.transforms.ToTensor()],
                                         incremental_transformations=list_trsf)
    folder = "tests/samples/background_trsf/"
    if not os.path.exists(folder):
        os.makedirs(folder)
    for task_id, task_data in enumerate(scenario):
        task_data.plot(path=folder, title=f"background_{task_id}.jpg", nb_samples=100, shape=[28, 28, 3])
        loader = DataLoader(task_data)
        _, _, _ = next(iter(loader))
31.010309
115
0.657247
import os

from torch.utils.data import DataLoader

from continuum.datasets import CIFAR10, InMemoryDataset
from continuum.datasets import MNIST
import torchvision

from continuum.scenarios import TransformationIncremental
import pytest
import numpy as np

from continuum.transforms.bg_swap import BackgroundSwap

DATA_PATH = os.environ.get("CONTINUUM_DATA_PATH")


def test_bg_swap_fast():
    bg_x = np.ones(shape=[2, 5, 5, 3]) * -1
    bg_y = np.random.rand(2)

    fg = np.random.normal(loc=.5, scale=.1, size=[5, 5])
    bg = InMemoryDataset(bg_x, bg_y)

    bg_swap = BackgroundSwap(bg, input_dim=(5, 5), normalize_bg=None)

    spliced_1_channel = bg_swap(fg)[:, :, 0]

    assert np.array_equal((spliced_1_channel <= -1), (fg <= .5))


@pytest.mark.slow
def test_background_swap_numpy():
    mnist = MNIST(DATA_PATH, download=True, train=True)
    cifar = CIFAR10(DATA_PATH, download=True, train=True)

    bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))

    im = mnist.get_data()[0][0]
    im = bg_swap(im)


@pytest.mark.slow
def test_background_swap_torch():
    cifar = CIFAR10(DATA_PATH, download=True, train=True)

    mnist = torchvision.datasets.MNIST(DATA_PATH, train=True, download=True,
                                       transform=torchvision.transforms.Compose([
                                           torchvision.transforms.ToTensor()
                                       ]))

    bg_swap = BackgroundSwap(cifar, input_dim=(28, 28))
    im = mnist[0][0]

    im = bg_swap(im)


@pytest.mark.slow
def test_background_tranformation():
    cifar = CIFAR10(DATA_PATH, train=True)
    mnist = MNIST(DATA_PATH, download=False, train=True)
    nb_task = 3
    list_trsf = []
    for i in range(nb_task):
        list_trsf.append([torchvision.transforms.ToTensor(), BackgroundSwap(cifar, bg_label=i, input_dim=(28, 28)),
                          torchvision.transforms.ToPILImage()])
    scenario = TransformationIncremental(mnist, base_transformations=[torchvision.transforms.ToTensor()],
                                         incremental_transformations=list_trsf)
    folder = "tests/samples/background_trsf/"
    if not os.path.exists(folder):
        os.makedirs(folder)
    for task_id, task_data in enumerate(scenario):
        task_data.plot(path=folder, title=f"background_{task_id}.jpg", nb_samples=100, shape=[28, 28, 3])
        loader = DataLoader(task_data)
        _, _, _ = next(iter(loader))
true
true
f7000c3468a0624d54db99fbbde0ac002173b532
2,025
py
Python
python/communitymanager/lib/const.py
OpenCIOC/communityrepo
63199a7b620f5c08624e534faf771e5dd2243adb
[ "Apache-2.0" ]
2
2016-01-25T14:40:44.000Z
2018-01-31T04:30:23.000Z
python/communitymanager/lib/const.py
OpenCIOC/communityrepo
63199a7b620f5c08624e534faf771e5dd2243adb
[ "Apache-2.0" ]
5
2018-02-07T20:16:49.000Z
2021-12-13T19:41:43.000Z
python/communitymanager/lib/const.py
OpenCIOC/communityrepo
63199a7b620f5c08624e534faf771e5dd2243adb
[ "Apache-2.0" ]
1
2018-02-07T20:37:52.000Z
2018-02-07T20:37:52.000Z
# =========================================================================================
# Copyright 2015 Community Information Online Consortium (CIOC) and KCL Software Solutions
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================================

# std lib
import os

# jQuery and jQueryUI versions
JQUERY_VERSION = "1.6.2"
JQUERY_UI_VERSION = "1.8.16"

# formatting constants
DATE_TEXT_SIZE = 25
TEXT_SIZE = 85
TEXTAREA_COLS = 85
TEXTAREA_ROWS_SHORT = 2
TEXTAREA_ROWS_LONG = 4
TEXTAREA_ROWS_XLONG = 10
MAX_LENGTH_CHECKLIST_NOTES = 255
EMAIL_LENGTH = 60

# application running constants
_app_path = None
_config_file = None
_app_name = None
session_lock_dir = None
publish_dir = None


def update_cache_values():
    # called from application init at startup
    global _app_path, _config_file, _app_name, session_lock_dir, publish_dir
    if _app_path is None:
        _app_path = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
        _app_name = os.path.split(_app_path)[1]
        _config_file = os.path.join(_app_path, '..', '..', 'config', _app_name + '.ini')

        session_lock_dir = os.path.join(_app_path, 'python', 'session_lock')
        publish_dir = os.path.join(_app_path, 'python', 'published_files')
        try:
            os.makedirs(session_lock_dir)
        except os.error:
            pass

        try:
            os.makedirs(publish_dir)
        except os.error:
            pass
32.142857
95
0.640494
import os

JQUERY_VERSION = "1.6.2"
JQUERY_UI_VERSION = "1.8.16"

DATE_TEXT_SIZE = 25
TEXT_SIZE = 85
TEXTAREA_COLS = 85
TEXTAREA_ROWS_SHORT = 2
TEXTAREA_ROWS_LONG = 4
TEXTAREA_ROWS_XLONG = 10
MAX_LENGTH_CHECKLIST_NOTES = 255
EMAIL_LENGTH = 60

_app_path = None
_config_file = None
_app_name = None
session_lock_dir = None
publish_dir = None


def update_cache_values():
    global _app_path, _config_file, _app_name, session_lock_dir, publish_dir
    if _app_path is None:
        _app_path = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
        _app_name = os.path.split(_app_path)[1]
        _config_file = os.path.join(_app_path, '..', '..', 'config', _app_name + '.ini')

        session_lock_dir = os.path.join(_app_path, 'python', 'session_lock')
        publish_dir = os.path.join(_app_path, 'python', 'published_files')
        try:
            os.makedirs(session_lock_dir)
        except os.error:
            pass

        try:
            os.makedirs(publish_dir)
        except os.error:
            pass
true
true
f7000d37df1b082b8f943334e45282014877347e
3,280
py
Python
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2015_08_01/aio/_configuration.py
rsdoherty/azure-sdk-for-python
6bba5326677468e6660845a703686327178bb7b1
[ "MIT" ]
2,728
2015-01-09T10:19:32.000Z
2022-03-31T14:50:33.000Z
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2015_08_01/aio/_configuration.py
rsdoherty/azure-sdk-for-python
6bba5326677468e6660845a703686327178bb7b1
[ "MIT" ]
17,773
2015-01-05T15:57:17.000Z
2022-03-31T23:50:25.000Z
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2015_08_01/aio/_configuration.py
rsdoherty/azure-sdk-for-python
6bba5326677468e6660845a703686327178bb7b1
[ "MIT" ]
1,916
2015-01-19T05:05:41.000Z
2022-03-31T19:36:44.000Z
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

from typing import Any, TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy

from .._version import VERSION

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from azure.core.credentials_async import AsyncTokenCredential


class WebSiteManagementClientConfiguration(Configuration):
    """Configuration for WebSiteManagementClient.

    Note that all parameters used to create this instance are saved as instance attributes.

    :param credential: Credential needed for the client to connect to Azure.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: Your Azure subscription ID. This is a GUID-formatted string (e.g.
     00000000-0000-0000-0000-000000000000).
    :type subscription_id: str
    """

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        super(WebSiteManagementClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = "2015-08-01"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-web/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
48.235294
134
0.699695
from typing import Any, TYPE_CHECKING

from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy

from .._version import VERSION

if TYPE_CHECKING:
    from azure.core.credentials_async import AsyncTokenCredential


class WebSiteManagementClientConfiguration(Configuration):

    def __init__(
        self,
        credential: "AsyncTokenCredential",
        subscription_id: str,
        **kwargs: Any
    ) -> None:
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        super(WebSiteManagementClientConfiguration, self).__init__(**kwargs)

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = "2015-08-01"
        self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
        kwargs.setdefault('sdk_moniker', 'mgmt-web/{}'.format(VERSION))
        self._configure(**kwargs)

    def _configure(
        self,
        **kwargs: Any
    ) -> None:
        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
true
true
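This configuration class is normally constructed indirectly by the management client. A small sketch, assuming azure-identity is installed; the subscription GUID is a placeholder.

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.web.v2015_08_01.aio import WebSiteManagementClient

client = WebSiteManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder GUID
)
# The client builds WebSiteManagementClientConfiguration internally;
# its api_version is pinned to "2015-08-01" for this API surface.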
f7000e80dc69165127d93dc9b2a8e5454d07d8ca
852
py
Python
spongeauth/core/tests/test_x_real_ip_middleware.py
felixoi/SpongeAuth
d44ee52d0b35b2e1909c7bf6bad29aa7b4835b26
[ "MIT" ]
10
2016-11-18T12:37:24.000Z
2022-03-04T09:25:25.000Z
spongeauth/core/tests/test_x_real_ip_middleware.py
felixoi/SpongeAuth
d44ee52d0b35b2e1909c7bf6bad29aa7b4835b26
[ "MIT" ]
794
2016-11-19T18:34:37.000Z
2022-03-31T16:49:11.000Z
spongeauth/core/tests/test_x_real_ip_middleware.py
PowerNukkit/OreAuth
96a2926c9601fce6fac471bdb997077f07e8bf9a
[ "MIT" ]
11
2016-11-26T22:30:17.000Z
2022-03-16T17:20:14.000Z
import django.http
import unittest.mock

from .. import middleware


def get_response(req):
    # dummy get_response, just return an empty response
    return django.http.HttpResponse()


def test_leaves_remote_addr_alone_if_no_real_ip():
    remote_addr = object()
    request = unittest.mock.MagicMock()
    request.META = {"REMOTE_ADDR": remote_addr}
    middleware.XRealIPMiddleware(get_response)(request)
    assert request.META["REMOTE_ADDR"] is remote_addr


def test_switches_out_x_real_ip_if_available():
    remote_addr = object()
    x_real_ip = object()
    request = unittest.mock.MagicMock()
    request.META = {"REMOTE_ADDR": remote_addr, "HTTP_X_REAL_IP": x_real_ip}
    middleware.XRealIPMiddleware(get_response)(request)
    assert request.META["REMOTE_ADDR"] is x_real_ip
    assert request.META["HTTP_X_REAL_IP"] is x_real_ip
25.058824
76
0.75
import django.http
import unittest.mock

from .. import middleware


def get_response(req):
    return django.http.HttpResponse()


def test_leaves_remote_addr_alone_if_no_real_ip():
    remote_addr = object()
    request = unittest.mock.MagicMock()
    request.META = {"REMOTE_ADDR": remote_addr}
    middleware.XRealIPMiddleware(get_response)(request)
    assert request.META["REMOTE_ADDR"] is remote_addr


def test_switches_out_x_real_ip_if_available():
    remote_addr = object()
    x_real_ip = object()
    request = unittest.mock.MagicMock()
    request.META = {"REMOTE_ADDR": remote_addr, "HTTP_X_REAL_IP": x_real_ip}
    middleware.XRealIPMiddleware(get_response)(request)
    assert request.META["REMOTE_ADDR"] is x_real_ip
    assert request.META["HTTP_X_REAL_IP"] is x_real_ip
true
true
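The middleware under test is not part of this record. A minimal implementation consistent with both tests might look like the following sketch; the real spongeauth code may differ.

class XRealIPMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Mirror nginx's X-Real-IP header into REMOTE_ADDR when present,
        # leaving REMOTE_ADDR untouched otherwise.
        if "HTTP_X_REAL_IP" in request.META:
            request.META["REMOTE_ADDR"] = request.META["HTTP_X_REAL_IP"]
        return self.get_response(request)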
f7000f03c62e8b3dcc7083fb8b218b5a6f499aa8
198
py
Python
test-relay.py
rn-santos227/medsys
d72ef3b419bdb84cc21022af7ce43813090ef211
[ "MIT" ]
null
null
null
test-relay.py
rn-santos227/medsys
d72ef3b419bdb84cc21022af7ce43813090ef211
[ "MIT" ]
null
null
null
test-relay.py
rn-santos227/medsys
d72ef3b419bdb84cc21022af7ce43813090ef211
[ "MIT" ]
null
null
null
#!/usr/bin/env python

import time
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setup(21, GPIO.OUT)

GPIO.output(21, GPIO.LOW)
time.sleep(3.00)
GPIO.output(21, GPIO.HIGH)

GPIO.cleanup()
11.647059
26
0.717172
import time
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setup(21, GPIO.OUT)

GPIO.output(21, GPIO.LOW)
time.sleep(3.00)
GPIO.output(21, GPIO.HIGH)

GPIO.cleanup()
true
true
f7000f942ae83e6e025768748f579184365a76d4
305
py
Python
server/objects/notifier.py
jaxsenh/the-devil-that-lurks
89fa85c461a8da55a0b7d28e32dd8144d6cac8ca
[ "MIT" ]
1
2020-05-28T03:21:44.000Z
2020-05-28T03:21:44.000Z
server/objects/notifier.py
jaxsenh/the-devil-that-lurks
89fa85c461a8da55a0b7d28e32dd8144d6cac8ca
[ "MIT" ]
null
null
null
server/objects/notifier.py
jaxsenh/the-devil-that-lurks
89fa85c461a8da55a0b7d28e32dd8144d6cac8ca
[ "MIT" ]
null
null
null
from direct.directnotify.DirectNotifyGlobal import directNotify class Notifier: def __init__(self, name): """ @param name: The name of the notifier. Be sure to add it to your config/Config.prc! @type name: str """ self.notify = directNotify.newCategory(name)
27.727273
91
0.659016
from direct.directnotify.DirectNotifyGlobal import directNotify class Notifier: def __init__(self, name): self.notify = directNotify.newCategory(name)
true
true
f70011c6182da69473e565cf0d8aee9ee61da27a
221
py
Python
packaging/squarer/ml_squarer.py
g-nightingale/tox_examples
d7714375c764580b4b8af9db61332ced4e851def
[ "BSD-3-Clause" ]
10
2020-05-23T15:40:43.000Z
2022-02-06T22:34:10.000Z
packaging/squarer/ml_squarer.py
g-nightingale/tox_examples
d7714375c764580b4b8af9db61332ced4e851def
[ "BSD-3-Clause" ]
null
null
null
packaging/squarer/ml_squarer.py
g-nightingale/tox_examples
d7714375c764580b4b8af9db61332ced4e851def
[ "BSD-3-Clause" ]
12
2020-08-04T11:37:56.000Z
2022-03-31T23:21:13.000Z
import numpy as np def train_ml_squarer() -> None: print("Training!") def square() -> int: """Square a number...maybe""" return np.random.randint(1, 100) if __name__ == '__main__': train_ml_squarer()
15.785714
36
0.633484
import numpy as np def train_ml_squarer() -> None: print("Training!") def square() -> int: return np.random.randint(1, 100) if __name__ == '__main__': train_ml_squarer()
true
true
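A hypothetical usage of the Notifier wrapper above, assuming a Panda3D environment where directNotify is available; the category name is illustrative.

notifier = Notifier("lobby")                  # creates/reuses the "lobby" category
notifier.notify.info("player connected")      # logs through Panda3D's notify system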
f70012b80af6d540ea4880f63579ca63dcbdd2f2
6,034
py
Python
arcade/examples/platform_tutorial/09_load_map.py
yegarti/arcade
1862e61aab9a7dc646265005b0e808d953a9dfe3
[ "MIT" ]
null
null
null
arcade/examples/platform_tutorial/09_load_map.py
yegarti/arcade
1862e61aab9a7dc646265005b0e808d953a9dfe3
[ "MIT" ]
null
null
null
arcade/examples/platform_tutorial/09_load_map.py
yegarti/arcade
1862e61aab9a7dc646265005b0e808d953a9dfe3
[ "MIT" ]
null
null
null
""" Platformer Game """ import arcade # Constants SCREEN_WIDTH = 1000 SCREEN_HEIGHT = 650 SCREEN_TITLE = "Platformer" # Constants used to scale our sprites from their original size CHARACTER_SCALING = 1 TILE_SCALING = 0.5 COIN_SCALING = 0.5 SPRITE_PIXEL_SIZE = 128 GRID_PIXEL_SIZE = SPRITE_PIXEL_SIZE * TILE_SCALING # Movement speed of player, in pixels per frame PLAYER_MOVEMENT_SPEED = 10 GRAVITY = 1 PLAYER_JUMP_SPEED = 20 class MyGame(arcade.Window): """ Main application class. """ def __init__(self): # Call the parent class and set up the window super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE) # Our TileMap Object self.tile_map = None # Our Scene Object self.scene = None # Separate variable that holds the player sprite self.player_sprite = None # Our physics engine self.physics_engine = None # A Camera that can be used for scrolling the screen self.camera = None # A Camera that can be used to draw GUI elements self.gui_camera = None # Keep track of the score self.score = 0 # Load sounds self.collect_coin_sound = arcade.load_sound(":resources:sounds/coin1.wav") self.jump_sound = arcade.load_sound(":resources:sounds/jump1.wav") arcade.set_background_color(arcade.csscolor.CORNFLOWER_BLUE) def setup(self): """Set up the game here. Call this function to restart the game.""" # Setup the Cameras self.camera = arcade.Camera(self.width, self.height) self.gui_camera = arcade.Camera(self.width, self.height) # Name of map file to load map_name = ":resources:tiled_maps/map.json" # Layer specific options are defined based on Layer names in a dictionary # Doing this will make the SpriteList for the platforms layer # use spatial hashing for detection. layer_options = { "Platforms": { "use_spatial_hash": True, }, } # Read in the tiled map self.tile_map = arcade.load_tilemap(map_name, TILE_SCALING, layer_options) # Initialize Scene with our TileMap, this will automatically add all layers # from the map as SpriteLists in the scene in the proper order. self.scene = arcade.Scene.from_tilemap(self.tile_map) # Keep track of the score self.score = 0 # Set up the player, specifically placing it at these coordinates. 
image_source = ":resources:images/animated_characters/female_adventurer/femaleAdventurer_idle.png" self.player_sprite = arcade.Sprite(image_source, CHARACTER_SCALING) self.player_sprite.center_x = 128 self.player_sprite.center_y = 128 self.scene.add_sprite("Player", self.player_sprite) # --- Other stuff # Set the background color if self.tile_map.background_color: arcade.set_background_color(self.tile_map.background_color) # Create the 'physics engine' self.physics_engine = arcade.PhysicsEnginePlatformer( self.player_sprite, gravity_constant=GRAVITY, walls=self.scene["Platforms"] ) def on_draw(self): """Render the screen.""" # Clear the screen to the background color arcade.start_render() # Activate the game camera self.camera.use() # Draw our Scene self.scene.draw() # Activate the GUI camera before drawing GUI elements self.gui_camera.use() # Draw our score on the screen, scrolling it with the viewport score_text = f"Score: {self.score}" arcade.draw_text( score_text, 10, 10, arcade.csscolor.WHITE, 18, ) def on_key_press(self, key, modifiers): """Called whenever a key is pressed.""" if key == arcade.key.UP or key == arcade.key.W: if self.physics_engine.can_jump(): self.player_sprite.change_y = PLAYER_JUMP_SPEED arcade.play_sound(self.jump_sound) elif key == arcade.key.LEFT or key == arcade.key.A: self.player_sprite.change_x = -PLAYER_MOVEMENT_SPEED elif key == arcade.key.RIGHT or key == arcade.key.D: self.player_sprite.change_x = PLAYER_MOVEMENT_SPEED def on_key_release(self, key, modifiers): """Called when the user releases a key.""" if key == arcade.key.LEFT or key == arcade.key.A: self.player_sprite.change_x = 0 elif key == arcade.key.RIGHT or key == arcade.key.D: self.player_sprite.change_x = 0 def center_camera_to_player(self): screen_center_x = self.player_sprite.center_x - (self.camera.viewport_width / 2) screen_center_y = self.player_sprite.center_y - ( self.camera.viewport_height / 2 ) if screen_center_x < 0: screen_center_x = 0 if screen_center_y < 0: screen_center_y = 0 player_centered = screen_center_x, screen_center_y self.camera.move_to(player_centered) def on_update(self, delta_time): """Movement and game logic""" # Move the player with the physics engine self.physics_engine.update() # See if we hit any coins coin_hit_list = arcade.check_for_collision_with_list( self.player_sprite, self.scene["Coins"] ) # Loop through each coin we hit (if any) and remove it for coin in coin_hit_list: # Remove the coin coin.remove_from_sprite_lists() # Play a sound arcade.play_sound(self.collect_coin_sound) # Add one to the score self.score += 1 # Position the camera self.center_camera_to_player() def main(): """Main function""" window = MyGame() window.setup() arcade.run() if __name__ == "__main__": main()
30.474747
106
0.632748
import arcade SCREEN_WIDTH = 1000 SCREEN_HEIGHT = 650 SCREEN_TITLE = "Platformer" CHARACTER_SCALING = 1 TILE_SCALING = 0.5 COIN_SCALING = 0.5 SPRITE_PIXEL_SIZE = 128 GRID_PIXEL_SIZE = SPRITE_PIXEL_SIZE * TILE_SCALING PLAYER_MOVEMENT_SPEED = 10 GRAVITY = 1 PLAYER_JUMP_SPEED = 20 class MyGame(arcade.Window): def __init__(self): super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE) self.tile_map = None self.scene = None self.player_sprite = None self.physics_engine = None self.camera = None self.gui_camera = None self.score = 0 self.collect_coin_sound = arcade.load_sound(":resources:sounds/coin1.wav") self.jump_sound = arcade.load_sound(":resources:sounds/jump1.wav") arcade.set_background_color(arcade.csscolor.CORNFLOWER_BLUE) def setup(self): self.camera = arcade.Camera(self.width, self.height) self.gui_camera = arcade.Camera(self.width, self.height) map_name = ":resources:tiled_maps/map.json" layer_options = { "Platforms": { "use_spatial_hash": True, }, } self.tile_map = arcade.load_tilemap(map_name, TILE_SCALING, layer_options) self.scene = arcade.Scene.from_tilemap(self.tile_map) self.score = 0 image_source = ":resources:images/animated_characters/female_adventurer/femaleAdventurer_idle.png" self.player_sprite = arcade.Sprite(image_source, CHARACTER_SCALING) self.player_sprite.center_x = 128 self.player_sprite.center_y = 128 self.scene.add_sprite("Player", self.player_sprite) if self.tile_map.background_color: arcade.set_background_color(self.tile_map.background_color) self.physics_engine = arcade.PhysicsEnginePlatformer( self.player_sprite, gravity_constant=GRAVITY, walls=self.scene["Platforms"] ) def on_draw(self): arcade.start_render() self.camera.use() self.scene.draw() self.gui_camera.use() score_text = f"Score: {self.score}" arcade.draw_text( score_text, 10, 10, arcade.csscolor.WHITE, 18, ) def on_key_press(self, key, modifiers): if key == arcade.key.UP or key == arcade.key.W: if self.physics_engine.can_jump(): self.player_sprite.change_y = PLAYER_JUMP_SPEED arcade.play_sound(self.jump_sound) elif key == arcade.key.LEFT or key == arcade.key.A: self.player_sprite.change_x = -PLAYER_MOVEMENT_SPEED elif key == arcade.key.RIGHT or key == arcade.key.D: self.player_sprite.change_x = PLAYER_MOVEMENT_SPEED def on_key_release(self, key, modifiers): if key == arcade.key.LEFT or key == arcade.key.A: self.player_sprite.change_x = 0 elif key == arcade.key.RIGHT or key == arcade.key.D: self.player_sprite.change_x = 0 def center_camera_to_player(self): screen_center_x = self.player_sprite.center_x - (self.camera.viewport_width / 2) screen_center_y = self.player_sprite.center_y - ( self.camera.viewport_height / 2 ) if screen_center_x < 0: screen_center_x = 0 if screen_center_y < 0: screen_center_y = 0 player_centered = screen_center_x, screen_center_y self.camera.move_to(player_centered) def on_update(self, delta_time): self.physics_engine.update() coin_hit_list = arcade.check_for_collision_with_list( self.player_sprite, self.scene["Coins"] ) for coin in coin_hit_list: coin.remove_from_sprite_lists() arcade.play_sound(self.collect_coin_sound) self.score += 1 self.center_camera_to_player() def main(): window = MyGame() window.setup() arcade.run() if __name__ == "__main__": main()
true
true
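The record above centers an arcade.Camera on the player and clamps the result at the map origin. A minimal sketch of that clamping math as a standalone function, using hypothetical names (camera_target, viewport_w, viewport_h) rather than anything from the arcade API:

# Hedged sketch of the center_camera_to_player math from the record above.
# camera_target, viewport_w and viewport_h are illustrative names, not
# part of arcade; only the max(..., 0) clamp mirrors the original.
def camera_target(player_x, player_y, viewport_w, viewport_h):
    # Center the viewport on the player.
    cx = player_x - viewport_w / 2
    cy = player_y - viewport_h / 2
    # Clamp so the camera never scrolls below or left of the map origin.
    return max(cx, 0), max(cy, 0)

# With a 1000x650 window and the player at (128, 128), both offsets are
# negative, so the camera stays pinned at the origin.
assert camera_target(128, 128, 1000, 650) == (0, 0)

Note that the original clamps only the lower-left corner; the camera can still scroll past the right and top edges of the map.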
f7001373a7204c33b024b0c7595942d17d82aa97
19,924
py
Python
lib/ycmd/start.py
mrmansano/sublime-ycmd
fece62f0ce4e9cbf96ed8ba07f5cecb24b21427e
[ "MIT" ]
12
2018-01-24T20:58:10.000Z
2021-12-21T15:02:10.000Z
lib/ycmd/start.py
mrmansano/sublime-ycmd
fece62f0ce4e9cbf96ed8ba07f5cecb24b21427e
[ "MIT" ]
4
2018-01-13T14:39:45.000Z
2020-11-25T00:05:27.000Z
lib/ycmd/start.py
mrmansano/sublime-ycmd
fece62f0ce4e9cbf96ed8ba07f5cecb24b21427e
[ "MIT" ]
2
2018-10-23T17:13:44.000Z
2019-05-12T04:10:17.000Z
#!/usr/bin/env python3 ''' lib/ycmd/start.py Server bootstrap logic. Includes a utility class for normalizing parameters and calculating default ones. Also includes a helper to set up the temporary options file. ''' import logging import os import tempfile from ..process import ( FileHandles, Process, ) from ..util.fs import ( default_python_binary_path, save_json_file, ) from ..ycmd.constants import ( YCMD_LOG_SPOOL_OUTPUT, YCMD_LOG_SPOOL_SIZE, YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS, YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS, ) from ..ycmd.settings import ( get_default_settings_path, generate_settings_data, ) logger = logging.getLogger('sublime-ycmd.' + __name__) class StartupParameters(object): ''' Startup parameters for a ycmd server instance. Should include all the necessary configuration for creating the ycmd server process. Also calculates defaults for certain parameters. ''' def __init__(self, ycmd_root_directory=None, ycmd_settings_path=None, working_directory=None, python_binary_path=None, server_idle_suicide_seconds=None, server_check_interval_seconds=None): self._ycmd_root_directory = None self._ycmd_settings_path = None self._working_directory = None self._python_binary_path = None self._server_idle_suicide_seconds = None self._server_check_interval_seconds = None # additional attributes, can be set via the properties self._log_level = None self._stdout_log_path = None self._stderr_log_path = None self._keep_logs = None self.ycmd_root_directory = ycmd_root_directory self.ycmd_settings_path = ycmd_settings_path self.working_directory = working_directory self.python_binary_path = python_binary_path self.server_idle_suicide_seconds = server_idle_suicide_seconds self.server_check_interval_seconds = server_check_interval_seconds @property def ycmd_root_directory(self): if self._ycmd_root_directory is None: logger.warning('no ycmd root directory has been set') return self._ycmd_root_directory @ycmd_root_directory.setter def ycmd_root_directory(self, ycmd_root_directory): if ycmd_root_directory is not None and \ not isinstance(ycmd_root_directory, str): raise TypeError(ycmd_root_directory,) self._ycmd_root_directory = ycmd_root_directory @property def ycmd_settings_path(self): if self._ycmd_settings_path is None: if self._ycmd_root_directory is not None: return get_default_settings_path(self._ycmd_root_directory) logger.warning('no ycmd root directory has been set') return self._ycmd_settings_path @ycmd_settings_path.setter def ycmd_settings_path(self, ycmd_settings_path): if ycmd_settings_path is not None and \ not isinstance(ycmd_settings_path, str): raise TypeError(ycmd_settings_path,) self._ycmd_settings_path = ycmd_settings_path @property def working_directory(self): if self._working_directory is None: return os.getcwd() return self._working_directory @working_directory.setter def working_directory(self, working_directory): if working_directory is not None and \ not isinstance(working_directory, str): raise TypeError(working_directory,) self._working_directory = working_directory @property def python_binary_path(self): if self._python_binary_path is None: return default_python_binary_path() return self._python_binary_path @python_binary_path.setter def python_binary_path(self, python_binary_path): if python_binary_path is not None and \ not isinstance(python_binary_path, str): raise TypeError(python_binary_path,) self._python_binary_path = python_binary_path @property def server_idle_suicide_seconds(self): if self._server_idle_suicide_seconds is None: return 
YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS return self._server_idle_suicide_seconds @server_idle_suicide_seconds.setter def server_idle_suicide_seconds(self, server_idle_suicide_seconds): if server_idle_suicide_seconds is not None and \ not isinstance(server_idle_suicide_seconds, int): raise TypeError(server_idle_suicide_seconds,) self._server_idle_suicide_seconds = server_idle_suicide_seconds @property def server_check_interval_seconds(self): if self._server_check_interval_seconds is None: return YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS return self._server_check_interval_seconds @server_check_interval_seconds.setter def server_check_interval_seconds(self, server_check_interval_seconds): if server_check_interval_seconds is not None and \ not isinstance(server_check_interval_seconds, int): raise TypeError(server_check_interval_seconds,) self._server_check_interval_seconds = server_check_interval_seconds @property def log_level(self): return self._log_level @log_level.setter def log_level(self, log_level): if log_level is not None and not isinstance(log_level, str): raise TypeError('log level must be a str: %r' % (log_level)) if log_level is not None and not _is_valid_log_level(log_level): logger.warning('log level unrecognized: %r', log_level) # but fall through and do it anyway self._log_level = log_level @property def stdout_log_path(self): return self._stdout_log_path @stdout_log_path.setter def stdout_log_path(self, stdout_log_path): if stdout_log_path is not None and \ not isinstance(stdout_log_path, str): raise TypeError( 'stdout log path must be a str: %r' % (stdout_log_path) ) self._stdout_log_path = stdout_log_path @property def stderr_log_path(self): return self._stderr_log_path @stderr_log_path.setter def stderr_log_path(self, stderr_log_path): if stderr_log_path is not None and \ not isinstance(stderr_log_path, str): raise TypeError( 'stderr_log_path must be a str: %r' % (stderr_log_path) ) self._stderr_log_path = stderr_log_path @property def keep_logs(self): if self._keep_logs is None: return False return self._keep_logs @keep_logs.setter def keep_logs(self, keep_logs): if keep_logs is not None and not isinstance(keep_logs, bool): raise TypeError('keep-logs must be a bool: %r' % (keep_logs)) self._keep_logs = keep_logs @property def ycmd_module_directory(self): if self._ycmd_root_directory is None: logger.error('no ycmd root directory set') raise AttributeError return os.path.join(self._ycmd_root_directory, 'ycmd') def copy(self): ''' Creates a shallow-copy of the startup parameters. ''' raw_attrs = [ '_ycmd_root_directory', '_ycmd_settings_path', '_working_directory', '_python_binary_path', '_server_idle_suicide_seconds', '_server_check_interval_seconds', '_log_level', '_stdout_log_path', '_stderr_log_path', '_keep_logs', ] result = StartupParameters() for attr in raw_attrs: attr_value = getattr(self, attr) setattr(result, attr, attr_value) return result def __iter__(self): ''' Dictionary-compatible iterator. 
''' return iter(( ('ycmd_root_directory', self.ycmd_root_directory), ('ycmd_settings_path', self.ycmd_settings_path), ('working_directory', self.working_directory), ('python_binary_path', self.python_binary_path), ('server_idle_suicide_seconds', self.server_idle_suicide_seconds), ( 'server_check_interval_seconds', self.server_check_interval_seconds, ), ('ycmd_module_directory', self.ycmd_module_directory), ('log_level', self.log_level), ('stdout_log_path', self.stdout_log_path), ('stderr_log_path', self.stderr_log_path), ('keep_logs', self.keep_logs), )) def __str__(self): return ( 'ycmd path, default settings path, ' 'python binary path, working directory: ' '%(ycmd_root_directory)s, %(ycmd_settings_path)s, ' '%(python_binary_path)s, %(working_directory)s' % (dict(self)) ) def __repr__(self): return '%s(%r)' % (StartupParameters, dict(self)) def to_startup_parameters(ycmd_root_directory, ycmd_settings_path=None, working_directory=None, python_binary_path=None, server_idle_suicide_seconds=None, server_check_interval_seconds=None): ''' Internal convenience function. Receives the raw arguments to starting a ycmd server and returns a `StartupParameters` instance from it. If the first argument is already `StartupParameters`, it is returned as-is, and the remaining parameters are ignored. Otherwise, a `StartupParameters` instance is constructed with all the given parameters and returned. ''' if isinstance(ycmd_root_directory, StartupParameters): # great, already in the desired state # check if other params are provided and issue a warning # (they get ignored in that case) if ycmd_settings_path is not None: logger.warning( 'ycmd settings path will be ignored: %s', ycmd_settings_path, ) if working_directory is not None: logger.warning( 'working directory will be ignored: %s', working_directory, ) if python_binary_path is not None: logger.warning( 'python binary path will be ignored: %s', python_binary_path, ) if server_idle_suicide_seconds is not None: logger.warning( 'server idle suicide seconds will be ignored: %s', server_idle_suicide_seconds, ) if server_check_interval_seconds is not None: logger.warning( 'server check interval seconds will be ignored: %s', server_check_interval_seconds, ) return ycmd_root_directory # else, generate them logger.warning('[DEPRECATED] to startup parameters', stack_info=True) logger.debug( 'generating startup parameters with root: %s', ycmd_root_directory, ) return StartupParameters( ycmd_root_directory, ycmd_settings_path=ycmd_settings_path, working_directory=working_directory, python_binary_path=python_binary_path, server_idle_suicide_seconds=server_idle_suicide_seconds, server_check_interval_seconds=server_check_interval_seconds, ) def check_startup_parameters(startup_parameters): ''' Performs quick, non-blocking validation on startup parameters to catch type mismatches or empty configurations. Raises an exception or returns `None`. This is meant to be run on the main thread to catch common startup errors before initializing the server off-thread. It isn't strictly necessary, but produces nicer error messages when the plugin is not configured correctly. NOTE : This does not check the file system for things like missing files, as that can be a blocking operation. 
''' if not isinstance(startup_parameters, StartupParameters): raise TypeError( 'startup parameters must be StartupParameters: %r' % (startup_parameters) ) ycmd_root_directory = startup_parameters.ycmd_root_directory if not ycmd_root_directory: raise RuntimeError('no ycmd root directory has been set') ycmd_settings_path = startup_parameters.ycmd_settings_path if not ycmd_settings_path: raise RuntimeError('no ycmd default settings path has been set') logger.debug( 'startup parameters seem to be filled in, ' 'ready to attempt startup: %r', startup_parameters, ) def write_ycmd_settings_file(ycmd_settings_path, ycmd_hmac_secret, out=None): ''' Writes out a ycmd server settings file based on the template file `ycmd_settings_path`. A uniquely-generated `ycmd_hmac_secret` must also be supplied, as it needs to be written into this file. The return value is the path to the settings file, as a `str`. If `out` is omitted, a secure temporary file is created, and the returned path should be passed via the options flag to ycmd. If `out` is provided, it should be a path to an output file (`str`), or a file-like handle (must support `.write`). This is not recommended for use with ycmd, as it may be insecure. ''' ycmd_settings_data = generate_settings_data( ycmd_settings_path, ycmd_hmac_secret, ) out_path = None if out is None: # no point using `with` for this, since we also use `delete=False` temp_file_object = tempfile.NamedTemporaryFile( prefix='ycmd_settings_', suffix='.json', delete=False, ) temp_file_name = temp_file_object.name temp_file_handle = temp_file_object.file # type: io.TextIOWrapper out = temp_file_handle out_path = temp_file_name def flush(): temp_file_handle.flush() def close(): temp_file_object.close() else: raise NotImplementedError('unimplemented: output to specific file') if out_path is None and out is not None: logger.error('failed to get path for output file: %r', out) # fall through and write it out anyway save_json_file(out, ycmd_settings_data) flush() close() logger.debug('successfully wrote file: %s', out_path) return out_path def prepare_ycmd_process(startup_parameters, ycmd_settings_tempfile_path, ycmd_server_hostname, ycmd_server_port): ''' Initializes and returns a `Process` handle, correctly configured to launch a ycmd server process. It does not automatically start it though. The `ycmd_settings_tempfile_path` should be created by (return value of) `write_ycmd_settings_file`. The ycmd server process will read that file on startup and then immediately delete it. The `ycmd_server_hostname` and `ycmd_server_port` must also be provided to instruct the server to listen on the given address. 
''' assert isinstance(startup_parameters, StartupParameters), \ 'startup parameters must be StartupParameters: %r' % \ (startup_parameters) assert isinstance(ycmd_settings_tempfile_path, str), \ 'ycmd settings temporary file path must be a str: %r' % \ (ycmd_settings_tempfile_path) # this may throw: check_startup_parameters(startup_parameters) working_directory = startup_parameters.working_directory python_binary_path = startup_parameters.python_binary_path server_idle_suicide_seconds = \ startup_parameters.server_idle_suicide_seconds server_check_interval_seconds = \ startup_parameters.server_check_interval_seconds ycmd_module_directory = startup_parameters.ycmd_module_directory if YCMD_LOG_SPOOL_OUTPUT: stdout_log_spool = \ tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE) stderr_log_spool = \ tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE) logger.debug( 'using temporary spools for stdout, stderr: %r, %r', stdout_log_spool, stderr_log_spool, ) stdout_handle = stdout_log_spool stderr_handle = stderr_log_spool else: # explicitly close handles - don't inherit from this process stdout_handle = FileHandles.DEVNULL stderr_handle = FileHandles.DEVNULL ycmd_process_handle = Process() ycmd_process_handle.binary = python_binary_path ycmd_process_handle.args.extend([ ycmd_module_directory, '--host=%s' % (ycmd_server_hostname), '--port=%s' % (ycmd_server_port), '--idle_suicide_seconds=%s' % (server_idle_suicide_seconds), '--check_interval_seconds=%s' % (server_check_interval_seconds), '--options_file=%s' % (ycmd_settings_tempfile_path), ]) ycmd_process_handle.cwd = working_directory ycmd_process_handle.filehandles.stdout = stdout_handle ycmd_process_handle.filehandles.stderr = stderr_handle if startup_parameters.log_level is not None: add_ycmd_debug_args( ycmd_process_handle, log_level=startup_parameters.log_level, stdout_file_name=startup_parameters.stdout_log_path, stderr_file_name=startup_parameters.stderr_log_path, keep_logfiles=startup_parameters.keep_logs, ) return ycmd_process_handle def add_ycmd_debug_args(ycmd_process_handle, log_level='info', stdout_file_name=None, stderr_file_name=None, keep_logfiles=False): ''' Adds startup flags to `ycmd_process_handle` to enable logging output. The `ycmd_process_handle` should be an instance of `Process`. The `log_level` should be one of 'debug', 'info', 'warning', 'error', or 'critical'. Any `str` is accepted, this routine does not actually check it. If `stdout_file_name` and `stderr_file_name` are provided, the server will write log messages to the given files. The bulk of the logs will be on stderr, with only a few startup messages appearing on stdout. If `keep_logfiles` is `True`, then the server won't delete the log files when it exits. Otherwise, the log files will be deleted when it shuts down. 
''' if not isinstance(ycmd_process_handle, Process): raise TypeError( 'ycmd process handle must be a Process: %r' % (ycmd_process_handle) ) assert isinstance(ycmd_process_handle, Process) if ycmd_process_handle.alive(): raise ValueError( 'ycmd process is already started, cannot modify it: %r' % (ycmd_process_handle) ) if not _is_valid_log_level(log_level): logger.warning('log level unrecognized: %r', log_level) # but fall through and do it anyway ycmd_debug_args = [ '--log=%s' % (log_level), ] if stdout_file_name and stderr_file_name: ycmd_debug_args.extend([ '--stdout=%s' % (stdout_file_name), '--stderr=%s' % (stderr_file_name), ]) if keep_logfiles: ycmd_debug_args.append( '--keep_logfiles', ) logger.debug('adding ycmd debug args: %r', ycmd_debug_args) ycmd_process_handle.args.extend(ycmd_debug_args) def _is_valid_log_level(log_level): if not isinstance(log_level, str): raise TypeError('log level must be a str: %r' % (log_level)) # these can be found by running `python /path/to/ycmd/ycmd --help` recognized_log_levels = [ 'debug', 'info', 'warning', 'error', 'critical', ] return log_level in recognized_log_levels
36.291439
79
0.672656
import logging import os import tempfile from ..process import ( FileHandles, Process, ) from ..util.fs import ( default_python_binary_path, save_json_file, ) from ..ycmd.constants import ( YCMD_LOG_SPOOL_OUTPUT, YCMD_LOG_SPOOL_SIZE, YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS, YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS, ) from ..ycmd.settings import ( get_default_settings_path, generate_settings_data, ) logger = logging.getLogger('sublime-ycmd.' + __name__) class StartupParameters(object): def __init__(self, ycmd_root_directory=None, ycmd_settings_path=None, working_directory=None, python_binary_path=None, server_idle_suicide_seconds=None, server_check_interval_seconds=None): self._ycmd_root_directory = None self._ycmd_settings_path = None self._working_directory = None self._python_binary_path = None self._server_idle_suicide_seconds = None self._server_check_interval_seconds = None self._log_level = None self._stdout_log_path = None self._stderr_log_path = None self._keep_logs = None self.ycmd_root_directory = ycmd_root_directory self.ycmd_settings_path = ycmd_settings_path self.working_directory = working_directory self.python_binary_path = python_binary_path self.server_idle_suicide_seconds = server_idle_suicide_seconds self.server_check_interval_seconds = server_check_interval_seconds @property def ycmd_root_directory(self): if self._ycmd_root_directory is None: logger.warning('no ycmd root directory has been set') return self._ycmd_root_directory @ycmd_root_directory.setter def ycmd_root_directory(self, ycmd_root_directory): if ycmd_root_directory is not None and \ not isinstance(ycmd_root_directory, str): raise TypeError(ycmd_root_directory,) self._ycmd_root_directory = ycmd_root_directory @property def ycmd_settings_path(self): if self._ycmd_settings_path is None: if self._ycmd_root_directory is not None: return get_default_settings_path(self._ycmd_root_directory) logger.warning('no ycmd root directory has been set') return self._ycmd_settings_path @ycmd_settings_path.setter def ycmd_settings_path(self, ycmd_settings_path): if ycmd_settings_path is not None and \ not isinstance(ycmd_settings_path, str): raise TypeError(ycmd_settings_path,) self._ycmd_settings_path = ycmd_settings_path @property def working_directory(self): if self._working_directory is None: return os.getcwd() return self._working_directory @working_directory.setter def working_directory(self, working_directory): if working_directory is not None and \ not isinstance(working_directory, str): raise TypeError(working_directory,) self._working_directory = working_directory @property def python_binary_path(self): if self._python_binary_path is None: return default_python_binary_path() return self._python_binary_path @python_binary_path.setter def python_binary_path(self, python_binary_path): if python_binary_path is not None and \ not isinstance(python_binary_path, str): raise TypeError(python_binary_path,) self._python_binary_path = python_binary_path @property def server_idle_suicide_seconds(self): if self._server_idle_suicide_seconds is None: return YCMD_DEFAULT_SERVER_IDLE_SUICIDE_SECONDS return self._server_idle_suicide_seconds @server_idle_suicide_seconds.setter def server_idle_suicide_seconds(self, server_idle_suicide_seconds): if server_idle_suicide_seconds is not None and \ not isinstance(server_idle_suicide_seconds, int): raise TypeError(server_idle_suicide_seconds,) self._server_idle_suicide_seconds = server_idle_suicide_seconds @property def server_check_interval_seconds(self): if 
self._server_check_interval_seconds is None: return YCMD_DEFAULT_SERVER_CHECK_INTERVAL_SECONDS return self._server_check_interval_seconds @server_check_interval_seconds.setter def server_check_interval_seconds(self, server_check_interval_seconds): if server_check_interval_seconds is not None and \ not isinstance(server_check_interval_seconds, int): raise TypeError(server_check_interval_seconds,) self._server_check_interval_seconds = server_check_interval_seconds @property def log_level(self): return self._log_level @log_level.setter def log_level(self, log_level): if log_level is not None and not isinstance(log_level, str): raise TypeError('log level must be a str: %r' % (log_level)) if log_level is not None and not _is_valid_log_level(log_level): logger.warning('log level unrecognized: %r', log_level) self._log_level = log_level @property def stdout_log_path(self): return self._stdout_log_path @stdout_log_path.setter def stdout_log_path(self, stdout_log_path): if stdout_log_path is not None and \ not isinstance(stdout_log_path, str): raise TypeError( 'stdout log path must be a str: %r' % (stdout_log_path) ) self._stdout_log_path = stdout_log_path @property def stderr_log_path(self): return self._stderr_log_path @stderr_log_path.setter def stderr_log_path(self, stderr_log_path): if stderr_log_path is not None and \ not isinstance(stderr_log_path, str): raise TypeError( 'stderr_log_path must be a str: %r' % (stderr_log_path) ) self._stderr_log_path = stderr_log_path @property def keep_logs(self): if self._keep_logs is None: return False return self._keep_logs @keep_logs.setter def keep_logs(self, keep_logs): if keep_logs is not None and not isinstance(keep_logs, bool): raise TypeError('keep-logs must be a bool: %r' % (keep_logs)) self._keep_logs = keep_logs @property def ycmd_module_directory(self): if self._ycmd_root_directory is None: logger.error('no ycmd root directory set') raise AttributeError return os.path.join(self._ycmd_root_directory, 'ycmd') def copy(self): raw_attrs = [ '_ycmd_root_directory', '_ycmd_settings_path', '_working_directory', '_python_binary_path', '_server_idle_suicide_seconds', '_server_check_interval_seconds', '_log_level', '_stdout_log_path', '_stderr_log_path', '_keep_logs', ] result = StartupParameters() for attr in raw_attrs: attr_value = getattr(self, attr) setattr(result, attr, attr_value) return result def __iter__(self): return iter(( ('ycmd_root_directory', self.ycmd_root_directory), ('ycmd_settings_path', self.ycmd_settings_path), ('working_directory', self.working_directory), ('python_binary_path', self.python_binary_path), ('server_idle_suicide_seconds', self.server_idle_suicide_seconds), ( 'server_check_interval_seconds', self.server_check_interval_seconds, ), ('ycmd_module_directory', self.ycmd_module_directory), ('log_level', self.log_level), ('stdout_log_path', self.stdout_log_path), ('stderr_log_path', self.stderr_log_path), ('keep_logs', self.keep_logs), )) def __str__(self): return ( 'ycmd path, default settings path, ' 'python binary path, working directory: ' '%(ycmd_root_directory)s, %(ycmd_settings_path)s, ' '%(python_binary_path)s, %(working_directory)s' % (dict(self)) ) def __repr__(self): return '%s(%r)' % (StartupParameters, dict(self)) def to_startup_parameters(ycmd_root_directory, ycmd_settings_path=None, working_directory=None, python_binary_path=None, server_idle_suicide_seconds=None, server_check_interval_seconds=None): if isinstance(ycmd_root_directory, StartupParameters): if ycmd_settings_path is not None: logger.warning( 'ycmd 
settings path will be ignored: %s', ycmd_settings_path, ) if working_directory is not None: logger.warning( 'working directory will be ignored: %s', working_directory, ) if python_binary_path is not None: logger.warning( 'python binary path will be ignored: %s', python_binary_path, ) if server_idle_suicide_seconds is not None: logger.warning( 'server idle suicide seconds will be ignored: %s', server_idle_suicide_seconds, ) if server_check_interval_seconds is not None: logger.warning( 'server check interval seconds will be ignored: %s', server_check_interval_seconds, ) return ycmd_root_directory logger.warning('[DEPRECATED] to startup parameters', stack_info=True) logger.debug( 'generating startup parameters with root: %s', ycmd_root_directory, ) return StartupParameters( ycmd_root_directory, ycmd_settings_path=ycmd_settings_path, working_directory=working_directory, python_binary_path=python_binary_path, server_idle_suicide_seconds=server_idle_suicide_seconds, server_check_interval_seconds=server_check_interval_seconds, ) def check_startup_parameters(startup_parameters): if not isinstance(startup_parameters, StartupParameters): raise TypeError( 'startup parameters must be StartupParameters: %r' % (startup_parameters) ) ycmd_root_directory = startup_parameters.ycmd_root_directory if not ycmd_root_directory: raise RuntimeError('no ycmd root directory has been set') ycmd_settings_path = startup_parameters.ycmd_settings_path if not ycmd_settings_path: raise RuntimeError('no ycmd default settings path has been set') logger.debug( 'startup parameters seem to be filled in, ' 'ready to attempt startup: %r', startup_parameters, ) def write_ycmd_settings_file(ycmd_settings_path, ycmd_hmac_secret, out=None): ycmd_settings_data = generate_settings_data( ycmd_settings_path, ycmd_hmac_secret, ) out_path = None if out is None: temp_file_object = tempfile.NamedTemporaryFile( prefix='ycmd_settings_', suffix='.json', delete=False, ) temp_file_name = temp_file_object.name temp_file_handle = temp_file_object.file out = temp_file_handle out_path = temp_file_name def flush(): temp_file_handle.flush() def close(): temp_file_object.close() else: raise NotImplementedError('unimplemented: output to specific file') if out_path is None and out is not None: logger.error('failed to get path for output file: %r', out) save_json_file(out, ycmd_settings_data) flush() close() logger.debug('successfully wrote file: %s', out_path) return out_path def prepare_ycmd_process(startup_parameters, ycmd_settings_tempfile_path, ycmd_server_hostname, ycmd_server_port): assert isinstance(startup_parameters, StartupParameters), \ 'startup parameters must be StartupParameters: %r' % \ (startup_parameters) assert isinstance(ycmd_settings_tempfile_path, str), \ 'ycmd settings temporary file path must be a str: %r' % \ (ycmd_settings_tempfile_path) check_startup_parameters(startup_parameters) working_directory = startup_parameters.working_directory python_binary_path = startup_parameters.python_binary_path server_idle_suicide_seconds = \ startup_parameters.server_idle_suicide_seconds server_check_interval_seconds = \ startup_parameters.server_check_interval_seconds ycmd_module_directory = startup_parameters.ycmd_module_directory if YCMD_LOG_SPOOL_OUTPUT: stdout_log_spool = \ tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE) stderr_log_spool = \ tempfile.SpooledTemporaryFile(max_size=YCMD_LOG_SPOOL_SIZE) logger.debug( 'using temporary spools for stdout, stderr: %r, %r', stdout_log_spool, stderr_log_spool, ) stdout_handle = 
stdout_log_spool stderr_handle = stderr_log_spool else: stdout_handle = FileHandles.DEVNULL stderr_handle = FileHandles.DEVNULL ycmd_process_handle = Process() ycmd_process_handle.binary = python_binary_path ycmd_process_handle.args.extend([ ycmd_module_directory, '--host=%s' % (ycmd_server_hostname), '--port=%s' % (ycmd_server_port), '--idle_suicide_seconds=%s' % (server_idle_suicide_seconds), '--check_interval_seconds=%s' % (server_check_interval_seconds), '--options_file=%s' % (ycmd_settings_tempfile_path), ]) ycmd_process_handle.cwd = working_directory ycmd_process_handle.filehandles.stdout = stdout_handle ycmd_process_handle.filehandles.stderr = stderr_handle if startup_parameters.log_level is not None: add_ycmd_debug_args( ycmd_process_handle, log_level=startup_parameters.log_level, stdout_file_name=startup_parameters.stdout_log_path, stderr_file_name=startup_parameters.stderr_log_path, keep_logfiles=startup_parameters.keep_logs, ) return ycmd_process_handle def add_ycmd_debug_args(ycmd_process_handle, log_level='info', stdout_file_name=None, stderr_file_name=None, keep_logfiles=False): if not isinstance(ycmd_process_handle, Process): raise TypeError( 'ycmd process handle must be a Process: %r' % (ycmd_process_handle) ) assert isinstance(ycmd_process_handle, Process) if ycmd_process_handle.alive(): raise ValueError( 'ycmd process is already started, cannot modify it: %r' % (ycmd_process_handle) ) if not _is_valid_log_level(log_level): logger.warning('log level unrecognized: %r', log_level) # but fall through and do it anyway ycmd_debug_args = [ '--log=%s' % (log_level), ] if stdout_file_name and stderr_file_name: ycmd_debug_args.extend([ '--stdout=%s' % (stdout_file_name), '--stderr=%s' % (stderr_file_name), ]) if keep_logfiles: ycmd_debug_args.append( '--keep_logfiles', ) logger.debug('adding ycmd debug args: %r', ycmd_debug_args) ycmd_process_handle.args.extend(ycmd_debug_args) def _is_valid_log_level(log_level): if not isinstance(log_level, str): raise TypeError('log level must be a str: %r' % (log_level)) # these can be found by running `python /path/to/ycmd/ycmd --help` recognized_log_levels = [ 'debug', 'info', 'warning', 'error', 'critical', ] return log_level in recognized_log_levels
true
true
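write_ycmd_settings_file in the record above leans on tempfile.NamedTemporaryFile(delete=False) so the settings path survives after the handle is closed and can be handed to the ycmd process via --options_file. A minimal sketch of that pattern, assuming a plain dict of settings and json in place of the plugin's generate_settings_data/save_json_file helpers:

# Hedged sketch of the delete=False tempfile pattern used by
# write_ycmd_settings_file. write_settings_tempfile and the settings
# dict are stand-ins, not the plugin's real helpers.
import json
import tempfile

def write_settings_tempfile(settings):
    tmp = tempfile.NamedTemporaryFile(
        prefix='ycmd_settings_', suffix='.json', delete=False,
    )
    with tmp:  # closes the handle on exit; delete=False keeps the file
        tmp.write(json.dumps(settings).encode('utf-8'))
    # The consumer (ultimately ycmd, via --options_file) is responsible
    # for deleting the file once it has been read.
    return tmp.name

print(write_settings_tempfile({'hmac_secret': 'c2VjcmV0'}))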
f700138011862872e2cc9ea8d4b8a3ff7174ef9d
7,359
py
Python
xflash-serial.py
walczakp/xbox360-teensy-flasher
594990e835fa0f9111ced1901f52d2fb80d61b4d
[ "MIT" ]
11
2020-04-05T02:36:43.000Z
2021-11-11T21:56:17.000Z
xflash-serial.py
walczakp/xbox360-flasher
699a9a80c1a2fbfd7663144267dac950becc0405
[ "MIT" ]
1
2021-05-11T22:36:39.000Z
2021-05-13T11:16:01.000Z
xflash-serial.py
walczakp/xbox360-teensy-flasher
594990e835fa0f9111ced1901f52d2fb80d61b4d
[ "MIT" ]
2
2022-02-21T06:42:06.000Z
2022-03-26T19:28:09.000Z
#!/usr/bin/env python import serial import sys import struct import pprint import argparse import code pp = pprint.PrettyPrinter() class ConsoleUI: def opStart(self, name): sys.stdout.write(name.ljust(40)) def opProgress(self, progress, total=-1): if (total >= 0): prstr = "0x%04x / 0x%04x" % (progress, total) else: prstr = "0x%04x" % (progress) sys.stdout.write(prstr.ljust(20)) sys.stdout.write('\x08' * 20) sys.stdout.flush() def opEnd(self, result): sys.stdout.write(result.ljust(20)) sys.stdout.write("\n") class XFlash: def __init__(self, serialport): self.serial = serial.Serial(serialport, baudrate=115200) def __del__(self): try: self.serial.close() del self.serial except: pass def cmd(self, cmd, argA=0, argB=0): buffer = struct.pack("<LL", argA, argB) self.serial.write(bytes([cmd])) self.serial.write(buffer) self.serial.flush() def flashPowerOn(self): self.cmd(0x10) def flashShutdown(self): self.cmd(0x11) def update(self): try: self.cmd(0xF0) except: pass def flashInit(self): self.cmd(0x03) buffer = self.serial.read(4) return struct.unpack("<L", buffer)[0] def flashDeInit(self): self.cmd(0x04) def flashStatus(self): self.cmd(0x05) buffer = self.serial.read(2) return struct.unpack("<H", buffer)[0] def flashErase(self, block): self.cmd(0x06, block) # return self.flashStatus() def flashReadBlock(self, block): self.cmd(0x01, block, 528 * 32) # for i in range(0, 32): buffer = self.serial.read(528 * 32) status = self.flashStatus() return (status, buffer) def flashWriteBlock(self, block, buffer): self.cmd(0x02, block, len(buffer)) self.serial.write(buffer) return self.flashStatus() # def calcecc(data): # assert len(data) == 0x210 # val = 0 # for i in range(0x1066): # if not i & 31: # v = ~struct.unpack("<L", data[i/8:i/8+4])[0] # val ^= v & 1 # v >>= 1 # if val & 1: # val ^= 0x6954559 # val >>= 1 # # val = ~val # return data[:-4] + struct.pack("<L", (val << 6) & 0xFFFFFFFF) # # def addecc(data, block = 0, off_8 = "\x00" * 4): # res = "" # while len(data): # d = (data[:0x200] + "\x00" * 0x200)[:0x200] # data = data[0x200:] # # d += struct.pack("<L4B4s4s", block / 32, 0, 0xFF, 0, 0, off_8, "\0\0\0\0") # d = calcecc(d) # block += 1 # res += d # return res def main(argv): parser = argparse.ArgumentParser(description='XBox 360 NAND Flasher') parser.add_argument('port', metavar='port', type=str, help='serial port for comms (e.g. 
COM5 or /dev/ttyUSB0)') subparsers = parser.add_subparsers(title='Operations', dest='action') parser_read = subparsers.add_parser('read', help='Dumps an image from the NAND') parser_read.add_argument('file', nargs=1, type=argparse.FileType('wb'), help='The file to dump the NAND to') parser_read.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0, help='The block to start the action from') parser_read.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400, help='The count of blocks to perform the action to') parser_write = subparsers.add_parser('write', help='Writes an image into the NAND') parser_write.add_argument('file', nargs=1, type=argparse.FileType('rb'), help='The image file to write to the NAND') parser_write.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0, help='The block to start the action from') parser_write.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400, help='The count of blocks to perform the action to') # parser_erase = subparsers.add_parser('erase', help='Erases blocks in the NAND') # parser_erase.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0, # help='The block to start the action from') # parser_erase.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400, # help='The count of blocks to perform the action to') # # parser_update = subparsers.add_parser('update', # help='Jumps into the bootloader of the NAND Flashing device for updating the firmware') # parser_shutdown = subparsers.add_parser('shutdown', help='Shuts down the attached XBox 360') # parser_poweron = subparsers.add_parser('powerup', help='Powers up the attached XBox 360') arguments = parser.parse_args(argv[1:]) ui = ConsoleUI() xf = XFlash(arguments.port) if arguments.action in ('erase', 'write', 'read'): try: flash_config = xf.flashInit() print("FlashConfig: 0x%08x" % (flash_config)) if flash_config <= 0: raise Exception("FlashConfig invalid!") except Exception as e: print("Error!", e) xf.flashDeInit() return 1 try: if arguments.action == 'erase': # start = 0 # end = (options.flashsize * 1024) / 16 start = arguments.start end = arguments.end ui.opStart('Erase') ui.opProgress(0, end) for b in range(start, end): status = xf.flashErase(b) ui.opProgress(b + 1, end) ui.opEnd('0x%04x blocks OK' % (end)) if arguments.action == 'read': # start = 0 # end = (options.flashsize * 1024) / 16 start = arguments.start end = arguments.end ui.opStart('Read') ui.opProgress(0, end) for b in range(start, end): (status, buffer) = xf.flashReadBlock(b) ui.opProgress(b + 1, end) arguments.file[0].write(buffer) if arguments.action == 'write': # start = 0 # end = (options.flashsize * 1024) / 16 start = arguments.start end = arguments.end blocksize = 528 * 32 ui.opStart('Write') ui.opProgress(0, end) for b in range(start, end): buffer = arguments.file[0].read(blocksize) if len(buffer) < blocksize: buffer += ('\xFF' * (blocksize - len(buffer))) status = xf.flashWriteBlock(b, buffer) ui.opProgress(b + 1, end) # # if arguments.action == 'update': # xf.update() # # if arguments.action == 'powerup': # xf.flashPowerOn() # # if arguments.action == 'shutdown': # xf.flashShutdown() except Exception as e: raise e finally: xf.flashDeInit() return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
30.409091
131
0.552792
import serial import sys import struct import pprint import argparse import code pp = pprint.PrettyPrinter() class ConsoleUI: def opStart(self, name): sys.stdout.write(name.ljust(40)) def opProgress(self, progress, total=-1): if (total >= 0): prstr = "0x%04x / 0x%04x" % (progress, total) else: prstr = "0x%04x" % (progress) sys.stdout.write(prstr.ljust(20)) sys.stdout.write('\x08' * 20) sys.stdout.flush() def opEnd(self, result): sys.stdout.write(result.ljust(20)) sys.stdout.write("\n") class XFlash: def __init__(self, serialport): self.serial = serial.Serial(serialport, baudrate=115200) def __del__(self): try: self.serial.close() del self.serial except: pass def cmd(self, cmd, argA=0, argB=0): buffer = struct.pack("<LL", argA, argB) self.serial.write(bytes([cmd])) self.serial.write(buffer) self.serial.flush() def flashPowerOn(self): self.cmd(0x10) def flashShutdown(self): self.cmd(0x11) def update(self): try: self.cmd(0xF0) except: pass def flashInit(self): self.cmd(0x03) buffer = self.serial.read(4) return struct.unpack("<L", buffer)[0] def flashDeInit(self): self.cmd(0x04) def flashStatus(self): self.cmd(0x05) buffer = self.serial.read(2) return struct.unpack("<H", buffer)[0] def flashErase(self, block): self.cmd(0x06, block) def flashReadBlock(self, block): self.cmd(0x01, block, 528 * 32) buffer = self.serial.read(528 * 32) status = self.flashStatus() return (status, buffer) def flashWriteBlock(self, block, buffer): self.cmd(0x02, block, len(buffer)) self.serial.write(buffer) return self.flashStatus() def main(argv): parser = argparse.ArgumentParser(description='XBox 360 NAND Flasher') parser.add_argument('port', metavar='port', type=str, help='serial port for comms (e.g. COM5 or /dev/ttyUSB0)') subparsers = parser.add_subparsers(title='Operations', dest='action') parser_read = subparsers.add_parser('read', help='Dumps an image from the NAND') parser_read.add_argument('file', nargs=1, type=argparse.FileType('wb'), help='The file to dump the NAND to') parser_read.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0, help='The block to start the action from') parser_read.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400, help='The count of blocks to perform the action to') parser_write = subparsers.add_parser('write', help='Writes an image into the NAND') parser_write.add_argument('file', nargs=1, type=argparse.FileType('rb'), help='The image file to write to the NAND') parser_write.add_argument('start', nargs='?', metavar='start', action='store', type=int, default=0, help='The block to start the action from') parser_write.add_argument('end', nargs='?', metavar='end', action='store', type=int, default=0x400, help='The count of blocks to perform the action to') arguments = parser.parse_args(argv[1:]) ui = ConsoleUI() xf = XFlash(arguments.port) if arguments.action in ('erase', 'write', 'read'): try: flash_config = xf.flashInit() print("FlashConfig: 0x%08x" % (flash_config)) if flash_config <= 0: raise Exception("FlashConfig invalid!") except Exception as e: print("Error!", e) xf.flashDeInit() return 1 try: if arguments.action == 'erase': start = arguments.start end = arguments.end ui.opStart('Erase') ui.opProgress(0, end) for b in range(start, end): status = xf.flashErase(b) ui.opProgress(b + 1, end) ui.opEnd('0x%04x blocks OK' % (end)) if arguments.action == 'read': start = arguments.start end = arguments.end ui.opStart('Read') ui.opProgress(0, end) for b in range(start, end): (status, buffer) = xf.flashReadBlock(b) 
ui.opProgress(b + 1, end) arguments.file[0].write(buffer) if arguments.action == 'write': start = arguments.start end = arguments.end blocksize = 528 * 32 ui.opStart('Write') ui.opProgress(0, end) for b in range(start, end): buffer = arguments.file[0].read(blocksize) if len(buffer) < blocksize: buffer += (b'\xFF' * (blocksize - len(buffer))) status = xf.flashWriteBlock(b, buffer) ui.opProgress(b + 1, end) except Exception as e: raise e finally: xf.flashDeInit() return 0 if __name__ == '__main__': sys.exit(main(sys.argv))
true
true
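Two details of the record above are worth isolating: XFlash.cmd frames every command as one command byte followed by two little-endian uint32 arguments, and the write path in the content field pads short blocks with '\xFF' — a str — onto a bytes buffer, which raises TypeError under Python 3 (the padding must be b'\xFF'). A self-contained sketch of both, with illustrative function names not taken from the original script:

# Hedged sketch of the 9-byte command frame built by XFlash.cmd, plus the
# block padding with the str/bytes fix. build_cmd_frame and pad_block are
# illustrative names, not part of the original script.
import struct

def build_cmd_frame(cmd, arg_a=0, arg_b=0):
    # 1 command byte + two little-endian unsigned 32-bit arguments.
    return bytes([cmd]) + struct.pack('<LL', arg_a, arg_b)

def pad_block(buffer, blocksize=528 * 32):
    # Pad a short final block out to a full NAND block with 0xFF bytes.
    if len(buffer) < blocksize:
        buffer += b'\xFF' * (blocksize - len(buffer))  # bytes, not str
    return buffer

frame = build_cmd_frame(0x01, 5, 528 * 32)  # read block 5
assert len(frame) == 9
assert len(pad_block(b'\x00' * 100)) == 528 * 32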
f70014551e3b05adace4276bbdf12330b4b1aaf7
2,503
py
Python
CIM16/IEC61970/Informative/InfCustomers/ComplianceEvent.py
MaximeBaudette/PyCIM
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
[ "MIT" ]
null
null
null
CIM16/IEC61970/Informative/InfCustomers/ComplianceEvent.py
MaximeBaudette/PyCIM
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
[ "MIT" ]
null
null
null
CIM16/IEC61970/Informative/InfCustomers/ComplianceEvent.py
MaximeBaudette/PyCIM
d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14
[ "MIT" ]
1
2021-04-02T18:04:49.000Z
2021-04-02T18:04:49.000Z
# Copyright (C) 2010-2011 Richard Lincoln # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from CIM16.IEC61968.Common.ActivityRecord import ActivityRecord class ComplianceEvent(ActivityRecord): """Compliance events are used for reporting regulatory or contract compliance issues and/or variances. These might be created as a consequence of local business processes and associated rules. It is anticipated that this class will be customised extensively to meet local implementation needs. Use inherited 'category' to indicate that, for example, expected performance will not be met or reported as mandated.Compliance events are used for reporting regulatory or contract compliance issues and/or variances. These might be created as a consequence of local business processes and associated rules. It is anticipated that this class will be customised extensively to meet local implementation needs. Use inherited 'category' to indicate that, for example, expected performance will not be met or reported as mandated. """ def __init__(self, deadline='', *args, **kw_args): """Initialises a new 'ComplianceEvent' instance. @param deadline: The deadline for compliance. """ #: The deadline for compliance. self.deadline = deadline super(ComplianceEvent, self).__init__(*args, **kw_args) _attrs = ["deadline"] _attr_types = {"deadline": str} _defaults = {"deadline": ''} _enums = {} _refs = [] _many_refs = []
56.886364
823
0.75869
from CIM16.IEC61968.Common.ActivityRecord import ActivityRecord class ComplianceEvent(ActivityRecord): def __init__(self, deadline='', *args, **kw_args): self.deadline = deadline super(ComplianceEvent, self).__init__(*args, **kw_args) _attrs = ["deadline"] _attr_types = {"deadline": str} _defaults = {"deadline": ''} _enums = {} _refs = [] _many_refs = []
true
true
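The record above follows PyCIM's convention of describing each class with class-level metadata (_attrs, _attr_types, _defaults, _refs). A minimal sketch of how such metadata can drive generic serialization, using a stand-in base class (Element) rather than PyCIM's ActivityRecord:

# Hedged sketch of the _attrs/_defaults metadata convention from the
# record above. Element is a stand-in base class, not part of PyCIM.
class Element(object):
    _attrs = []
    _defaults = {}

    def to_dict(self):
        # Walk the declared attributes, falling back to declared defaults.
        return {a: getattr(self, a, self._defaults.get(a))
                for a in self._attrs}

class ComplianceEvent(Element):
    _attrs = ['deadline']
    _attr_types = {'deadline': str}
    _defaults = {'deadline': ''}

    def __init__(self, deadline=''):
        self.deadline = deadline

print(ComplianceEvent(deadline='2011-06-30').to_dict())
# -> {'deadline': '2011-06-30'}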
f700169f42c4405db98ca51444ca7070b1d5d538
43,726
py
Python
looking_for_group/games/api_views.py
andrlik/looking-for-group
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
[ "BSD-3-Clause" ]
null
null
null
looking_for_group/games/api_views.py
andrlik/looking-for-group
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
[ "BSD-3-Clause" ]
null
null
null
looking_for_group/games/api_views.py
andrlik/looking-for-group
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
[ "BSD-3-Clause" ]
null
null
null
import logging from django.db.models.query_utils import Q from django.shortcuts import get_object_or_404 from django.utils.decorators import method_decorator from django_filters.rest_framework import DjangoFilterBackend from drf_yasg import openapi from drf_yasg.openapi import Parameter from drf_yasg.utils import no_body, swagger_auto_schema from notifications.signals import notify from rest_framework import mixins, status, viewsets from rest_framework.decorators import action from rest_framework.decorators import parser_classes as dparser_classes from rest_framework.parsers import FormParser, JSONParser, MultiPartParser from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework_extensions.mixins import DetailSerializerMixin, NestedViewSetMixin from looking_for_group.mixins import AutoPermissionViewSetMixin, ParentObjectAutoPermissionViewSetMixin from . import models, serializers from .signals import player_kicked, player_left logger = logging.getLogger("api") parent_lookup_game__slug = Parameter( name="parent_lookup_game__slug", in_="path", type="string", format=openapi.FORMAT_SLUG, description="Slug of related game object.", ) parent_lookup_session__slug = Parameter( name="parent_lookup_session__slug", in_="path", type="string", format=openapi.FORMAT_SLUG, description="Slug of related session object.", ) parent_lookup_session__game__slug = Parameter( name="parent_lookup_session__game__slug", in_="path", type="string", format=openapi.FORMAT_SLUG, description="Slug of related game object.", ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Games", operation_description="Fetch a list of game records. **NOTE**: You will probably want to filter by status at least.", ), ) @method_decorator( name="create", decorator=swagger_auto_schema( operation_summary="Game: Create", operation_description="Create a new game posting.", request_body=serializers.GameDataSerializer, responses={201: serializers.GameDataSerializer}, ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game: Details", operation_description="Fetch the details for the given game. **NOTE**: If you are not a member of the game, only a subset of the available information will be displayed.", responses={ 200: serializers.GameDataSerializer, 403: "You are not authorized to view this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Game: Update", operation_description="Update the details of this game. (Only available to GM)", request_body=serializers.GameDataSerializer, responses={ 200: serializers.GameDataSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Game: Update", operation_description="Update the details of this game. (Only available to GM)", request_body=serializers.GameDataSerializer, responses={ 200: serializers.GameDataSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Game: Delete", operation_description="Delete the given game. (Only available to GM.)", request_body=no_body, responses={204: "Game was deleted.", 403: "You are not the GM of this game."}, ), ) @method_decorator( name="leave", decorator=swagger_auto_schema( operation_summary="Game: Leave", operation_description="Leave the current game. 
(Players only.)", request_body=no_body, reponses={ 204: "You have successfully left the game.", 400: "You are not a member of this game.", 403: "You are the GM and cannot leave.", }, ), ) @method_decorator( name="apply", decorator=swagger_auto_schema( operation_summary="Game: Apply", operation_description="Apply to join this game.", request_body=serializers.GameApplicationSerializer, responses={ 201: serializers.GameApplicationSerializer, 400: "You are already a member of this game.", 403: "You are not permitted to apply to this game either due to your access rights or the game's status.", }, ), ) class GamePostingViewSet( AutoPermissionViewSetMixin, DetailSerializerMixin, NestedViewSetMixin, viewsets.ModelViewSet, ): """ A view set that allows the retrieval and manipulation of posted game data. """ permission_classes = (IsAuthenticated,) parser_classes = [FormParser, MultiPartParser] model = models.GamePosting lookup_field = "slug" lookup_url_kwarg = "slug" serializer_class = serializers.GameDataListSerializer serializer_detail_class = serializers.GameDataSerializer filter_backends = [DjangoFilterBackend] filterset_fields = [ "published_game", "game_system", "published_module", "status", "game_type", "game_mode", ] permission_type_map = { **AutoPermissionViewSetMixin.permission_type_map, "apply": "apply", "leave": "leave", } def get_queryset(self): gamer = self.request.user.gamerprofile friends = gamer.friends.all() communities = [f.id for f in gamer.communities.all()] game_player_ids = [ obj.game.id for obj in models.Player.objects.filter(gamer=gamer).select_related("game") ] q_gm = Q(gm=gamer) q_gm_is_friend = Q(gm__in=friends) & Q(privacy_level="community") q_isplayer = Q(id__in=game_player_ids) q_community = Q(communities__id__in=communities) & Q(privacy_level="community") q_public = Q(privacy_level="public") qs = models.GamePosting.objects.filter( q_gm | q_public | q_gm_is_friend | q_isplayer | q_community ).distinct() return qs def create(self, request, *args, **kwargs): self.serializer_class = serializers.GameDataSerializer return super().create(request, *args, **kwargs) def retrieve(self, request, *args, **kwargs): if not request.user.has_perm("game.is_member", self.get_object()): logger.debug( "User is not a member of game, swtiching serializer to list view mode." 
) self.serializer_detail_class = serializers.GameDataListSerializer return super().retrieve(request, *args, **kwargs) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def apply(self, request, *args, **kwargs): obj = self.get_object() logger.debug("Retrieved game object of {}".format(obj)) if request.user.has_perm("game.is_member", obj): return Response( data={"errors": "You are already in this game..."}, status=status.HTTP_400_BAD_REQUEST, ) new_application = serializers.GameApplicationSerializer( data=request.data, context={"request": request} ) if not new_application.is_valid(): return Response( data=new_application.errors, status=status.HTTP_400_BAD_REQUEST ) app = models.GamePostingApplication.objects.create( game=obj, gamer=request.user.gamerprofile, message=new_application.validated_data["message"], status="pending", ) notify.send( request.user.gamerprofile, recipient=obj.gm.user, verb="submitted application", action_object=app, target=obj, ) return Response( data=serializers.GameApplicationSerializer( app, context={"request": request} ).data, status=status.HTTP_201_CREATED, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def leave(self, request, *args, **kwargs): obj = self.get_object() if request.user == obj.gm.user: return Response( data={"errors": "The GM cannot leave the game."}, status=status.HTTP_400_BAD_REQUEST, ) player = models.Player.objects.get(gamer=request.user.gamerprofile, game=obj) player_left.send(models.Player, player=player) player.delete() return Response(status=status.HTTP_204_NO_CONTENT) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="Game: List Sessions", operation_description="List the sessions for the given game.", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game Session: Details", operation_description="Get the details for the given session. 
**NOTE**: If the user is just a player, the GM notes and player details will not be included.", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.GameSessionGMSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Game Session: Update", operation_description="Update details of the game session.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.GameSessionGMSerializer, responses={ 200: serializers.GameSessionGMSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Game Session: Update", operation_description="Update details of the game session.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.GameSessionGMSerializer, responses={ 200: serializers.GameSessionGMSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Game Session: Delete", operation_description="Delete the game session.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.GameSessionGMSerializer, responses={ 204: "Session was deleted.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="cancel", decorator=swagger_auto_schema( operation_summary="Game Session: Cancel", operation_description="Cancel the game session.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session is already canceled or complete.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="uncancel", decorator=swagger_auto_schema( operation_summary="Game Session: Uncancel", operation_description="Uncancel the game session.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session is not canceled.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="complete", decorator=swagger_auto_schema( operation_summary="Game Session: Mark Complete", operation_description="Mark the game session as complete.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session is already canceled or complete.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="uncomplete", decorator=swagger_auto_schema( operation_summary="Game Session: Uncomplete", operation_description="Undo the completion status of the session.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session isn't marked as complete.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="reschedule", decorator=swagger_auto_schema( operation_summary="Game Session: Reschedule", operation_description="Reschedule the game session to another date/time.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.ScheduleSerializer, responses={ 200: serializers.GameSessionGMSerializer, 400: "Your date and time were invalid or the session is already marked as complete or canceled.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="addlog", decorator=swagger_auto_schema( operation_summary="Game Session: Add Adventure Log", operation_description="Add an adventure log to this session.", 
manual_parameters=[parent_lookup_game__slug], request_body=serializers.AdventureLogSerializer, responses={ 201: serializers.AdventureLogSerializer, 400: "This session already has an adventure log. You should update that instead.", 403: "You don't have permission to add an adventure log.", }, ), ) class GameSessionViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): """ Views for seeing game session data. """ model = models.GameSession serializer_class = serializers.GameSessionSerializer lookup_field = "slug" lookup_url_kwarg = "slug" parent_dependent_actions = [ "create", "retrieve", "update", "partial_update", "list", "destroy", "reschedule", "cancel", "uncancel", "addlog", "complete", "uncomplete", ] parent_lookup_field = "game" parent_object_model = models.GamePosting parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_game__slug" permission_type_map = { **ParentObjectAutoPermissionViewSetMixin.permission_type_map, "addlog": "view", "reschedule": "change", "cancel": "change", "uncancel": "change", "complete": "change", "uncomplete": "change", } permission_type_map["list"] = "view" def get_parent_game(self): return get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) def get_queryset(self): return self.model.objects.filter( game__slug=self.kwargs["parent_lookup_game__slug"] ).order_by("-scheduled_time") def dispatch(self, request, *args, **kwargs): if ( request.user.is_authenticated and request.user.gamerprofile == self.get_parent_game().gm ): self.serializer_class = serializers.GameSessionGMSerializer return super().dispatch(request, *args, **kwargs) @action(methods=["post"], detail=True) def reschedule(self, request, *args, **kwargs): date_serializer = serializers.ScheduleSerializer(data=request.data) if not date_serializer.is_valid(): return Response( data=date_serializer.errors, status=status.HTTP_400_BAD_REQUEST ) obj = self.get_object() if obj.status in ["complete", "cancel"]: return Response( data={ "errors": "This session is already marked as {} and cannot be rescheduled.".format( obj.get_status_display() ) }, status=status.HTTP_400_BAD_REQUEST, ) obj.move(date_serializer.validated_data["new_scheduled_time"]) return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def complete(self, request, *args, **kwargs): obj = self.get_object() if obj.status in ["complete", "cancel"]: return Response( data={ "errors": "This object is either already completed or canceled and cannot be toggled to complete." }, status=status.HTTP_400_BAD_REQUEST, ) obj.status = "complete" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def uncomplete(self, request, *args, **kwargs): obj = self.get_object() if obj.status != "complete": return Response( data={ "errors": "This object is not completed and so completion cannot be undone." 
}, status=status.HTTP_400_BAD_REQUEST, ) obj.status = "pending" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def cancel(self, request, *args, **kwargs): obj = self.get_object() if obj.status in ["complete", "cancel"]: return Response( data={"errors": "This session is already completed or canceled."}, status=status.HTTP_400_BAD_REQUEST, ) obj.cancel() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def uncancel(self, request, *args, **kwargs): obj = self.get_object() if obj.status != "cancel": return Response( data={ "errors": "This session is not canceled and can't be changed this way." }, status=status.HTTP_400_BAD_REQUEST, ) obj.uncancel() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def addlog(self, request, *args, **kwargs): """ Create the adventure log for this session. """ session = self.get_object() if hasattr(session, "adventurelog"): return Response( data={"errors": "This session already has an adventure log."}, status=status.HTTP_400_BAD_REQUEST, ) log_serializer = serializers.AdventureLogSerializer( session=session, data=request.data, context={"request": request} ) if not log_serializer.is_valid(): return Response( data=log_serializer.errors, status=status.HTTP_400_BAD_REQUEST ) new_log = log_serializer.save() return Response( data=serializers.AdventureLogSerializer( new_log, context={"request": request} ).data, status=status.HTTP_201_CREATED, ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Adventure Log: Details", operation_description="Fetch the details for a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], responses={ 200: serializers.AdventureLogSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Adventure Log: Update", operation_description="Update the details for a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], request_body=serializers.AdventureLogSerializer, responses={ 200: serializers.AdventureLogSerializer, 403: "You don't have permissions to edit this adventure log.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Adventure Log: Update", operation_description="Update the details for a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], request_body=serializers.AdventureLogSerializer, responses={ 200: serializers.AdventureLogSerializer, 403: "You don't have permissions to edit this adventure log.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Adventure Log: Delete", operation_description="Delete a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], request_body=no_body, responses={ 204: "The adventure log was successfully deleted.", 403: "You don't have permissions to edit this adventure log.", }, ), ) class AdventureLogViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, 
viewsets.GenericViewSet, ): """ Provides views for the adventure logs attached to game sessions. """ model = models.AdventureLog parent_lookup_field = "session__game" parent_object_model = models.GamePosting parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_session__game__slug" serializer_class = serializers.AdventureLogSerializer lookup_field = "slug" lookup_url_kwarg = "slug" permission_required = "game.is_member" permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map} permission_type_map["list"] = "add" parent_dependent_actions = [ "create", "retrieve", "update", "partial_update", "destroy", ] def get_queryset(self): return models.AdventureLog.objects.filter( session__slug=self.kwargs["parent_lookup_session__slug"] ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Your Game Applications", operation_description="Fetch a list of all your game applications.", ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Your Game Application: Details", operation_description="Fetch the details of your game application.", ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Your Game Application: Update", operation_description="Update the details of your game application.", ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Your Game Application: Update", operation_description="Update the details of your game application.", ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Your Game Application: Withdraw", operation_description="Withdraw your game application by deleting the record.", ), ) class GameApplicationViewSet( AutoPermissionViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): """ View for an applicant to review, create, update, and delete their applications to games. """ permission_classes = (IsAuthenticated,) serializer_class = serializers.GameApplicationSerializer filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] lookup_field = "slug" lookup_url_kwarg = "slug" permission_type_map = {**AutoPermissionViewSetMixin.permission_type_map} def get_queryset(self): logger.debug("Fetching gamerprofile from request...") gamer = self.request.user.gamerprofile logger.debug("Fetching game applications for gamer {}".format(gamer)) qs = models.GamePostingApplication.objects.filter( gamer=self.request.user.gamerprofile ).order_by("-modified", "-created", "status") logger.debug( "Retrieved queryset of length {} for gamer {}".format( qs.count(), self.request.user.gamerprofile ) ) return qs @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Applicants for Game", operation_description="List the applicants for the current game. (GM Only)", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game Applicant: Details", operation_description="Fetch details for a given game application. 
(GM Only)", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.GameApplicationGMSerializer, 403: "You are not the GM for this game.", }, ), ) @method_decorator( name="approve", decorator=swagger_auto_schema( operation_summary="Game Applicant: Approve", operation_description="Approve the game applicant and add them as a player to the game.", request_body=no_body, responses={ 201: serializers.PlayerSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="reject", decorator=swagger_auto_schema( operation_summary="Game Applicant: Reject", operation_description="Reject the game applicant.", request_body=no_body, responses={ 200: serializers.GameApplicationGMSerializer, 403: "You are not the GM of this game.", }, ), ) class GMGameApplicationViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet, ): """ View for a GM to review and approve applicants. """ permission_classes = (IsAuthenticated,) serializer_class = serializers.GameApplicationGMSerializer filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] lookup_field = "slug" lookup_url_kwarg = "slug" parent_lookup_field = "game" parent_object_lookup_field = "slug" parent_object_model = models.GamePosting parent_object_url_kwarg = "parent_lookup_game__slug" parent_dependent_actions = ["list", "retrieve", "approve", "reject"] permission_type_map = { **ParentObjectAutoPermissionViewSetMixin.permission_type_map, "approve": "approve", "reject": "approve", } permission_type_map["retrieve"] = "approve" permission_type_map["list"] = "approve" def get_queryset(self): return models.GamePostingApplication.objects.filter( game__slug=self.kwargs["parent_lookup_game__slug"] ).exclude(status="new") def get_parent_game(self): return get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) @action(methods=["post"], detail=True) def approve(self, request, *args, **kwargs): """ Approves the game application. """ obj = self.get_object() obj.status = "approve" player = models.Player.objects.create(game=obj.game, gamer=obj.gamer) obj.save() return Response( data=serializers.PlayerSerializer( player, context={"request": request} ).data, status=status.HTTP_201_CREATED, ) @action(methods=["post"], detail=True) def reject(self, request, *args, **kwargs): """ Rejects the game application. 
""" obj = self.get_object() obj.status = "deny" obj.save() notify.send( obj, recipient=obj.gamer.user, verb="Your player application was not accepted", action_object=obj, target=obj.game, ) return Response( data=serializers.GameApplicationSerializer( obj, context={"request": request} ).data, status=status.HTTP_200_OK, ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="Game: Player List", operation_description="List players for a given game", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Player: Details", operation_description="Details for a player record in a given game.", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.PlayerSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="kick", decorator=swagger_auto_schema( operation_summary="Player: Kick from game", operation_description="Kick the player out of the game.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 204: "Player was removed from the game.", 403: "You are not the GM of this game.", }, ), ) class PlayerViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet, ): """ Provides views for players in a given game. """ permission_classes = (IsAuthenticated,) serializer_class = serializers.PlayerSerializer permission_required = "game.is_member" lookup_field = "slug" lookup_url_kwarg = "slug" parent_lookup_field = "game" parent_object_model = models.GamePosting parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_game__slug" parent_dependent_actions = ["list", "retrieve"] permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map} permission_type_map["list"] = "view" def get_parent_game(self): return get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) def get_queryset(self): return models.Player.objects.filter(game=self.get_parent_game()) @action(methods=["post"], detail=True) def kick(self, request, *args, **kwargs): obj = self.get_object() player_kicked.send(request.user, player=obj) obj.delete() return Response(status=status.HTTP_204_NO_CONTENT) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="Game: List Characters", operation_description="Fetch the list of characters for a given game.", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game: Character Details", operation_description="Fetch the details of a character for a given game.", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.CharacterSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Game: Update Character Details", operation_description="Update the character for the given game.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.CharacterSerializer, responses={ 200: serializers.CharacterSerializer, 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Game: Update Character Details", operation_description="Update the character for the given game.", manual_parameters=[parent_lookup_game__slug], 
request_body=serializers.CharacterSerializer, responses={ 200: serializers.CharacterSerializer, 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="deactivate", decorator=swagger_auto_schema( operation_summary="Game: Deactivate Character", operation_description="Mark the character as inactive.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already inactive.", 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="reactivate", decorator=swagger_auto_schema( operation_summary="Game: Reactivate Character", operation_description="Mark the character as active.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already active.", 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Game: Delete Character", operation_description="Delete the character.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 204: "Character was deleted.", 403: "You are not the owner of this character.", }, ), ) @method_decorator( name="approve", decorator=swagger_auto_schema( operation_summary="Game: Approve Character", operation_description="Mark the character as approved (GM Only).", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already approved.", 403: "You are not the GM of the game.", }, ), ) @method_decorator( name="reject", decorator=swagger_auto_schema( operation_summary="Game: Reject Character", operation_description="Mark the character as rejected (GM Only).", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already rejected.", 403: "You are not the GM of the game.", }, ), ) class CharacterViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, viewsets.ModelViewSet ): """ Provides views for the characters in a game. 
""" permission_classes = (IsAuthenticated,) parser_classes = [FormParser, MultiPartParser] parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_game__slug" parent_lookup_field = "game" parent_object_model = models.GamePosting parent_dependent_actions = ["create", "list", "retrieve"] serializer_class = serializers.CharacterSerializer lookup_field = "slug" lookup_url_kwarg = "slug" filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] parent_game = None permission_type_map = { **ParentObjectAutoPermissionViewSetMixin.permission_type_map, "approve": "approve", "reject": "approve", "deactivate": "delete", "reactivate": "delete", } permission_type_map["list"] = "gamelist" def get_parent_game(self): if not self.parent_game: self.parent_game = get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) return self.parent_game def get_queryset(self): return models.Character.objects.filter(game=self.get_parent_game()) def create(self, request, *args, **kwargs): if request.user.gamerprofile == self.get_parent_game().gm: return Response( data={"errors": "Only a player can create a character."}, status=status.HTTP_403_FORBIDDEN, ) char_ser = serializers.CharacterSerializer( data=request.data, context={"request": request, "game": self.get_parent_game()}, ) if not char_ser.is_valid(): return Response(data=char_ser.errors, status=status.HTTP_400_BAD_REQUEST) char_ser.save() return Response(data=char_ser.data, status=status.HTTP_201_CREATED) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def approve(self, request, *args, **kwargs): """ Approves the proposed character. """ obj = self.get_object() obj.status = "approved" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def reject(self, request, *args, **kwargs): """ Rejects the proposed character. """ obj = self.get_object() obj.status = "rejected" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def deactivate(self, request, *args, **kwargs): """ Make a character inactive. """ obj = self.get_object() obj.status = "inactive" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def reactivate(self, request, *args, **kwargs): """ Reactivate an inactive character. 
""" obj = self.get_object() obj.status = "pending" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Your Characters", operation_description="Fetch a list of all of your characters.", ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Your Character: Details", operation_description="Fetch the details of your character.", ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Your Character: Update", operation_description="Update the details of your character.", ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Your Character: Update", operation_description="Update the details of your character.", ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Your Character: Delete", operation_description="Delete your character.", request_body=no_body, responses={204: "Character was deleted."}, ), ) @method_decorator( name="deactivate", decorator=swagger_auto_schema( operation_summary="Your Character: Deactivate", operation_description="Mark your character as inactive.", request_body=no_body, responses={ 200: "Character was marked as inactive.", 400: "Character was already inactive.", }, ), ) @method_decorator( name="reactivate", decorator=swagger_auto_schema( operation_summary="Your Character: Reactivate", operation_description="Mark your character as active.", request_body=no_body, responses={ 200: "Character was marked as active.", 400: "Character was already active.", }, ), ) class MyCharacterViewSet( AutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): """ Provides a vew so that players can view all their characters in one place. """ serializer_class = serializers.CharacterSerializer permission_classes = (IsAuthenticated,) lookup_field = "slug" lookup_url_kwarg = "slug" filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] permission_type_map = { **AutoPermissionViewSetMixin.permission_type_map, "deactivate": "delete", "reactivate": "delete", } permission_type_map["retrieve"] = "delete" parser_classes = [FormParser, MultiPartParser] def get_queryset(self): return models.Character.objects.filter( player__gamer=self.request.user.gamerprofile ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def deactivate(self, request, *args, **kwargs): """ Make a character inactive. """ obj = self.get_object() obj.status = "inactive" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def reactivate(self, request, *args, **kwargs): """ Reactivate an inactive character. """ obj = self.get_object() obj.status = "pending" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @dparser_classes([FormParser, JSONParser]) def destroy(self, request, *args, **kwargs): self.parser_classes = [FormParser, JSONParser] return super().destroy(request, *args, **kwargs)
34.160938
179
0.644353
import logging from django.db.models.query_utils import Q from django.shortcuts import get_object_or_404 from django.utils.decorators import method_decorator from django_filters.rest_framework import DjangoFilterBackend from drf_yasg import openapi from drf_yasg.openapi import Parameter from drf_yasg.utils import no_body, swagger_auto_schema from notifications.signals import notify from rest_framework import mixins, status, viewsets from rest_framework.decorators import action from rest_framework.decorators import parser_classes as dparser_classes from rest_framework.parsers import FormParser, JSONParser, MultiPartParser from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework_extensions.mixins import DetailSerializerMixin, NestedViewSetMixin from looking_for_group.mixins import AutoPermissionViewSetMixin, ParentObjectAutoPermissionViewSetMixin from . import models, serializers from .signals import player_kicked, player_left logger = logging.getLogger("api") parent_lookup_game__slug = Parameter( name="parent_lookup_game__slug", in_="path", type="string", format=openapi.FORMAT_SLUG, description="Slug of related game object.", ) parent_lookup_session__slug = Parameter( name="parent_lookup_session__slug", in_="path", type="string", format=openapi.FORMAT_SLUG, description="Slug of related session object.", ) parent_lookup_session__game__slug = Parameter( name="parent_lookup_session__game__slug", in_="path", type="string", format=openapi.FORMAT_SLUG, description="Slug of related game object.", ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Games", operation_description="Fetch a list of game records. **NOTE**: You will probably want to filter by status at least.", ), ) @method_decorator( name="create", decorator=swagger_auto_schema( operation_summary="Game: Create", operation_description="Create a new game posting.", request_body=serializers.GameDataSerializer, responses={201: serializers.GameDataSerializer}, ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game: Details", operation_description="Fetch the details for the given game. **NOTE**: If you are not a member of the game, only a subset of the available information will be displayed.", responses={ 200: serializers.GameDataSerializer, 403: "You are not authorized to view this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Game: Update", operation_description="Update the details of this game. (Only available to GM)", request_body=serializers.GameDataSerializer, responses={ 200: serializers.GameDataSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Game: Update", operation_description="Update the details of this game. (Only available to GM)", request_body=serializers.GameDataSerializer, responses={ 200: serializers.GameDataSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Game: Delete", operation_description="Delete the given game. (Only available to GM.)", request_body=no_body, responses={204: "Game was deleted.", 403: "You are not the GM of this game."}, ), ) @method_decorator( name="leave", decorator=swagger_auto_schema( operation_summary="Game: Leave", operation_description="Leave the current game. 
(Players only.)", request_body=no_body, responses={ 204: "You have successfully left the game.", 400: "You are not a member of this game.", 403: "You are the GM and cannot leave.", }, ), ) @method_decorator( name="apply", decorator=swagger_auto_schema( operation_summary="Game: Apply", operation_description="Apply to join this game.", request_body=serializers.GameApplicationSerializer, responses={ 201: serializers.GameApplicationSerializer, 400: "You are already a member of this game.", 403: "You are not permitted to apply to this game either due to your access rights or the game's status.", }, ), ) class GamePostingViewSet( AutoPermissionViewSetMixin, DetailSerializerMixin, NestedViewSetMixin, viewsets.ModelViewSet, ): permission_classes = (IsAuthenticated,) parser_classes = [FormParser, MultiPartParser] model = models.GamePosting lookup_field = "slug" lookup_url_kwarg = "slug" serializer_class = serializers.GameDataListSerializer serializer_detail_class = serializers.GameDataSerializer filter_backends = [DjangoFilterBackend] filterset_fields = [ "published_game", "game_system", "published_module", "status", "game_type", "game_mode", ] permission_type_map = { **AutoPermissionViewSetMixin.permission_type_map, "apply": "apply", "leave": "leave", } def get_queryset(self): gamer = self.request.user.gamerprofile friends = gamer.friends.all() communities = [f.id for f in gamer.communities.all()] game_player_ids = [ obj.game.id for obj in models.Player.objects.filter(gamer=gamer).select_related("game") ] q_gm = Q(gm=gamer) q_gm_is_friend = Q(gm__in=friends) & Q(privacy_level="community") q_isplayer = Q(id__in=game_player_ids) q_community = Q(communities__id__in=communities) & Q(privacy_level="community") q_public = Q(privacy_level="public") qs = models.GamePosting.objects.filter( q_gm | q_public | q_gm_is_friend | q_isplayer | q_community ).distinct() return qs def create(self, request, *args, **kwargs): self.serializer_class = serializers.GameDataSerializer return super().create(request, *args, **kwargs) def retrieve(self, request, *args, **kwargs): if not request.user.has_perm("game.is_member", self.get_object()): logger.debug( "User is not a member of game, switching serializer to list view mode." 
) self.serializer_detail_class = serializers.GameDataListSerializer return super().retrieve(request, *args, **kwargs) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def apply(self, request, *args, **kwargs): obj = self.get_object() logger.debug("Retrieved game object of {}".format(obj)) if request.user.has_perm("game.is_member", obj): return Response( data={"errors": "You are already in this game..."}, status=status.HTTP_400_BAD_REQUEST, ) new_application = serializers.GameApplicationSerializer( data=request.data, context={"request": request} ) if not new_application.is_valid(): return Response( data=new_application.errors, status=status.HTTP_400_BAD_REQUEST ) app = models.GamePostingApplication.objects.create( game=obj, gamer=request.user.gamerprofile, message=new_application.validated_data["message"], status="pending", ) notify.send( request.user.gamerprofile, recipient=obj.gm.user, verb="submitted application", action_object=app, target=obj, ) return Response( data=serializers.GameApplicationSerializer( app, context={"request": request} ).data, status=status.HTTP_201_CREATED, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def leave(self, request, *args, **kwargs): obj = self.get_object() if request.user == obj.gm.user: return Response( data={"errors": "The GM cannot leave the game."}, status=status.HTTP_400_BAD_REQUEST, ) player = models.Player.objects.get(gamer=request.user.gamerprofile, game=obj) player_left.send(models.Player, player=player) player.delete() return Response(status=status.HTTP_204_NO_CONTENT) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="Game: List Sessions", operation_description="List the sessions for the given game.", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game Session: Details", operation_description="Get the details for the given session. 
**NOTE**: If the user is just a player, the GM notes and player details will not be included.", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.GameSessionGMSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Game Session: Update", operation_description="Update details of the game session.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.GameSessionGMSerializer, responses={ 200: serializers.GameSessionGMSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Game Session: Update", operation_description="Update details of the game session.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.GameSessionGMSerializer, responses={ 200: serializers.GameSessionGMSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Game Session: Delete", operation_description="Delete the game session.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.GameSessionGMSerializer, responses={ 204: "Session was deleted.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="cancel", decorator=swagger_auto_schema( operation_summary="Game Session: Cancel", operation_description="Cancel the game session.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session is already canceled or complete.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="uncancel", decorator=swagger_auto_schema( operation_summary="Game Session: Uncancel", operation_description="Uncancel the game session.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session is not canceled.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="complete", decorator=swagger_auto_schema( operation_summary="Game Session: Mark Complete", operation_description="Mark the game session as complete.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session is already canceled or complete.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="uncomplete", decorator=swagger_auto_schema( operation_summary="Game Session: Uncomplete", operation_description="Undo the completion status of the session.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.GameSessionGMSerializer, 400: "This session isn't marked as complete.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="reschedule", decorator=swagger_auto_schema( operation_summary="Game Session: Reschedule", operation_description="Reschedule the game session to another date/time.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.ScheduleSerializer, responses={ 200: serializers.GameSessionGMSerializer, 400: "Your date and time were invalid or the session is already marked as complete or canceled.", 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="addlog", decorator=swagger_auto_schema( operation_summary="Game Session: Add Adventure Log", operation_description="Add an adventure log to this session.", 
manual_parameters=[parent_lookup_game__slug], request_body=serializers.AdventureLogSerializer, responses={ 201: serializers.AdventureLogSerializer, 400: "This session already has an adventure log. You should update that instead.", 403: "You don't have permission to add an adventure log.", }, ), ) class GameSessionViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): model = models.GameSession serializer_class = serializers.GameSessionSerializer lookup_field = "slug" lookup_url_kwarg = "slug" parent_dependent_actions = [ "create", "retrieve", "update", "partial_update", "list", "destroy", "reschedule", "cancel", "uncancel", "addlog", "complete", "uncomplete", ] parent_lookup_field = "game" parent_object_model = models.GamePosting parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_game__slug" permission_type_map = { **ParentObjectAutoPermissionViewSetMixin.permission_type_map, "addlog": "view", "reschedule": "change", "cancel": "change", "uncancel": "change", "complete": "change", "uncomplete": "change", } permission_type_map["list"] = "view" def get_parent_game(self): return get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) def get_queryset(self): return self.model.objects.filter( game__slug=self.kwargs["parent_lookup_game__slug"] ).order_by("-scheduled_time") def dispatch(self, request, *args, **kwargs): if ( request.user.is_authenticated and request.user.gamerprofile == self.get_parent_game().gm ): self.serializer_class = serializers.GameSessionGMSerializer return super().dispatch(request, *args, **kwargs) @action(methods=["post"], detail=True) def reschedule(self, request, *args, **kwargs): date_serializer = serializers.ScheduleSerializer(data=request.data) if not date_serializer.is_valid(): return Response( data=date_serializer.errors, status=status.HTTP_400_BAD_REQUEST ) obj = self.get_object() if obj.status in ["complete", "cancel"]: return Response( data={ "errors": "This session is already marked as {} and cannot be rescheduled.".format( obj.get_status_display() ) }, status=status.HTTP_400_BAD_REQUEST, ) obj.move(date_serializer.validated_data["new_scheduled_time"]) return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def complete(self, request, *args, **kwargs): obj = self.get_object() if obj.status in ["complete", "cancel"]: return Response( data={ "errors": "This object is either already completed or canceled and cannot be toggled to complete." }, status=status.HTTP_400_BAD_REQUEST, ) obj.status = "complete" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def uncomplete(self, request, *args, **kwargs): obj = self.get_object() if obj.status != "complete": return Response( data={ "errors": "This object is not completed and so completion cannot be undone." 
}, status=status.HTTP_400_BAD_REQUEST, ) obj.status = "pending" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def cancel(self, request, *args, **kwargs): obj = self.get_object() if obj.status in ["complete", "cancel"]: return Response( data={"errors": "This session is already completed or canceled."}, status=status.HTTP_400_BAD_REQUEST, ) obj.cancel() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def uncancel(self, request, *args, **kwargs): obj = self.get_object() if obj.status != "cancel": return Response( data={ "errors": "This session is not canceled and can't be changed this way." }, status=status.HTTP_400_BAD_REQUEST, ) obj.uncancel() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True) def addlog(self, request, *args, **kwargs): session = self.get_object() if hasattr(session, "adventurelog"): return Response( data={"errors": "This session already has an adventure log."}, status=status.HTTP_400_BAD_REQUEST, ) log_serializer = serializers.AdventureLogSerializer( session=session, data=request.data, context={"request": request} ) if not log_serializer.is_valid(): return Response( data=log_serializer.errors, status=status.HTTP_400_BAD_REQUEST ) new_log = log_serializer.save() return Response( data=serializers.AdventureLogSerializer( new_log, context={"request": request} ).data, status=status.HTTP_201_CREATED, ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Adventure Log: Details", operation_description="Fetch the details for a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], responses={ 200: serializers.AdventureLogSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Adventure Log: Update", operation_description="Update the details for a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], request_body=serializers.AdventureLogSerializer, responses={ 200: serializers.AdventureLogSerializer, 403: "You don't have permissions to edit this adventure log.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Adventure Log: Update", operation_description="Update the details for a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], request_body=serializers.AdventureLogSerializer, responses={ 200: serializers.AdventureLogSerializer, 403: "You don't have permissions to edit this adventure log.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Adventure Log: Delete", operation_description="Delete a given adventure log.", manual_parameters=[ parent_lookup_session__game__slug, parent_lookup_session__slug, ], request_body=no_body, responses={ 204: "The adventure log was successfully deleted.", 403: "You don't have permissions to edit this adventure log.", }, ), ) class AdventureLogViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): model = models.AdventureLog 
parent_lookup_field = "session__game" parent_object_model = models.GamePosting parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_session__game__slug" serializer_class = serializers.AdventureLogSerializer lookup_field = "slug" lookup_url_kwarg = "slug" permission_required = "game.is_member" permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map} permission_type_map["list"] = "add" parent_dependent_actions = [ "create", "retrieve", "update", "partial_update", "destroy", ] def get_queryset(self): return models.AdventureLog.objects.filter( session__slug=self.kwargs["parent_lookup_session__slug"] ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Your Game Applications", operation_description="Fetch a list of all your game applications.", ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Your Game Application: Details", operation_description="Fetch the details of your game application.", ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Your Game Application: Update", operation_description="Update the details of your game application.", ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Your Game Application: Update", operation_description="Update the details of your game application.", ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Your Game Application: Withdraw", operation_description="Withdraw your game application by deleting the record.", ), ) class GameApplicationViewSet( AutoPermissionViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): permission_classes = (IsAuthenticated,) serializer_class = serializers.GameApplicationSerializer filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] lookup_field = "slug" lookup_url_kwarg = "slug" permission_type_map = {**AutoPermissionViewSetMixin.permission_type_map} def get_queryset(self): logger.debug("Fetching gamerprofile from request...") gamer = self.request.user.gamerprofile logger.debug("Fetching game applications for gamer {}".format(gamer)) qs = models.GamePostingApplication.objects.filter( gamer=self.request.user.gamerprofile ).order_by("-modified", "-created", "status") logger.debug( "Retrieved queryset of length {} for gamer {}".format( qs.count(), self.request.user.gamerprofile ) ) return qs @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Applicants for Game", operation_description="List the applicants for the current game. (GM Only)", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game Applicant: Details", operation_description="Fetch details for a given game application. 
(GM Only)", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.GameApplicationGMSerializer, 403: "You are not the GM for this game.", }, ), ) @method_decorator( name="approve", decorator=swagger_auto_schema( operation_summary="Game Applicant: Approve", operation_description="Approve the game applicant and add them as a player to the game.", request_body=no_body, responses={ 201: serializers.PlayerSerializer, 403: "You are not the GM of this game.", }, ), ) @method_decorator( name="reject", decorator=swagger_auto_schema( operation_summary="Game Applicant: Reject", operation_description="Reject the game applicant.", request_body=no_body, responses={ 200: serializers.GameApplicationGMSerializer, 403: "You are not the GM of this game.", }, ), ) class GMGameApplicationViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet, ): permission_classes = (IsAuthenticated,) serializer_class = serializers.GameApplicationGMSerializer filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] lookup_field = "slug" lookup_url_kwarg = "slug" parent_lookup_field = "game" parent_object_lookup_field = "slug" parent_object_model = models.GamePosting parent_object_url_kwarg = "parent_lookup_game__slug" parent_dependent_actions = ["list", "retrieve", "approve", "reject"] permission_type_map = { **ParentObjectAutoPermissionViewSetMixin.permission_type_map, "approve": "approve", "reject": "approve", } permission_type_map["retrieve"] = "approve" permission_type_map["list"] = "approve" def get_queryset(self): return models.GamePostingApplication.objects.filter( game__slug=self.kwargs["parent_lookup_game__slug"] ).exclude(status="new") def get_parent_game(self): return get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) @action(methods=["post"], detail=True) def approve(self, request, *args, **kwargs): obj = self.get_object() obj.status = "approve" player = models.Player.objects.create(game=obj.game, gamer=obj.gamer) obj.save() return Response( data=serializers.PlayerSerializer( player, context={"request": request} ).data, status=status.HTTP_201_CREATED, ) @action(methods=["post"], detail=True) def reject(self, request, *args, **kwargs): obj = self.get_object() obj.status = "deny" obj.save() notify.send( obj, recipient=obj.gamer.user, verb="Your player application was not accepted", action_object=obj, target=obj.game, ) return Response( data=serializers.GameApplicationSerializer( obj, context={"request": request} ).data, status=status.HTTP_200_OK, ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="Game: Player List", operation_description="List players for a given game", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Player: Details", operation_description="Details for a player record in a given game.", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.PlayerSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="kick", decorator=swagger_auto_schema( operation_summary="Player: Kick from game", operation_description="Kick the player out of the game.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 204: "Player was removed from the game.", 403: "You are not the GM of this game.", }, ), ) class PlayerViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, 
mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet, ): permission_classes = (IsAuthenticated,) serializer_class = serializers.PlayerSerializer permission_required = "game.is_member" lookup_field = "slug" lookup_url_kwarg = "slug" parent_lookup_field = "game" parent_object_model = models.GamePosting parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_game__slug" parent_dependent_actions = ["list", "retrieve"] permission_type_map = {**ParentObjectAutoPermissionViewSetMixin.permission_type_map} permission_type_map["list"] = "view" def get_parent_game(self): return get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) def get_queryset(self): return models.Player.objects.filter(game=self.get_parent_game()) @action(methods=["post"], detail=True) def kick(self, request, *args, **kwargs): obj = self.get_object() player_kicked.send(request.user, player=obj) obj.delete() return Response(status=status.HTTP_204_NO_CONTENT) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="Game: List Characters", operation_description="Fetch the list of characters for a given game.", manual_parameters=[parent_lookup_game__slug], ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Game: Character Details", operation_description="Fetch the details of a character for a given game.", manual_parameters=[parent_lookup_game__slug], responses={ 200: serializers.CharacterSerializer, 403: "You are not a member of this game.", }, ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Game: Update Character Details", operation_description="Update the character for the given game.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.CharacterSerializer, responses={ 200: serializers.CharacterSerializer, 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Game: Update Character Details", operation_description="Update the character for the given game.", manual_parameters=[parent_lookup_game__slug], request_body=serializers.CharacterSerializer, responses={ 200: serializers.CharacterSerializer, 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="deactivate", decorator=swagger_auto_schema( operation_summary="Game: Deactivate Character", operation_description="Mark the character as inactive.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already inactive.", 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="reactivate", decorator=swagger_auto_schema( operation_summary="Game: Reactivate Character", operation_description="Mark the character as active.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already active.", 403: "You are not the owner of this character or the GM of the game.", }, ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Game: Delete Character", operation_description="Delete the character.", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 204: "Character was deleted.", 403: "You are not the owner of this character.", }, ), ) @method_decorator( 
name="approve", decorator=swagger_auto_schema( operation_summary="Game: Approve Character", operation_description="Mark the character as approved (GM Only).", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already approved.", 403: "You are not the GM of the game.", }, ), ) @method_decorator( name="reject", decorator=swagger_auto_schema( operation_summary="Game: Reject Character", operation_description="Mark the character as rejected (GM Only).", manual_parameters=[parent_lookup_game__slug], request_body=no_body, responses={ 200: serializers.CharacterSerializer, 400: "This character is already rejected.", 403: "You are not the GM of the game.", }, ), ) class CharacterViewSet( ParentObjectAutoPermissionViewSetMixin, NestedViewSetMixin, viewsets.ModelViewSet ): permission_classes = (IsAuthenticated,) parser_classes = [FormParser, MultiPartParser] parent_object_lookup_field = "slug" parent_object_url_kwarg = "parent_lookup_game__slug" parent_lookup_field = "game" parent_object_model = models.GamePosting parent_dependent_actions = ["create", "list", "retrieve"] serializer_class = serializers.CharacterSerializer lookup_field = "slug" lookup_url_kwarg = "slug" filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] parent_game = None permission_type_map = { **ParentObjectAutoPermissionViewSetMixin.permission_type_map, "approve": "approve", "reject": "approve", "deactivate": "delete", "reactivate": "delete", } permission_type_map["list"] = "gamelist" def get_parent_game(self): if not self.parent_game: self.parent_game = get_object_or_404( models.GamePosting, slug=self.kwargs["parent_lookup_game__slug"] ) return self.parent_game def get_queryset(self): return models.Character.objects.filter(game=self.get_parent_game()) def create(self, request, *args, **kwargs): if request.user.gamerprofile == self.get_parent_game().gm: return Response( data={"errors": "Only a player can create a character."}, status=status.HTTP_403_FORBIDDEN, ) char_ser = serializers.CharacterSerializer( data=request.data, context={"request": request, "game": self.get_parent_game()}, ) if not char_ser.is_valid(): return Response(data=char_ser.errors, status=status.HTTP_400_BAD_REQUEST) char_ser.save() return Response(data=char_ser.data, status=status.HTTP_201_CREATED) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def approve(self, request, *args, **kwargs): obj = self.get_object() obj.status = "approved" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def reject(self, request, *args, **kwargs): obj = self.get_object() obj.status = "rejected" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def deactivate(self, request, *args, **kwargs): obj = self.get_object() obj.status = "inactive" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def reactivate(self, request, *args, **kwargs): obj = self.get_object() obj.status = "pending" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, 
status=status.HTTP_200_OK, ) @method_decorator( name="list", decorator=swagger_auto_schema( operation_summary="List Your Characters", operation_description="Fetch a list of all of your characters.", ), ) @method_decorator( name="retrieve", decorator=swagger_auto_schema( operation_summary="Your Character: Details", operation_description="Fetch the details of your character.", ), ) @method_decorator( name="update", decorator=swagger_auto_schema( operation_summary="Your Character: Update", operation_description="Update the details of your character.", ), ) @method_decorator( name="partial_update", decorator=swagger_auto_schema( operation_summary="Your Character: Update", operation_description="Update the details of your character.", ), ) @method_decorator( name="destroy", decorator=swagger_auto_schema( operation_summary="Your Character: Delete", operation_description="Delete your character.", request_body=no_body, responses={204: "Character was deleted."}, ), ) @method_decorator( name="deactivate", decorator=swagger_auto_schema( operation_summary="Your Character: Deactivate", operation_description="Mark your character as inactive.", request_body=no_body, responses={ 200: "Character was marked as inactive.", 400: "Character was already inactive.", }, ), ) @method_decorator( name="reactivate", decorator=swagger_auto_schema( operation_summary="Your Character: Reactivate", operation_description="Mark your character as active.", request_body=no_body, responses={ 200: "Character was marked as active.", 400: "Character was already active.", }, ), ) class MyCharacterViewSet( AutoPermissionViewSetMixin, NestedViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, viewsets.GenericViewSet, ): serializer_class = serializers.CharacterSerializer permission_classes = (IsAuthenticated,) lookup_field = "slug" lookup_url_kwarg = "slug" filter_backends = [DjangoFilterBackend] filterset_fields = ["status"] permission_type_map = { **AutoPermissionViewSetMixin.permission_type_map, "deactivate": "delete", "reactivate": "delete", } permission_type_map["retrieve"] = "delete" parser_classes = [FormParser, MultiPartParser] def get_queryset(self): return models.Character.objects.filter( player__gamer=self.request.user.gamerprofile ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def deactivate(self, request, *args, **kwargs): obj = self.get_object() obj.status = "inactive" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @action(methods=["post"], detail=True, parser_classes=[FormParser, JSONParser]) def reactivate(self, request, *args, **kwargs): obj = self.get_object() obj.status = "pending" obj.save() return Response( data=self.serializer_class(obj, context={"request": request}).data, status=status.HTTP_200_OK, ) @dparser_classes([FormParser, JSONParser]) def destroy(self, request, *args, **kwargs): self.parser_classes = [FormParser, JSONParser] return super().destroy(request, *args, **kwargs)
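The player_left and player_kicked signals imported above are custom Django signals; a plausible minimal shape for the signals module is sketched below. Only the signal names are taken from the imports in this row; the receiver is purely illustrative.

# Hypothetical sketch of the .signals module (receiver invented).
import django.dispatch

player_left = django.dispatch.Signal()    # sent with kwarg: player
player_kicked = django.dispatch.Signal()  # sent with kwarg: player

@django.dispatch.receiver(player_kicked)
def on_player_kicked(sender, player, **kwargs):
    # React to the kick here, e.g. notify the removed player.
    ...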
true
true
f700171313ea3cd9aba9f32edd0869bb57ac51e3
2,851
py
Python
test/testsql.py
ckarnell/sqlalchemy-stubs
055c198e26ca700b2d4a365c06bedcfbed0de176
[ "Apache-2.0" ]
null
null
null
test/testsql.py
ckarnell/sqlalchemy-stubs
055c198e26ca700b2d4a365c06bedcfbed0de176
[ "Apache-2.0" ]
1
2019-11-29T16:25:29.000Z
2020-01-23T16:13:05.000Z
test/testsql.py
ckarnell/sqlalchemy-stubs
055c198e26ca700b2d4a365c06bedcfbed0de176
[ "Apache-2.0" ]
1
2019-11-29T16:12:37.000Z
2019-11-29T16:12:37.000Z
"""Mypy style test cases for SQLAlchemy stubs and plugin.""" import os import os.path import sys import pytest # type: ignore # no pytest in typeshed from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.util import try_find_python2_interpreter from mypy import api this_file_dir = os.path.dirname(os.path.realpath(__file__)) prefix = os.path.dirname(this_file_dir) inipath = os.path.abspath(os.path.join(prefix, 'test')) # Locations of test data files such as test case descriptions (.test). test_data_prefix = os.path.join(prefix, 'test', 'test-data') class SQLDataSuite(DataSuite): files = ['sqlalchemy-basics.test', 'sqlalchemy-sql-elements.test', 'sqlalchemy-sql-sqltypes.test', 'sqlalchemy-sql-selectable.test', 'sqlalchemy-sql-schema.test', 'sqlalchemy-plugin-features.test', 'sqlalchemy-plugin-query.test'] data_prefix = test_data_prefix def run_case(self, testcase: DataDrivenTestCase) -> None: assert testcase.old_cwd is not None, "test was not properly set up" mypy_cmdline = [ '--show-traceback', '--no-silence-site-packages', '--config-file={}/sqlalchemy.ini'.format(inipath), ] py2 = testcase.name.lower().endswith('python2') if py2: if try_find_python2_interpreter() is None: pytest.skip() return mypy_cmdline.append('--py2') else: mypy_cmdline.append('--python-version={}'.format('.'.join(map(str, sys.version_info[:2])))) # Write the program to a file. program_path = os.path.join(test_temp_dir, 'main.py') mypy_cmdline.append(program_path) with open(program_path, 'w') as file: for s in testcase.input: file.write('{}\n'.format(s)) output = [] # Type check the program. out, err, returncode = api.run(mypy_cmdline) # split lines, remove newlines, and remove directory of test case for line in (out + err).splitlines(): if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n").replace('.py', '')) else: output.append(line.rstrip("\r\n")) # Remove temp file. os.remove(program_path) assert_string_arrays_equal(testcase.output, output, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line))
39.597222
98
0.58155
import os import os.path import sys import pytest from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.util import try_find_python2_interpreter from mypy import api this_file_dir = os.path.dirname(os.path.realpath(__file__)) prefix = os.path.dirname(this_file_dir) inipath = os.path.abspath(os.path.join(prefix, 'test')) test_data_prefix = os.path.join(prefix, 'test', 'test-data') class SQLDataSuite(DataSuite): files = ['sqlalchemy-basics.test', 'sqlalchemy-sql-elements.test', 'sqlalchemy-sql-sqltypes.test', 'sqlalchemy-sql-selectable.test', 'sqlalchemy-sql-schema.test', 'sqlalchemy-plugin-features.test', 'sqlalchemy-plugin-query.test'] data_prefix = test_data_prefix def run_case(self, testcase: DataDrivenTestCase) -> None: assert testcase.old_cwd is not None, "test was not properly set up" mypy_cmdline = [ '--show-traceback', '--no-silence-site-packages', '--config-file={}/sqlalchemy.ini'.format(inipath), ] py2 = testcase.name.lower().endswith('python2') if py2: if try_find_python2_interpreter() is None: pytest.skip() return mypy_cmdline.append('--py2') else: mypy_cmdline.append('--python-version={}'.format('.'.join(map(str, sys.version_info[:2])))) program_path = os.path.join(test_temp_dir, 'main.py') mypy_cmdline.append(program_path) with open(program_path, 'w') as file: for s in testcase.input: file.write('{}\n'.format(s)) output = [] out, err, returncode = api.run(mypy_cmdline) for line in (out + err).splitlines(): if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n").replace('.py', '')) else: output.append(line.rstrip("\r\n")) os.remove(program_path) assert_string_arrays_equal(testcase.output, output, 'Invalid output ({}, line {})'.format( testcase.file, testcase.line))
true
true
f70018519847d548a5e05bd0b049391f90c31394
4,932
py
Python
tools/release-announcement.py
19317362/gerrit
bd1e5e9a194dbef18ca502db5c5564f4863011a4
[ "Apache-2.0" ]
1
2020-09-05T16:50:52.000Z
2020-09-05T16:50:52.000Z
tools/release-announcement.py
19317362/gerrit
bd1e5e9a194dbef18ca502db5c5564f4863011a4
[ "Apache-2.0" ]
null
null
null
tools/release-announcement.py
19317362/gerrit
bd1e5e9a194dbef18ca502db5c5564f4863011a4
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Generates the text to paste into the email for announcing a new
# release of Gerrit. The text is generated based on a template that
# is filled with values either passed to the script or calculated
# at runtime.
#
# The script outputs a plain text file with the announcement text:
#
#   release-announcement-gerrit-X.Y.txt
#
# and, if GPG is available, the announcement text wrapped with a
# signature:
#
#   release-announcement-gerrit-X.Y.txt.asc
#
# Usage:
#
#   ./tools/release-announcement.py -v 2.14.2 -p 2.14.1 \
#       -s "This release fixes several bugs since 2.14.1"
#
# Parameters:
#
#   --version (-v): The version of Gerrit being released.
#
#   --previous (-p): The previous version of Gerrit. Optional. If
#   specified, the generated text includes a link to the gitiles
#   log of commits between the previous and new versions.
#
#   --summary (-s): Short summary of the release. Optional. When
#   specified, the summary is inserted in the introductory sentence
#   of the generated text.
#
# Prerequisites:
#
# - The Jinja2 python library [1] must be installed.
#
# - For GPG signing to work, the python-gnupg library [2] must be
#   installed, and the ~/.gnupg folder must exist.
#
# - The war file must have been installed to the local Maven repository
#   using the `./tools/mvn/api.sh war_install` command.
#
# [1] http://jinja.pocoo.org/
# [2] http://pythonhosted.org/gnupg/

from __future__ import print_function
import argparse
import hashlib
import os
import sys
from gnupg import GPG
from jinja2 import Template


class Version:
    def __init__(self, version):
        self.version = version
        parts = version.split('.')
        if len(parts) > 2:
            self.major = ".".join(parts[:2])
            self.patch = version
        else:
            self.major = version
            self.patch = None

    def __str__(self):
        return self.version


def _main():
    descr = 'Generate Gerrit release announcement email text'
    parser = argparse.ArgumentParser(
        description=descr,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-v', '--version', dest='version',
                        required=True,
                        help='gerrit version to release')
    parser.add_argument('-p', '--previous', dest='previous',
                        help='previous gerrit version (optional)')
    parser.add_argument('-s', '--summary', dest='summary',
                        help='summary of the release content (optional)')
    options = parser.parse_args()

    summary = options.summary
    if summary and not summary.endswith("."):
        summary = summary + "."

    data = {
        "version": Version(options.version),
        "previous": options.previous,
        "summary": summary
    }

    war = os.path.join(
        os.path.expanduser("~/.m2/repository/com/google/gerrit/gerrit-war/"),
        "%(version)s/gerrit-war-%(version)s.war" % data)
    if not os.path.isfile(war):
        print("Could not find war file for Gerrit %s in local Maven repository"
              % data["version"], file=sys.stderr)
        sys.exit(1)

    md5 = hashlib.md5()
    sha1 = hashlib.sha1()
    sha256 = hashlib.sha256()
    BUF_SIZE = 65536  # Read data in 64kb chunks
    with open(war, 'rb') as f:
        while True:
            d = f.read(BUF_SIZE)
            if not d:
                break
            md5.update(d)
            sha1.update(d)
            sha256.update(d)

    data["sha1"] = sha1.hexdigest()
    data["sha256"] = sha256.hexdigest()
    data["md5"] = md5.hexdigest()

    template = Template(open("tools/release-announcement-template.txt").read())
    output = template.render(data=data)

    filename = "release-announcement-gerrit-%s.txt" % data["version"]
    with open(filename, "w") as f:
        f.write(output)

    gpghome = os.path.abspath(os.path.expanduser("~/.gnupg"))
    if not os.path.isdir(gpghome):
        print("Skipping signing due to missing gnupg home folder")
    else:
        try:
            gpg = GPG(homedir=gpghome)
        except TypeError:
            gpg = GPG(gnupghome=gpghome)
        signed = gpg.sign(output)
        filename = filename + ".asc"
        with open(filename, "w") as f:
            f.write(str(signed))


if __name__ == "__main__":
    _main()
31.414013
79
0.642336
from __future__ import print_function import argparse import hashlib import os import sys from gnupg import GPG from jinja2 import Template class Version: def __init__(self, version): self.version = version parts = version.split('.') if len(parts) > 2: self.major = ".".join(parts[:2]) self.patch = version else: self.major = version self.patch = None def __str__(self): return self.version def _main(): descr = 'Generate Gerrit release announcement email text' parser = argparse.ArgumentParser( description=descr, formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('-v', '--version', dest='version', required=True, help='gerrit version to release') parser.add_argument('-p', '--previous', dest='previous', help='previous gerrit version (optional)') parser.add_argument('-s', '--summary', dest='summary', help='summary of the release content (optional)') options = parser.parse_args() summary = options.summary if summary and not summary.endswith("."): summary = summary + "." data = { "version": Version(options.version), "previous": options.previous, "summary": summary } war = os.path.join( os.path.expanduser("~/.m2/repository/com/google/gerrit/gerrit-war/"), "%(version)s/gerrit-war-%(version)s.war" % data) if not os.path.isfile(war): print("Could not find war file for Gerrit %s in local Maven repository" % data["version"], file=sys.stderr) sys.exit(1) md5 = hashlib.md5() sha1 = hashlib.sha1() sha256 = hashlib.sha256() BUF_SIZE = 65536 with open(war, 'rb') as f: while True: d = f.read(BUF_SIZE) if not d: break md5.update(d) sha1.update(d) sha256.update(d) data["sha1"] = sha1.hexdigest() data["sha256"] = sha256.hexdigest() data["md5"] = md5.hexdigest() template = Template(open("tools/release-announcement-template.txt").read()) output = template.render(data=data) filename = "release-announcement-gerrit-%s.txt" % data["version"] with open(filename, "w") as f: f.write(output) gpghome = os.path.abspath(os.path.expanduser("~/.gnupg")) if not os.path.isdir(gpghome): print("Skipping signing due to missing gnupg home folder") else: try: gpg = GPG(homedir=gpghome) except TypeError: gpg = GPG(gnupghome=gpghome) signed = gpg.sign(output) filename = filename + ".asc" with open(filename, "w") as f: f.write(str(signed)) if __name__ == "__main__": _main()
true
true
f70019c6508df560087bb4bd7cafe83ac8584df1
19,693
py
Python
.vscode-server/data/User/History/-1f47d17c/QkLc.py
UNIZAR-30226-2022-09/back-end
7f20e141e34bf0ae7cce70515a1e4bb0cd85b173
[ "MIT" ]
null
null
null
.vscode-server/data/User/History/-1f47d17c/QkLc.py
UNIZAR-30226-2022-09/back-end
7f20e141e34bf0ae7cce70515a1e4bb0cd85b173
[ "MIT" ]
1
2022-02-16T12:12:43.000Z
2022-02-16T12:15:03.000Z
.vscode-server/data/User/History/-1f47d17c/QkLc.py
UNIZAR-30226-2022-09/back-end
7f20e141e34bf0ae7cce70515a1e4bb0cd85b173
[ "MIT" ]
null
null
null
# from flask import Flask, Blueprint
# from flask_sqlalchemy import SQLAlchemy
# from flask_login import LoginManager
# import os
from flask import Flask, jsonify, request, make_response, redirect, url_for
import jwt
import datetime
import os
import re  # used by check_email()/check_password() below; this import was missing in the snapshot
from functools import wraps
from flask_sqlalchemy import SQLAlchemy
import uuid
from werkzeug.security import generate_password_hash, check_password_hash
from werkzeug.utils import secure_filename
from sqlalchemy import select
from flask_migrate import Migrate, migrate
from flask_cors import CORS
from sqlalchemy import inspect
from sqlalchemy import Table, Column, MetaData, Integer, Computed
from numpy import array

app = Flask(__name__)
app.config['SECRET_KEY'] = 'secretollave'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///todo.db'

ABSOLUTE_PATH_TO_YOUR_FOLDER = '/home/dani/flask/static/fotosPerfil'
ABSOLUTE_PATH_TO_YOUR_PDF_FOLDER = '/home/dani/flask/static/pdf'

CORS(app)
db = SQLAlchemy(app)
migrate = Migrate(app, db)


# Models
class Usuario(db.Model):
    nick = db.Column(db.String(20), primary_key=True)
    Nombre_de_usuario = db.Column(db.String(50))
    password = db.Column(db.String(50))
    e_mail = db.Column(db.String(50), unique=True, nullable=False)
    descripcion = db.Column(db.String(1000))
    link = db.Column(db.String(200))
    foto_de_perfil = db.Column(db.String(400))


class Sigue(db.Model):
    #id = db.Column(db.Integer, primary_key=True )
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)
    Usuario_Nickb = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)


class Chat(db.Model):
    #Column('timestamp', TIMESTAMP(timezone=False), nullable=False, default=datetime.now())
    timestamp = db.Column(db.TIMESTAMP, nullable=False,
                          server_default=db.func.now(), onupdate=db.func.now())
    mensaje = db.Column(db.String(1000))
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)
    Usuario_Nickb = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)


class Publicacion(db.Model):
    id = db.Column(Integer, primary_key=True)
    #id = db.Sequence('id', start=1, increment=1)
    descripcion = db.Column(db.String(1000))
    #Column('timestamp', TIMESTAMP(timezone=False), nullable=False, default=datetime.now())
    timestamp = db.Column(db.TIMESTAMP, nullable=False,
                          server_default=db.func.now(), onupdate=db.func.now())
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'))


class Propia(db.Model):
    pdf = db.Column(db.String(400))
    id = db.Column(db.String(20), db.ForeignKey('publicacion.id'), primary_key=True)


class Recomendacion(db.Model):
    link = db.Column(db.String(200), nullable=False)
    titulo = db.Column(db.String(200), nullable=False)
    autor = db.Column(db.String(200), nullable=False)
    id = db.Column(db.String(20), db.ForeignKey('publicacion.id'), primary_key=True)


class Tematica(db.Model):
    tema = db.Column(db.String(50), primary_key=True)


class Notificaciones(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    fecha = db.Column(db.Date)
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)


class Prefiere(db.Model):
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)
    tema = db.Column(db.String(50), db.ForeignKey('tematica.tema'), primary_key=True)


class Trata_pub_del_tema(db.Model):
    id = db.Column(db.Integer, db.ForeignKey('publicacion.id'), primary_key=True)
    tema = db.Column(db.String(50), db.ForeignKey('tematica.tema'), primary_key=True)


class Gusta(db.Model):
    id = db.Column(db.Integer, db.ForeignKey('publicacion.id'), primary_key=True)
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)


class Comenta(db.Model):
    id = db.Column(db.Integer, db.ForeignKey('publicacion.id'), primary_key=True)
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)
    comentario = db.Column(db.String(1000))


class Guarda(db.Model):
    id = db.Column(db.Integer, db.ForeignKey('publicacion.id'), primary_key=True)
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)


class Trata(db.Model):
    id_publi = db.Column(db.Integer, db.ForeignKey('publicacion.id'), primary_key=True)
    id_notif = db.Column(db.String(20), db.ForeignKey('notificaciones.id'), primary_key=True)


class Genera(db.Model):
    id = db.Column(db.Integer, db.ForeignKey('publicacion.id'), primary_key=True)
    Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'), primary_key=True)


def token_required(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        #token = request.args.get('token')  #http://127.0.0.1:5000/route?token=djsnvidnoffofn
        #data = request.get_json()
        token = request.headers['token']
        #token = data['token']
        if not token:
            return jsonify({'error': 'Token no existe'}), 403
        try:
            data = jwt.decode(token, app.config['SECRET_KEY'])
            current_user = Usuario.query.filter_by(nick=data['nick']).first()
            current_user = data['nick']
        except:
            return jsonify({'error': 'Token no valido'}), 403
        return f(current_user, *args, **kwargs)
    return decorated


def token_required_id(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        #token = request.args.get('token')  #http://127.0.0.1:5000/route?token=djsnvidnoffofn
        #data = request.get_json()
        token = request.headers['token']
        #token = data['token']
        if not token:
            return jsonify({'error': 'Token no existe'}), 403
        try:
            data = jwt.decode(token, app.config['SECRET_KEY'])
            current_user = Usuario.query.filter_by(nick=data['nick']).first()
            current_user = data['nick']
            current_id = Publicacion.query.filter_by(id=data['id']).first()
            _id = data['id']
        except:
            return jsonify({'error': 'Token no valido'}), 403
        return f(current_user, _id, *args, **kwargs)
    return decorated


@app.route('/unprotected')
def unprotected():
    return jsonify({'message': 'Puede entrar tol mundo'})


@app.route('/protected')
@token_required
def protected(current_user):
    print(current_user)
    return jsonify({'message': 'Puedes entrar si puedes'})


# Route for the login
@app.route('/register', methods=['POST'])
def add_data():
    data = request.get_json()
    #nick = request.form.get("nick")
    #password = request.form.get("password")
    #e_mail = request.form.get("e_mail")
    user = Usuario.query.filter_by(e_mail=data['e_mail']).first()
    nick = Usuario.query.filter_by(nick=data['nick']).first()
    if user:  # if this returns something, the email already exists
        return jsonify({'error': 'Existe correo'})  # JSON saying the email exists
    if nick:
        return jsonify({'error': 'Existe nick'})
    #if (check_email(e_mail) == True and check_password(data['password']) == True ):
    register = Usuario(nick=data['nick'],
                       password=generate_password_hash(data['password']),
                       e_mail=data['e_mail'], foto_de_perfil="platon.jpg")
    db.session.add(register)
    db.session.commit()
    token = jwt.encode({'nick': data['nick'],
                        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)},
                       app.config['SECRET_KEY'])
    return jsonify({'token': token.decode('UTF-8')})


@app.route('/login', methods=['POST'])
def login():
    # auth = request.authorization  # new: only if you do it with HTTP auth
    data = request.get_json()
    if '@' in data['nickOcorreo']:
        user = Usuario.query.filter_by(e_mail=data['nickOcorreo']).first()
    else:
        user = Usuario.query.filter_by(nick=data['nickOcorreo']).first()
    if not user:
        return jsonify({'error': 'No existe ese usuario'})  # error: no such user
    if not check_password_hash(user.password, data['password']):
        return jsonify({'error': 'Mal contraseña'})  # error: wrong password
    token = jwt.encode({'nick': data['nickOcorreo'],
                        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=9999999)},
                       app.config['SECRET_KEY'])
    return jsonify({'token': token.decode('UTF-8')})


@app.route('/editarPerfil', methods=['GET'])
@token_required
def editarPerfilget(current_user):
    s = select([Usuario.Nombre_de_usuario, Usuario.descripcion,
                Usuario.link, Usuario.foto_de_perfil]).where((Usuario.nick == current_user))
    result = db.session.execute(s)
    seguidos = db.session.query(Sigue).filter(Sigue.Usuario_Nicka == current_user).count()
    seguidores = db.session.query(Sigue).filter(Sigue.Usuario_Nickb == current_user).count()
    nposts = db.session.query(Publicacion).filter(Publicacion.Usuario_Nicka == current_user).count()
    tema = select([Prefiere.tema]).where((Prefiere.Usuario_Nicka == current_user))
    temas = db.session.execute(tema)
    vector = []
    for row in temas:
        vector += row
    for row in result:
        fila = {
            "nick": current_user,
            "nombre_de_usuario": row[0],
            "descripcion": row[1],
            "link": row[2],
            "foto_de_perfil": 'http://51.255.50.207:5000/display/' + row[3],
            "nsiguiendo": seguidos,
            "nseguidores": seguidores,
            "nposts": nposts,
            "tematicas": vector
            #"foto_de_perfil": url_for('static', filename='fotosPerfil/' + row[3])
        }
    return fila


@app.route('/display/<filename>')
def foto(filename):
    return redirect(url_for('static', filename='fotosPerfil/' + filename), code=301)


@app.route('/editarPerfil', methods=['POST'])
@token_required
def editarPerfilpost(current_user):
    data = request.get_json()
    user = Usuario.query.filter_by(nick=current_user).first()
    user.Nombre_de_usuario = data['nombre_de_usuario']
    print(data['nombre_de_usuario'])
    print(data['descripcion'])
    print(data['link'])
    print(data['tematicas'])
    user.descripcion = data['descripcion']
    user.link = data['link']
    tematicas = data['tematicas']
    for temas in tematicas:
        tema = Prefiere.query.filter_by(tema=temas).first()
        if not tema:
            tema = Prefiere(Usuario_Nicka=current_user, tema=temas)
            db.session.add(tema)
            #db.session.commit()
    #cambia_foto
    db.session.commit()
    token = jwt.encode({'nick': current_user,
                        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)},
                       app.config['SECRET_KEY'])
    return jsonify({'token': token.decode('UTF-8')})


@app.route('/actualizarImagen', methods=['POST'])
@token_required
def actualizarImagen(current_user):
    user = Usuario.query.filter_by(nick=current_user).first()
    if request.files['nueva_foto'] is not None:  #data['cambia_foto']:
        file = request.files['nueva_foto']
        print(request.files['nueva_foto'])
        filename = secure_filename(file.filename)
        file.save(os.path.join(ABSOLUTE_PATH_TO_YOUR_FOLDER, filename))
        user.foto_de_perfil = filename
        db.session.commit()
    token = jwt.encode({'nick': current_user,
                        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)},
                       app.config['SECRET_KEY'])
    return jsonify({'token': token.decode('UTF-8')})


@app.route('/subirPost', methods=['POST'])
@token_required
def subirPost(current_user):
    data = request.get_json()
    publicacion = Publicacion(descripcion=data['descripcion'], Usuario_Nicka=current_user)
    # grab the id
    db.session.add(publicacion)
    db.session.commit()
    tematicas = data['tematicas']
    for temas in tematicas:
        temita = Tematica.query.filter_by(tema=temas).first()
        if temita:
            nuevo = Trata_pub_del_tema(id=publicacion.id, tema=temita.tema)
            db.session.add(nuevo)
            db.session.commit()
    if (data['tipo'] == "1"):  # article
        return jsonify({'id': publicacion.id})
        #guardarPDF(request.files['pdf'], publicacion.id)
    elif (data['tipo'] == "2"):  # recommendation
        recomendacion = Recomendacion(link=data['link'], titulo=data['titulo'],
                                      autor=data['autor'], id=publicacion.id)
        db.session.add(recomendacion)
        db.session.commit()
    token = jwt.encode({'nick': current_user,
                        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)},
                       app.config['SECRET_KEY'])
    return jsonify({'token': token.decode('UTF-8')})


@app.route('/subirPdf', methods=['POST'])
@token_required
def guardarPDF(current_user):
    _id = request.headers['id']
    propia = Propia(id=_id)
    db.session.add(propia)
    db.session.commit()
    propia = Propia.query.filter_by(id=_id).first()
    if request.files['pdf'] is not None:
        file = request.files['pdf']
        #print(pdf)
        filename = secure_filename(file.filename)
        file.save(os.path.join(ABSOLUTE_PATH_TO_YOUR_PDF_FOLDER, filename))
        propia.pdf = filename
        db.session.add(propia)
        db.session.commit()
    else:
        print("pdf nulisimo")
    token = jwt.encode({'nick': current_user,
                        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)},
                       app.config['SECRET_KEY'])
    return jsonify({'token': token.decode('UTF-8')})


@app.route('/getPostsPropios', methods=['GET'])
@token_required
def getPostsPropios(current_user):
    data = request.get_json()
    x = select([Usuario.Nombre_de_usuario]).where((Usuario.nick == current_user))
    resultb = db.session.execute(x)
    Nombre_de_usuario = ""
    for b in resultb:
        Nombre_de_usuario = b.Nombre_de_usuario
    id = select([Publicacion.id]).where(
        Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
    descripcion = select([Publicacion.descripcion]).where(
        Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
    timestamp = select([Publicacion.timestamp]).where(
        Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
    results = db.session.execute(id)
    resultss = db.session.execute(descripcion)
    resultsss = db.session.execute(timestamp)
    vector0 = []
    vector1 = []
    vector2 = []
    Gustas = []
    Comentarios = []
    Guardados = []
    for r in results:
        #print(str(r))
        vector0 += r
        Gustas += str(db.session.query(Gusta).filter(
            Gusta.Usuario_Nicka == current_user, Gusta.id == 'r').count())
        Comentarios += str(db.session.query(Comenta).filter(
            Comenta.Usuario_Nicka == current_user, Comenta.id == 'r').count())
        Guardados += str(db.session.query(Guarda).filter(
            Guarda.Usuario_Nicka == current_user, Guarda.id == 'r').count())
    for r in resultss:
        vector1 += r
    for r in resultsss:
        vector2 += r
    vector3 = []
    vector4 = []
    vector5 = []
    for r in vector0:
        link = select([Recomendacion.link]).where((Recomendacion.id == r))
        titulo = select([Recomendacion.titulo]).where((Recomendacion.id == r))
        autor = select([Recomendacion.autor]).where((Recomendacion.id == r))
        resulta = db.session.execute(link)
        resultaa = db.session.execute(titulo)
        resultaaa = db.session.execute(autor)
        for a in resulta:
            vector3 += a
        for a in resultaa:
            vector4 += a
        for a in resultaaa:
            vector5 += a
        # this dict is immediately overwritten by the one built below
        fila = {
            "id": r.id,
            "nick": current_user,
            "descripcion": r.descripcion,
            "timestamp": r.timestamp,
            "pdf": 'http://51.255.50.207:5000/display2/' + a.pdf,
            "nlikes": Gustas,
            "ncomentarios": Comentarios,
            "nguardados": Guardados,
            "usuario": resulta.nombre_de_usuario
        }
    fila = {
        "id": vector0,
        "link": vector3,
        "titulo": vector4,
        "autor": vector5,
        "nick": current_user,
        "descripcion": vector1,
        "timestamp": vector2,
        "nlikes": Gustas,
        "ncomentarios": Comentarios,
        "nguardados": Guardados,
        "usuario": Nombre_de_usuario,
        #"likemio",
        #"guardadomio"
    }
    return fila


@app.route('/display2/<filename>')
def pdf(filename):
    return redirect(url_for('static', filename='pdf/' + filename), code=301)


@app.route('/getPostsRecomendados', methods=['GET'])
@token_required
def getPostsRecomendados(current_user):
    data = request.get_json()
    x = select([Usuario.Nombre_de_usuario]).where((Usuario.nick == current_user))
    resultb = db.session.execute(x)
    Nombre_de_usuario = ""
    for b in resultb:
        Nombre_de_usuario = b.Nombre_de_usuario
    id = select([Publicacion.id]).where(
        Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
    descripcion = select([Publicacion.descripcion]).where(
        Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
    timestamp = select([Publicacion.timestamp]).where(
        Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc())
    results = db.session.execute(id)
    resultss = db.session.execute(descripcion)
    resultsss = db.session.execute(timestamp)
    vector0 = []
    vector1 = []
    vector2 = []
    Gustas = []
    Comentarios = []
    Guardados = []
    for r in results:
        #print(str(r))
        vector0 += r
        Gustas += str(db.session.query(Gusta).filter(
            Gusta.Usuario_Nicka == current_user, Gusta.id == 'r').count())
        Comentarios += str(db.session.query(Comenta).filter(
            Comenta.Usuario_Nicka == current_user, Comenta.id == 'r').count())
        Guardados += str(db.session.query(Guarda).filter(
            Guarda.Usuario_Nicka == current_user, Guarda.id == 'r').count())
    for r in resultss:
        vector1 += r
    for r in resultsss:
        vector2 += r
    vector3 = []
    vector4 = []
    vector5 = []
    for r in vector0:
        link = select([Recomendacion.link]).where((Recomendacion.id == r))
        titulo = select([Recomendacion.titulo]).where((Recomendacion.id == r))
        autor = select([Recomendacion.autor]).where((Recomendacion.id == r))
        resulta = db.session.execute(link)
        resultaa = db.session.execute(titulo)
        resultaaa = db.session.execute(autor)
        for a in resulta:
            vector3 += a
        for a in resultaa:
            vector4 += a
        for a in resultaaa:
            vector5 += a
    fila = {
        "id": vector0,
        "link": vector3,
        "titulo": vector4,
        "autor": vector5,
        "nick": current_user,
        "descripcion": vector1,
        "timestamp": vector2,
        "nlikes": Gustas,
        "ncomentarios": Comentarios,
        "nguardados": Guardados,
        "usuario": Nombre_de_usuario,
        #"likemio",
        #"guardadomio"
    }
    return fila


def check_email(email):
    regex = '^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
    if (re.search(regex, email)):
        return True
    else:
        return False


# Passwords of between 8 and 32 characters.
def check_password(password):
    regex = '^(?=.*[0-9])(?=.*[a-z])(?=.*[A-Z])(?=.*[*.!@$%^&(){}[]:;<>,.?/~_+-=|\]).{8,32}$'
    if (re.search(regex, password)):
        return True
    else:
        return False


if __name__ == '__main__':
    app.run(debug=True)
33.321489
153
0.640583
from flask import Flask, jsonify, request, make_response, redirect, url_for import jwt import datetime import os from functools import wraps from flask_sqlalchemy import SQLAlchemy import uuid from werkzeug.security import generate_password_hash, check_password_hash from werkzeug.utils import secure_filename from sqlalchemy import select from flask_migrate import Migrate, migrate from flask_cors import CORS from sqlalchemy import inspect from sqlalchemy import Table, Column, MetaData, Integer, Computed from numpy import array app = Flask(__name__) app.config['SECRET_KEY'] = 'secretollave' app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///todo.db' ABSOLUTE_PATH_TO_YOUR_FOLDER ='/home/dani/flask/static/fotosPerfil' ABSOLUTE_PATH_TO_YOUR_PDF_FOLDER ='/home/dani/flask/static/pdf' CORS(app) db = SQLAlchemy(app) migrate = Migrate(app, db) class Usuario(db.Model): nick = db.Column(db.String(20), primary_key=True) Nombre_de_usuario = db.Column(db.String(50)) password = db.Column(db.String(50)) e_mail = db.Column(db.String(50), unique=True, nullable=False) descripcion = db.Column(db.String(1000)) link = db.Column(db.String(200)) foto_de_perfil = db.Column(db.String(400)) class Sigue(db.Model): Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) Usuario_Nickb = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) class Chat(db.Model): timestamp = db.Column(db.TIMESTAMP, nullable=False, server_default=db.func.now(), onupdate=db.func.now()) mensaje = db.Column(db.String(1000)) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) Usuario_Nickb = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) class Publicacion(db.Model): id = db.Column(Integer,primary_key=True) descripcion = db.Column(db.String(1000)) timestamp = db.Column(db.TIMESTAMP, nullable=False, server_default=db.func.now(), onupdate=db.func.now()) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick')) class Propia(db.Model): pdf = db.Column(db.String(400)) id = db.Column(db.String(20), db.ForeignKey('publicacion.id'),primary_key=True) class Recomendacion(db.Model): link = db.Column(db.String(200),nullable=False) titulo = db.Column(db.String(200),nullable=False) autor = db.Column(db.String(200),nullable=False) id = db.Column(db.String(20), db.ForeignKey('publicacion.id'),primary_key=True) class Tematica(db.Model): tema = db.Column(db.String(50), primary_key=True ) class Notificaciones(db.Model): id = db.Column(db.Integer, primary_key=True ) fecha = db.Column(db.Date) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) class Prefiere(db.Model): Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) tema = db.Column(db.String(50), db.ForeignKey('tematica.tema'),primary_key=True) class Trata_pub_del_tema(db.Model): id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True) tema = db.Column(db.String(50), db.ForeignKey('tematica.tema'),primary_key=True) class Gusta(db.Model): id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) class Comenta(db.Model): id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) comentario = db.Column(db.String(1000)) class Guarda(db.Model): id = db.Column(db.Integer, 
db.ForeignKey('publicacion.id'),primary_key=True) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) class Trata(db.Model): id_publi = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True) id_notif = db.Column(db.String(20), db.ForeignKey('notificaciones.id'),primary_key=True) class Genera(db.Model): id = db.Column(db.Integer, db.ForeignKey('publicacion.id'),primary_key=True) Usuario_Nicka = db.Column(db.String(20), db.ForeignKey('usuario.nick'),primary_key=True) def token_required(f): @wraps(f) def decorated(*args, **kwargs): token = request.headers['token'] if not token: return jsonify({'error': 'Token no existe'}), 403 try: data = jwt.decode(token, app.config['SECRET_KEY']) current_user = Usuario.query.filter_by(nick=data['nick']).first() current_user = data['nick'] except: return jsonify({'error': 'Token no valido'}), 403 return f(current_user,*args, **kwargs) return decorated def token_required_id(f): @wraps(f) def decorated(*args, **kwargs): token = request.headers['token'] if not token: return jsonify({'error': 'Token no existe'}), 403 try: data = jwt.decode(token, app.config['SECRET_KEY']) current_user = Usuario.query.filter_by(nick=data['nick']).first() current_user = data['nick'] current_id = Publicacion.query.filter_by(id=data['id']).first() _id = data['id'] except: return jsonify({'error': 'Token no valido'}), 403 return f(current_user,_id,*args, **kwargs) return decorated @app.route('/unprotected') def unprotected(): return jsonify({'message': 'Puede entrar tol mundo'}) @app.route('/protected') @token_required def protected(current_user): print(current_user) return jsonify({'message': 'Puedes entrar si puedes'}) @app.route('/register', methods=['POST']) def add_data(): data= request.get_json() user = Usuario.query.filter_by(e_mail=data['e_mail']).first() nick = Usuario.query.filter_by(nick=data['nick']).first() if user: return jsonify({'error': 'Existe correo'}) if nick: return jsonify({'error': 'Existe nick'}) register = Usuario(nick=data['nick'],password=generate_password_hash(data['password']), e_mail=data['e_mail'],foto_de_perfil="platon.jpg") db.session.add(register) db.session.commit() token = jwt.encode({'nick' : data['nick'], 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY']) return jsonify({'token' : token.decode('UTF-8')}) @app.route('/login', methods=['POST']) def login(): data= request.get_json() if '@' in data['nickOcorreo']: user = Usuario.query.filter_by(e_mail=data['nickOcorreo']).first() else: user = Usuario.query.filter_by(nick=data['nickOcorreo']).first() if not user: return jsonify({'error': 'No existe ese usuario'}) if not check_password_hash(user.password, data['password']): return jsonify({'error': 'Mal contraseña'}) token = jwt.encode({'nick' : data['nickOcorreo'], 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=9999999)}, app.config['SECRET_KEY']) return jsonify({'token' : token.decode('UTF-8')}) @app.route('/editarPerfil', methods=['GET']) @token_required def editarPerfilget(current_user): s = select([Usuario.Nombre_de_usuario, Usuario.descripcion,Usuario.link, Usuario.foto_de_perfil]).where((Usuario.nick == current_user)) result = db.session.execute(s) seguidos= db.session.query(Sigue).filter(Sigue.Usuario_Nicka == current_user ).count() seguidores= db.session.query(Sigue).filter(Sigue.Usuario_Nickb == current_user ).count() nposts= db.session.query(Publicacion).filter(Publicacion.Usuario_Nicka == current_user ).count() tema = 
select([Prefiere.tema]).where((Prefiere.Usuario_Nicka == current_user)) temas = db.session.execute(tema) vector = [] for row in temas: vector += row for row in result: fila = { "nick": current_user, "nombre_de_usuario":row[0], "descripcion":row[1], "link":row[2], "foto_de_perfil": 'http://51.255.50.207:5000/display/' + row[3], "nsiguiendo": seguidos, "nseguidores": seguidores, "nposts": nposts, "tematicas": vector } return fila @app.route('/display/<filename>') def foto(filename): return redirect(url_for('static', filename='fotosPerfil/' + filename),code = 301) @app.route('/editarPerfil', methods=['POST']) @token_required def editarPerfilpost(current_user): data= request.get_json() user = Usuario.query.filter_by(nick=current_user).first() user.Nombre_de_usuario = data['nombre_de_usuario'] print(data['nombre_de_usuario']) print(data['descripcion']) print(data['link']) print(data['tematicas']) user.descripcion = data['descripcion'] user.link = data['link'] tematicas = data['tematicas'] for temas in tematicas: tema = Prefiere.query.filter_by(tema=temas).first() if not tema: tema = Prefiere(Usuario_Nicka=current_user, tema = temas) db.session.add(tema) db.session.commit() token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY']) return jsonify({'token' : token.decode('UTF-8')}) @app.route('/actualizarImagen', methods=['POST']) @token_required def actualizarImagen(current_user): user = Usuario.query.filter_by(nick=current_user).first() if request.files['nueva_foto'] is not None: file = request.files['nueva_foto'] print(request.files['nueva_foto']) filename = secure_filename(file.filename) file.save(os.path.join(ABSOLUTE_PATH_TO_YOUR_FOLDER, filename)) user.foto_de_perfil = filename db.session.commit() token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY']) return jsonify({'token' : token.decode('UTF-8')}) @app.route('/subirPost', methods=['POST']) @token_required def subirPost(current_user): data= request.get_json() publicacion = Publicacion(descripcion=data['descripcion'],Usuario_Nicka=current_user) db.session.add(publicacion) db.session.commit() tematicas = data['tematicas'] for temas in tematicas: temita = Tematica.query.filter_by(tema=temas).first() if temita: nuevo = Trata_pub_del_tema(id=publicacion.id, tema = temita.tema) db.session.add(nuevo) db.session.commit() if (data['tipo']=="1"): return jsonify({'id' : publicacion.id}) elif(data['tipo']=="2"): recomendacion = Recomendacion(link=data['link'],titulo=data['titulo'], autor = data['autor'], id = publicacion.id) db.session.add(recomendacion) db.session.commit() token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY']) return jsonify({'token' : token.decode('UTF-8')}) @app.route('/subirPdf', methods=['POST']) @token_required def guardarPDF(current_user): _id=request.headers['id'] propia = Propia( id = _id) db.session.add(propia) db.session.commit() propia = Propia.query.filter_by(id=_id).first() if request.files['pdf'] is not None: file = request.files['pdf'] filename = secure_filename(file.filename) file.save(os.path.join(ABSOLUTE_PATH_TO_YOUR_PDF_FOLDER, filename)) propia.pdf = filename db.session.add(propia) db.session.commit() else: print("pdf nulisimo") token = jwt.encode({'nick' : current_user, 'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)}, app.config['SECRET_KEY']) return 
jsonify({'token' : token.decode('UTF-8')}) @app.route('/getPostsPropios', methods=['GET']) @token_required def getPostsPropios(current_user): data= request.get_json() x = select([Usuario.Nombre_de_usuario]).where((Usuario.nick == current_user)) resultb = db.session.execute(x) Nombre_de_usuario = "" for b in resultb: Nombre_de_usuario=b.Nombre_de_usuario id = select([Publicacion.id]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc()) descripcion = select( [Publicacion.descripcion]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc()) timestamp = select([Publicacion.timestamp]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc()) results = db.session.execute(id) resultss = db.session.execute(descripcion) resultsss = db.session.execute(timestamp) vector0 = [] vector1 = [] vector2 = [] Gustas = [] Comentarios= [] Guardados= [] for r in results: vector0 += r Gustas += str(db.session.query(Gusta).filter(Gusta.Usuario_Nicka == current_user, Gusta.id == 'r' ).count()) Comentarios += str(db.session.query(Comenta).filter(Comenta.Usuario_Nicka == current_user, Comenta.id == 'r' ).count()) Guardados += str(db.session.query(Guarda).filter(Guarda.Usuario_Nicka == current_user, Guarda.id == 'r').count()) for r in resultss: vector1 += r for r in resultsss: vector2 += r vector3 = [] vector4 = [] vector5 = [] for r in vector0: link = select([Recomendacion.link]).where((Recomendacion.id == r)) titulo = select([Recomendacion.titulo]).where((Recomendacion.id == r)) autor = select([Recomendacion.autor]).where((Recomendacion.id == r)) resulta = db.session.execute(link) resultaa = db.session.execute(titulo) resultaaa = db.session.execute(autor) for a in resulta: vector3 +=a for a in resultaa: vector4 +=a for a in resultaaa: vector5 +=a fila = { "id": r.id, "nick": current_user, "descripcion":r.descripcion, "timestamp":r.timestamp, "pdf": 'http://51.255.50.207:5000/display2/' + a.pdf, "nlikes": Gustas, "ncomentarios": Comentarios, "nguardados": Guardados, "usuario": resulta.nombre_de_usuario } fila = { "id": vector0, "link": vector3, "titulo": vector4, "autor": vector5, "nick": current_user, "descripcion": vector1, "timestamp": vector2, "nlikes": Gustas, "ncomentarios": Comentarios, "nguardados": Guardados, "usuario": Nombre_de_usuario, } return fila @app.route('/display2/<filename>') def pdf(filename): return redirect(url_for('static', filename='pdf/' + filename),code = 301) @app.route('/getPostsRecomendados', methods=['GET']) @token_required def getPostsRecomendados(current_user): data= request.get_json() x = select([Usuario.Nombre_de_usuario]).where((Usuario.nick == current_user)) resultb = db.session.execute(x) Nombre_de_usuario = "" for b in resultb: Nombre_de_usuario=b.Nombre_de_usuario id = select([Publicacion.id]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc()) descripcion = select( [Publicacion.descripcion]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc()) timestamp = select([Publicacion.timestamp]).where(Publicacion.Usuario_Nicka == current_user).order_by(Publicacion.id.desc()) results = db.session.execute(id) resultss = db.session.execute(descripcion) resultsss = db.session.execute(timestamp) vector0 = [] vector1 = [] vector2 = [] Gustas = [] Comentarios= [] Guardados= [] for r in results: vector0 += r Gustas += str(db.session.query(Gusta).filter(Gusta.Usuario_Nicka == current_user, Gusta.id == 'r' ).count()) Comentarios += 
str(db.session.query(Comenta).filter(Comenta.Usuario_Nicka == current_user, Comenta.id == 'r' ).count()) Guardados += str(db.session.query(Guarda).filter(Guarda.Usuario_Nicka == current_user, Guarda.id == 'r').count()) for r in resultss: vector1 += r for r in resultsss: vector2 += r vector3 = [] vector4 = [] vector5 = [] for r in vector0: link = select([Recomendacion.link]).where((Recomendacion.id == r)) titulo = select([Recomendacion.titulo]).where((Recomendacion.id == r)) autor = select([Recomendacion.autor]).where((Recomendacion.id == r)) resulta = db.session.execute(link) resultaa = db.session.execute(titulo) resultaaa = db.session.execute(autor) for a in resulta: vector3 +=a for a in resultaa: vector4 +=a for a in resultaaa: vector5 +=a fila = { "id": vector0, "link": vector3, "titulo": vector4, "autor": vector5, "nick": current_user, "descripcion": vector1, "timestamp": vector2, "nlikes": Gustas, "ncomentarios": Comentarios, "nguardados": Guardados, "usuario": Nombre_de_usuario, } return fila def check_email(email): regex = '^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$' if(re.search(regex,email)): return True else: return False def check_password(password): regex = '^(?=.*[0-9])(?=.*[a-z])(?=.*[A-Z])(?=.*[*.!@$%^&(){}[]:;<>,.?/~_+-=|\]).{8,32}$' if(re.search(regex,password)): return True else: return False if __name__ == '__main__': app.run(debug=True)
false
true
f70019c95c7be20517b76954cfb0e4af80ac4c4a
6,708
py
Python
grafeas/models/api_artifact.py
atlassian-forks/client-python
111cb2184324595931e42233707a58cff77cb6ec
[ "Apache-2.0" ]
6
2018-01-22T21:54:56.000Z
2020-07-26T14:52:13.000Z
grafeas/models/api_artifact.py
atlassian-forks/client-python
111cb2184324595931e42233707a58cff77cb6ec
[ "Apache-2.0" ]
6
2018-07-12T12:56:16.000Z
2021-07-13T00:33:24.000Z
grafeas/models/api_artifact.py
atlassian-forks/client-python
111cb2184324595931e42233707a58cff77cb6ec
[ "Apache-2.0" ]
19
2018-07-12T11:08:44.000Z
2022-03-09T06:17:04.000Z
# coding: utf-8

"""
    An API to insert and retrieve metadata on cloud artifacts.

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)  # noqa: E501

    OpenAPI spec version: v1alpha1

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


import pprint
import re  # noqa: F401

import six


class ApiArtifact(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'name': 'str',
        'checksum': 'str',
        'id': 'str',
        'names': 'list[str]'
    }

    attribute_map = {
        'name': 'name',
        'checksum': 'checksum',
        'id': 'id',
        'names': 'names'
    }

    def __init__(self, name=None, checksum=None, id=None, names=None):  # noqa: E501
        """ApiArtifact - a model defined in Swagger"""  # noqa: E501

        self._name = None
        self._checksum = None
        self._id = None
        self._names = None
        self.discriminator = None

        if name is not None:
            self.name = name
        if checksum is not None:
            self.checksum = checksum
        if id is not None:
            self.id = id
        if names is not None:
            self.names = names

    @property
    def name(self):
        """Gets the name of this ApiArtifact.  # noqa: E501

        Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto.  # noqa: E501

        :return: The name of this ApiArtifact.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this ApiArtifact.

        Name of the artifact. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. This field is deprecated in favor of the plural `names` field; it continues to exist here to allow existing BuildProvenance serialized to json in google.devtools.containeranalysis.v1alpha1.BuildDetails.provenance_bytes to deserialize back into proto.  # noqa: E501

        :param name: The name of this ApiArtifact.  # noqa: E501
        :type: str
        """

        self._name = name

    @property
    def checksum(self):
        """Gets the checksum of this ApiArtifact.  # noqa: E501

        Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container.  # noqa: E501

        :return: The checksum of this ApiArtifact.  # noqa: E501
        :rtype: str
        """
        return self._checksum

    @checksum.setter
    def checksum(self, checksum):
        """Sets the checksum of this ApiArtifact.

        Hash or checksum value of a binary, or Docker Registry 2.0 digest of a container.  # noqa: E501

        :param checksum: The checksum of this ApiArtifact.  # noqa: E501
        :type: str
        """

        self._checksum = checksum

    @property
    def id(self):
        """Gets the id of this ApiArtifact.  # noqa: E501

        :return: The id of this ApiArtifact.  # noqa: E501
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this ApiArtifact.

        :param id: The id of this ApiArtifact.  # noqa: E501
        :type: str
        """

        self._id = id

    @property
    def names(self):
        """Gets the names of this ApiArtifact.  # noqa: E501

        Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image.  # noqa: E501

        :return: The names of this ApiArtifact.  # noqa: E501
        :rtype: list[str]
        """
        return self._names

    @names.setter
    def names(self, names):
        """Sets the names of this ApiArtifact.

        Related artifact names. This may be the path to a binary or jar file, or in the case of a container build, the name used to push the container image to Google Container Registry, as presented to `docker push`. Note that a single Artifact ID can have multiple names, for example if two tags are applied to one image.  # noqa: E501

        :param names: The names of this ApiArtifact.  # noqa: E501
        :type: list[str]
        """

        self._names = names

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(ApiArtifact, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ApiArtifact):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
33.54
481
0.603757
import pprint import re import six class ApiArtifact(object): swagger_types = { 'name': 'str', 'checksum': 'str', 'id': 'str', 'names': 'list[str]' } attribute_map = { 'name': 'name', 'checksum': 'checksum', 'id': 'id', 'names': 'names' } def __init__(self, name=None, checksum=None, id=None, names=None): self._name = None self._checksum = None self._id = None self._names = None self.discriminator = None if name is not None: self.name = name if checksum is not None: self.checksum = checksum if id is not None: self.id = id if names is not None: self.names = names @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def checksum(self): return self._checksum @checksum.setter def checksum(self, checksum): self._checksum = checksum @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def names(self): return self._names @names.setter def names(self, names): self._names = names def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(ApiArtifact, dict): for key, value in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, ApiArtifact): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
true
true
f7001ad17b839c3551d7b4c8edcc8b1d1d322b6f
6,412
py
Python
asv/plugins/conda.py
prisae/asv
57c386d7cc27f91ecd8daf1ad2e0413f2efdd39c
[ "BSD-3-Clause" ]
2
2019-08-18T11:05:25.000Z
2019-11-17T02:07:18.000Z
asv/plugins/conda.py
prisae/asv
57c386d7cc27f91ecd8daf1ad2e0413f2efdd39c
[ "BSD-3-Clause" ]
1
2019-02-19T17:11:38.000Z
2019-02-19T17:11:38.000Z
asv/plugins/conda.py
prisae/asv
57c386d7cc27f91ecd8daf1ad2e0413f2efdd39c
[ "BSD-3-Clause" ]
null
null
null
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-

from __future__ import absolute_import, division, unicode_literals, print_function

import re
import os
import tempfile

import six

from .. import environment
from ..console import log
from .. import util

WIN = (os.name == "nt")


def _find_conda():
    """Find the conda executable robustly across conda versions.

    Returns
    -------
    conda : str
        Path to the conda executable.

    Raises
    ------
    IOError
        If the executable cannot be found in either the CONDA_EXE environment
        variable or in the PATH.

    Notes
    -----
    In POSIX platforms in conda >= 4.4, conda can be set up as a bash function
    rather than an executable. (This is to enable the syntax
    ``conda activate env-name``.) In this case, the environment variable
    ``CONDA_EXE`` contains the path to the conda executable. In other cases,
    we use standard search for the appropriate name in the PATH.

    See https://github.com/airspeed-velocity/asv/issues/645 for more details.
    """
    if 'CONDA_EXE' in os.environ:
        conda = os.environ['CONDA_EXE']
    else:
        conda = util.which('conda')
    return conda


class Conda(environment.Environment):
    """
    Manage an environment using conda.

    Dependencies are installed using ``conda``. The benchmarked project is
    installed using ``pip`` (since ``conda`` doesn't have a method to install
    from an arbitrary ``setup.py``).
    """
    tool_name = "conda"
    _matches_cache = {}

    def __init__(self, conf, python, requirements):
        """
        Parameters
        ----------
        conf : Config instance

        python : str
            Version of Python. Must be of the form "MAJOR.MINOR".

        requirements : dict
            Dictionary mapping a PyPI package name to a version
            identifier string.
        """
        self._python = python
        self._requirements = requirements
        self._conda_channels = conf.conda_channels
        super(Conda, self).__init__(conf, python, requirements)

    @classmethod
    def matches(cls, python):
        # Calling conda can take a long time, so remember the result
        if python not in cls._matches_cache:
            cls._matches_cache[python] = cls._matches(python)
        return cls._matches_cache[python]

    @classmethod
    def _matches(cls, python):
        if not re.match(r'^[0-9].*$', python):
            # The python name should be a version number
            return False

        try:
            conda = _find_conda()
        except IOError:
            return False
        else:
            # This directory never gets created, since we're just
            # doing a dry run below. All it needs to be is something
            # that doesn't already exist.
            path = os.path.join(tempfile.gettempdir(), 'check')

            # Check that the version number is valid
            try:
                util.check_call([
                    conda,
                    'create',
                    '--yes',
                    '-p',
                    path,
                    'python={0}'.format(python),
                    '--dry-run'], display_error=False, dots=False)
            except util.ProcessError:
                return False
            else:
                return True

    def _setup(self):
        try:
            conda = _find_conda()
        except IOError as e:
            raise util.UserError(str(e))

        log.info("Creating conda environment for {0}".format(self.name))

        # create a temporary environment.yml file
        # and use that to generate the env for benchmarking
        env_file = tempfile.NamedTemporaryFile(mode='w', delete=False,
                                               suffix=".yml")
        try:
            env_file.write('name: {0}\n'
                           'channels:\n'.format(self.name))
            env_file.writelines((' - %s\n' % ch for ch in self._conda_channels))
            env_file.write('dependencies:\n'
                           ' - python={0}\n'
                           ' - wheel\n'
                           ' - pip\n'.format(self._python))

            # categorize & write dependencies based on pip vs. conda
            conda_args, pip_args = self._get_requirements(conda)
            env_file.writelines((' - %s\n' % s for s in conda_args))
            if pip_args:
                # and now specify the packages that are to be installed in
                # the pip subsection
                env_file.write(' - pip:\n')
                env_file.writelines((' - %s\n' % s for s in pip_args))
            env_file.close()

            util.check_output([conda] + ['env', 'create', '-f', env_file.name,
                                         '-p', self._path, '--force'])
        except Exception as exc:
            if os.path.isfile(env_file.name):
                with open(env_file.name, 'r') as f:
                    text = f.read()
                log.info("conda env create failed: in {} with:\n{}".format(self._path, text))
            raise
        finally:
            os.unlink(env_file.name)

    def _get_requirements(self, conda):
        if self._requirements:
            # retrieve and return all conda / pip dependencies
            conda_args = []
            pip_args = []

            for key, val in six.iteritems(self._requirements):
                if key.startswith('pip+'):
                    if val:
                        pip_args.append("{0}=={1}".format(key[4:], val))
                    else:
                        pip_args.append(key[4:])
                else:
                    if val:
                        conda_args.append("{0}={1}".format(key, val))
                    else:
                        conda_args.append(key)

            return conda_args, pip_args
        else:
            return [], []

    def run(self, args, **kwargs):
        log.debug("Running '{0}' in {1}".format(' '.join(args), self.name))
        return self.run_executable('python', args, **kwargs)

    def run_executable(self, executable, args, **kwargs):
        # Conda doesn't guarantee that user site directories are excluded
        kwargs["env"] = dict(kwargs.pop("env", os.environ),
                             PYTHONNOUSERSITE=str("True"))
        return super(Conda, self).run_executable(executable, args, **kwargs)
33.570681
93
0.547723
from __future__ import absolute_import, division, unicode_literals, print_function

import re
import os
import tempfile

import six

from .. import environment
from ..console import log
from .. import util

WIN = (os.name == "nt")


def _find_conda():
    if 'CONDA_EXE' in os.environ:
        conda = os.environ['CONDA_EXE']
    else:
        conda = util.which('conda')
    return conda


class Conda(environment.Environment):
    tool_name = "conda"
    _matches_cache = {}

    def __init__(self, conf, python, requirements):
        self._python = python
        self._requirements = requirements
        self._conda_channels = conf.conda_channels
        super(Conda, self).__init__(conf, python, requirements)

    @classmethod
    def matches(cls, python):
        if python not in cls._matches_cache:
            cls._matches_cache[python] = cls._matches(python)
        return cls._matches_cache[python]

    @classmethod
    def _matches(cls, python):
        if not re.match(r'^[0-9].*$', python):
            return False

        try:
            conda = _find_conda()
        except IOError:
            return False
        else:
            # doing a dry run below.  All it needs to be is something
            # that doesn't already exist.
            path = os.path.join(tempfile.gettempdir(), 'check')

            try:
                util.check_call([
                    conda,
                    'create',
                    '--yes',
                    '-p',
                    path,
                    'python={0}'.format(python),
                    '--dry-run'], display_error=False, dots=False)
            except util.ProcessError:
                return False
            else:
                return True

    def _setup(self):
        try:
            conda = _find_conda()
        except IOError as e:
            raise util.UserError(str(e))

        log.info("Creating conda environment for {0}".format(self.name))

        env_file = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix=".yml")
        try:
            env_file.write('name: {0}\n'
                           'channels:\n'.format(self.name))
            env_file.writelines(('   - %s\n' % ch for ch in self._conda_channels))
            env_file.write('dependencies:\n'
                           '   - python={0}\n'
                           '   - wheel\n'
                           '   - pip\n'.format(self._python))

            conda_args, pip_args = self._get_requirements(conda)
            env_file.writelines(('   - %s\n' % s for s in conda_args))
            if pip_args:
                env_file.write('   - pip:\n')
                env_file.writelines(('     - %s\n' % s for s in pip_args))
            env_file.close()
            util.check_output([conda] + ['env', 'create', '-f', env_file.name,
                                         '-p', self._path, '--force'])
        except Exception as exc:
            if os.path.isfile(env_file.name):
                with open(env_file.name, 'r') as f:
                    text = f.read()
                log.info("conda env create failed: in {} with:\n{}".format(self._path, text))
            raise
        finally:
            os.unlink(env_file.name)

    def _get_requirements(self, conda):
        if self._requirements:
            conda_args = []
            pip_args = []

            for key, val in six.iteritems(self._requirements):
                if key.startswith('pip+'):
                    if val:
                        pip_args.append("{0}=={1}".format(key[4:], val))
                    else:
                        pip_args.append(key[4:])
                else:
                    if val:
                        conda_args.append("{0}={1}".format(key, val))
                    else:
                        conda_args.append(key)

            return conda_args, pip_args
        else:
            return [], []

    def run(self, args, **kwargs):
        log.debug("Running '{0}' in {1}".format(' '.join(args), self.name))
        return self.run_executable('python', args, **kwargs)

    def run_executable(self, executable, args, **kwargs):
        kwargs["env"] = dict(kwargs.pop("env", os.environ),
                             PYTHONNOUSERSITE=str("True"))
        return super(Conda, self).run_executable(executable, args, **kwargs)
true
true
f7001b0dd9058f750bdd397267bbd285ae08c0d2
25,094
py
Python
pypy/interpreter/function.py
woodrow/pyoac
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
[ "MIT" ]
1
2019-05-27T00:58:46.000Z
2019-05-27T00:58:46.000Z
pypy/interpreter/function.py
woodrow/pyoac
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
[ "MIT" ]
null
null
null
pypy/interpreter/function.py
woodrow/pyoac
b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7
[ "MIT" ]
null
null
null
""" Function objects. In PyPy there is no difference between built-in and user-defined function objects; the difference lies in the code object found in their func_code attribute. """ from pypy.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError from pypy.interpreter.baseobjspace import Wrappable from pypy.interpreter.eval import Code from pypy.interpreter.argument import Arguments, ArgumentsFromValuestack funccallunrolling = unrolling_iterable(range(4)) class Function(Wrappable): """A function is a code object captured with some environment: an object space, a dictionary of globals, default arguments, and an arbitrary 'closure' passed to the code object.""" def __init__(self, space, code, w_globals=None, defs_w=[], closure=None, forcename=None, creator_nametoken=None): self.space = space self.name = forcename or code.co_name self.w_doc = None # lazily read from code.getdocstring() self.code = code # Code instance self.w_func_globals = w_globals # the globals dictionary self.closure = closure # normally, list of Cell instances or None self.defs_w = defs_w # list of w_default's self.w_func_dict = None # filled out below if needed self.w_module = None self.creator_nametoken = creator_nametoken def __repr__(self): # return "function %s.%s" % (self.space, self.name) # maybe we want this shorter: name = getattr(self, 'name', '?') return "<%s %s>" % (self.__class__.__name__, name) def call_args(self, args): # delegate activation to code return self.code.funcrun(self, args) def call_obj_args(self, w_obj, args): # delegate activation to code return self.code.funcrun_obj(self, w_obj, args) def getcode(self): return self.code def funccall(self, *args_w): # speed hack from pypy.interpreter import gateway from pypy.interpreter.pycode import PyCode code = self.getcode() # hook for the jit nargs = len(args_w) fast_natural_arity = code.fast_natural_arity if nargs == fast_natural_arity: if nargs == 0: assert isinstance(code, gateway.BuiltinCode0) return code.fastcall_0(self.space, self) elif nargs == 1: assert isinstance(code, gateway.BuiltinCode1) return code.fastcall_1(self.space, self, args_w[0]) elif nargs == 2: assert isinstance(code, gateway.BuiltinCode2) return code.fastcall_2(self.space, self, args_w[0], args_w[1]) elif nargs == 3: assert isinstance(code, gateway.BuiltinCode3) return code.fastcall_3(self.space, self, args_w[0], args_w[1], args_w[2]) elif nargs == 4: assert isinstance(code, gateway.BuiltinCode4) return code.fastcall_4(self.space, self, args_w[0], args_w[1], args_w[2], args_w[3]) elif (nargs|PyCode.FLATPYCALL) == fast_natural_arity: assert isinstance(code, PyCode) if nargs < 5: new_frame = self.space.createframe(code, self.w_func_globals, self.closure) for i in funccallunrolling: if i < nargs: new_frame.fastlocals_w[i] = args_w[i] return new_frame.run() elif nargs >= 1 and fast_natural_arity == -1: assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) return code.funcrun_obj(self, args_w[0], Arguments(self.space, list(args_w[1:]))) return self.call_args(Arguments(self.space, list(args_w))) def funccall_valuestack(self, nargs, frame): # speed hack from pypy.interpreter import gateway from pypy.interpreter.pycode import PyCode code = self.getcode() # hook for the jit fast_natural_arity = code.fast_natural_arity if nargs == fast_natural_arity: if nargs == 0: assert isinstance(code, gateway.BuiltinCode0) return code.fastcall_0(self.space, self) elif nargs == 1: assert isinstance(code, gateway.BuiltinCode1) return 
code.fastcall_1(self.space, self, frame.peekvalue(0)) elif nargs == 2: assert isinstance(code, gateway.BuiltinCode2) return code.fastcall_2(self.space, self, frame.peekvalue(1), frame.peekvalue(0)) elif nargs == 3: assert isinstance(code, gateway.BuiltinCode3) return code.fastcall_3(self.space, self, frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) elif nargs == 4: assert isinstance(code, gateway.BuiltinCode4) return code.fastcall_4(self.space, self, frame.peekvalue(3), frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) elif (nargs|PyCode.FLATPYCALL) == fast_natural_arity: assert isinstance(code, PyCode) return self._flat_pycall(code, nargs, frame) elif fast_natural_arity == -1 and nargs >= 1: assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) w_obj = frame.peekvalue(nargs-1) args = frame.make_arguments(nargs-1) try: return code.funcrun_obj(self, w_obj, args) finally: if isinstance(args, ArgumentsFromValuestack): args.frame = None args = frame.make_arguments(nargs) try: return self.call_args(args) finally: if isinstance(args, ArgumentsFromValuestack): args.frame = None def _flat_pycall(self, code, nargs, frame): # code is a PyCode new_frame = self.space.createframe(code, self.w_func_globals, self.closure) for i in xrange(nargs): w_arg = frame.peekvalue(nargs-1-i) new_frame.fastlocals_w[i] = w_arg return new_frame.run() def getdict(self): if self.w_func_dict is None: self.w_func_dict = self.space.newdict() return self.w_func_dict def setdict(self, space, w_dict): if not space.is_true(space.isinstance( w_dict, space.w_dict )): raise OperationError( space.w_TypeError, space.wrap("setting function's dictionary to a non-dict") ) self.w_func_dict = w_dict # unwrapping is done through unwrap_specs in typedef.py def descr_function__new__(space, w_subtype, w_code, w_globals, w_name=None, w_argdefs=None, w_closure=None): code = space.interp_w(Code, w_code) if not space.is_true(space.isinstance(w_globals, space.w_dict)): raise OperationError(space.w_TypeError, space.wrap("expected dict")) if not space.is_w(w_name, space.w_None): name = space.str_w(w_name) else: name = None if not space.is_w(w_argdefs, space.w_None): defs_w = space.unpackiterable(w_argdefs) else: defs_w = [] nfreevars = 0 from pypy.interpreter.pycode import PyCode if isinstance(code, PyCode): nfreevars = len(code.co_freevars) if space.is_w(w_closure, space.w_None) and nfreevars == 0: closure = None elif not space.is_w(space.type(w_closure), space.w_tuple): raise OperationError(space.w_TypeError, space.wrap("invalid closure")) else: from pypy.interpreter.nestedscope import Cell closure_w = space.unpackiterable(w_closure) n = len(closure_w) if nfreevars == 0: raise OperationError(space.w_ValueError, space.wrap("no closure needed")) elif nfreevars != n: raise OperationError(space.w_ValueError, space.wrap("closure is wrong size")) closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w] func = space.allocate_instance(Function, w_subtype) Function.__init__(func, space, code, w_globals, defs_w, closure, name) return space.wrap(func) def descr_function_call(self, __args__): return self.call_args(__args__) def descr_function_repr(self): return self.getrepr(self.space, 'function %s' % (self.name,)) # delicate _all = {'': None} def _freeze_(self): from pypy.interpreter.gateway import BuiltinCode if isinstance(self.code, BuiltinCode): identifier = self.code.identifier if Function._all.get(identifier, self) is not self: print "builtin code identifier %s used twice: %s and %s" % ( identifier, self, 
Function._all[identifier]) # we have been seen by other means so rtyping should not choke # on us Function._all[identifier] = self return False def find(identifier): return Function._all[identifier] find = staticmethod(find) def descr_function__reduce__(self, space): from pypy.interpreter.gateway import BuiltinCode from pypy.interpreter.mixedmodule import MixedModule w_mod = space.getbuiltinmodule('_pickle_support') mod = space.interp_w(MixedModule, w_mod) code = self.code if isinstance(code, BuiltinCode): new_inst = mod.get('builtin_function') return space.newtuple([new_inst, space.newtuple([space.wrap(code.identifier)])]) new_inst = mod.get('func_new') w = space.wrap if self.closure is None: w_closure = space.w_None else: w_closure = space.newtuple([w(cell) for cell in self.closure]) if self.w_doc is None: w_doc = space.w_None else: w_doc = self.w_doc if self.w_func_globals is None: w_func_globals = space.w_None else: w_func_globals = self.w_func_globals if self.w_func_dict is None: w_func_dict = space.w_None else: w_func_dict = self.w_func_dict nt = space.newtuple tup_base = [] tup_state = [ w(self.name), w_doc, w(self.code), w_func_globals, w_closure, nt(self.defs_w[:]), w_func_dict, self.w_module, ] return nt([new_inst, nt(tup_base), nt(tup_state)]) def descr_function__setstate__(self, space, w_args): from pypy.interpreter.pycode import PyCode args_w = space.unpackiterable(w_args) (w_name, w_doc, w_code, w_func_globals, w_closure, w_defs_w, w_func_dict, w_module) = args_w self.space = space self.name = space.str_w(w_name) self.code = space.interp_w(Code, w_code) if not space.is_w(w_closure, space.w_None): from pypy.interpreter.nestedscope import Cell closure_w = space.unpackiterable(w_closure) self.closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w] else: self.closure = None if space.is_w(w_doc, space.w_None): w_doc = None self.w_doc = w_doc if space.is_w(w_func_globals, space.w_None): w_func_globals = None self.w_func_globals = w_func_globals if space.is_w(w_func_dict, space.w_None): w_func_dict = None self.w_func_dict = w_func_dict self.defs_w = space.unpackiterable(w_defs_w) self.w_module = w_module def fget_func_defaults(space, self): values_w = self.defs_w if not values_w: return space.w_None return space.newtuple(values_w[:]) def fset_func_defaults(space, self, w_defaults): if space.is_w(w_defaults, space.w_None): self.defs_w = [] return if not space.is_true( space.isinstance( w_defaults, space.w_tuple ) ): raise OperationError( space.w_TypeError, space.wrap("func_defaults must be set to a tuple object or None") ) self.defs_w = space.unpackiterable( w_defaults ) def fdel_func_defaults(space, self): self.defs_w = [] def fget_func_doc(space, self): if self.w_doc is None: self.w_doc = self.code.getdocstring(space) return self.w_doc def fset_func_doc(space, self, w_doc): self.w_doc = w_doc def fget_func_name(space, self): return space.wrap(self.name) def fset_func_name(space, self, w_name): try: self.name = space.str_w(w_name) except OperationError, e: if e.match(space, space.w_TypeError): raise OperationError(space.w_TypeError, space.wrap("func_name must be set " "to a string object")) raise def fdel_func_doc(space, self): self.w_doc = space.w_None def fget___module__(space, self): if self.w_module is None: if self.w_func_globals is not None and not space.is_w(self.w_func_globals, space.w_None): self.w_module = space.call_method( self.w_func_globals, "get", space.wrap("__name__") ) else: self.w_module = space.w_None return self.w_module def fset___module__(space, self, 
w_module): self.w_module = w_module def fdel___module__(space, self): self.w_module = space.w_None def fget_func_code(space, self): from pypy.module.__builtin__.namespace_helpers import SLOTNAME_ALLTOKENS, SLOTNAME_NAMETOKEN, throw_access_exceptions, print_access_exceptions, _currentframe_has_access from sys import stderr if _currentframe_has_access(space, self.creator_nametoken): return space.wrap(self.code) else: if print_access_exceptions: print >> stderr, "\033[1;31mAccess Error:\033[1;m " + self.name + ".func_code" if throw_access_exceptions: #SRW TODO: raise pass else: return space.w_None def fset_func_code(space, self, w_code): #SRW !!!: This could be hazardous for untrusted code to set the code object which is executed by trusted code -- deal with in future from pypy.interpreter.pycode import PyCode code = space.interp_w(Code, w_code) closure_len = 0 if self.closure: closure_len = len(self.closure) if isinstance(code, PyCode) and closure_len != len(code.co_freevars): raise OperationError(space.w_ValueError, space.wrap("%s() requires a code object with %s free vars, not %s " % (self.name, closure_len, len(code.co_freevars)))) self.code = code def fget_func_closure(space, self): from pypy.module.__builtin__.namespace_helpers import SLOTNAME_ALLTOKENS, SLOTNAME_NAMETOKEN, throw_access_exceptions, print_access_exceptions, _currentframe_has_access from sys import stderr if self.closure is not None: if _currentframe_has_access(space, self.creator_nametoken): w_res = space.newtuple( [ space.wrap(i) for i in self.closure ] ) else: if print_access_exceptions: print >> stderr, "\033[1;31mAccess Error:\033[1;m " + self.name + ".func_closure" if throw_access_exceptions: #SRW TODO: raise pass else: w_res = space.w_None else: w_res = space.w_None return w_res def descr_function_get(space, w_function, w_obj, w_cls=None): """functionobject.__get__(obj[, type]) -> method""" # this is not defined as a method on Function because it's generally # useful logic: w_function can be any callable. It is used by Method too. 
asking_for_bound = (space.is_w(w_cls, space.w_None) or not space.is_w(w_obj, space.w_None) or space.is_w(w_cls, space.type(space.w_None))) if asking_for_bound: return space.wrap(Method(space, w_function, w_obj, w_cls)) else: return space.wrap(Method(space, w_function, None, w_cls)) class Method(Wrappable): """A method is a function bound to a specific instance or class.""" def __init__(self, space, w_function, w_instance, w_class): self.space = space self.w_function = w_function self.w_instance = w_instance # or None self.w_class = w_class # possibly space.w_None def descr_method__new__(space, w_subtype, w_function, w_instance, w_class=None): if space.is_w( w_instance, space.w_None ): w_instance = None method = space.allocate_instance(Method, w_subtype) Method.__init__(method, space, w_function, w_instance, w_class) return space.wrap(method) def __repr__(self): if self.w_instance: pre = "bound" else: pre = "unbound" return "%s method %s" % (pre, self.w_function.getname(self.space, '?')) def call_args(self, args): space = self.space if self.w_instance is not None: # bound method return space.call_obj_args(self.w_function, self.w_instance, args) # unbound method w_firstarg = args.firstarg() if w_firstarg is not None and ( space.abstract_isinstance_w(w_firstarg, self.w_class)): pass # ok else: myname = self.getname(space,"") clsdescr = self.w_class.getname(space,"") if clsdescr: clsdescr+=" " if w_firstarg is None: instdescr = "nothing" else: instname = space.abstract_getclass(w_firstarg).getname(space,"") if instname: instname += " " instdescr = "%sinstance" %instname msg = ("unbound method %s() must be called with %s" "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr) raise OperationError(space.w_TypeError, space.wrap(msg)) return space.call_args(self.w_function, args) def descr_method_get(self, w_obj, w_cls=None): space = self.space if self.w_instance is not None: return space.wrap(self) # already bound else: # only allow binding to a more specific class than before if (w_cls is not None and not space.is_w(w_cls, space.w_None) and not space.abstract_issubclass_w(w_cls, self.w_class)): return space.wrap(self) # subclass test failed else: return descr_function_get(space, self.w_function, w_obj, w_cls) def descr_method_call(self, __args__): return self.call_args(__args__) def descr_method_repr(self): space = self.space name = self.w_function.getname(self.space, '?') # XXX do we handle all cases sanely here? 
if space.is_w(self.w_class, space.w_None): w_class = space.type(self.w_instance) else: w_class = self.w_class typename = w_class.getname(self.space, '?') if self.w_instance is None: s = "<unbound method %s.%s>" % (typename, name) return space.wrap(s) else: objrepr = space.str_w(space.repr(self.w_instance)) info = 'bound method %s.%s of %s' % (typename, name, objrepr) # info = "method %s of %s object" % (name, typename) return self.w_instance.getrepr(self.space, info) def descr_method_getattribute(self, w_attr): space = self.space if space.str_w(w_attr) != '__doc__': try: return space.call_method(space.w_object, '__getattribute__', space.wrap(self), w_attr) except OperationError, e: if not e.match(space, space.w_AttributeError): raise # fall-back to the attribute of the underlying 'im_func' return space.getattr(self.w_function, w_attr) def descr_method_eq(self, w_other): space = self.space other = space.interpclass_w(w_other) if not isinstance(other, Method): return space.w_False if self.w_instance is None: if other.w_instance is not None: return space.w_False else: if other.w_instance is None: return space.w_False if not space.eq_w(self.w_instance, other.w_instance): return space.w_False return space.eq(self.w_function, other.w_function) def descr_method_hash(self): space = self.space w_result = space.hash(self.w_function) if self.w_instance is not None: w_result = space.xor(w_result, space.hash(self.w_instance)) return w_result def descr_method__reduce__(self, space): from pypy.interpreter.mixedmodule import MixedModule from pypy.interpreter.gateway import BuiltinCode w_mod = space.getbuiltinmodule('_pickle_support') mod = space.interp_w(MixedModule, w_mod) new_inst = mod.get('method_new') w = space.wrap w_instance = self.w_instance or space.w_None function = space.interpclass_w(self.w_function) if isinstance(function, Function) and isinstance(function.code, BuiltinCode): new_inst = mod.get('builtin_method_new') if space.is_w(w_instance, space.w_None): tup = [self.w_class, space.wrap(function.name)] else: tup = [w_instance, space.wrap(function.name)] elif space.is_w( self.w_class, space.w_None ): tup = [self.w_function, w_instance] else: tup = [self.w_function, w_instance, self.w_class] return space.newtuple([new_inst, space.newtuple(tup)]) class StaticMethod(Wrappable): """The staticmethod objects.""" def __init__(self, w_function): self.w_function = w_function def descr_staticmethod_get(self, w_obj, w_cls=None): """staticmethod(x).__get__(obj[, type]) -> x""" return self.w_function def descr_staticmethod__new__(space, w_type, w_function): return space.wrap(StaticMethod(w_function)) class ClassMethod(Wrappable): """The classmethod objects.""" def __init__(self, w_function): self.w_function = w_function def descr_classmethod_get(self, space, w_obj, w_klass=None): if space.is_w(w_klass, space.w_None): w_klass = space.type(w_obj) return space.wrap(Method(space, self.w_function, w_klass, space.w_None)) def descr_classmethod__new__(space, w_type, w_function): if not space.is_true(space.callable(w_function)): typename = space.type(w_function).getname(space, '?') raise OperationError(space.w_TypeError, space.wrap( "'%s' object is not callable" % typename)) return space.wrap(ClassMethod(w_function)) class BuiltinFunction(Function): def __init__(self, func): assert isinstance(func, Function) Function.__init__(self, func.space, func.code, func.w_func_globals, func.defs_w, func.closure, func.name) self.w_doc = func.w_doc self.w_func_dict = func.w_func_dict self.w_module = func.w_module def 
descr_builtinfunction__new__(space, w_subtype, w_func): func = space.interp_w(Function, w_func) bltin = space.allocate_instance(BuiltinFunction, w_subtype) BuiltinFunction.__init__(bltin, func) return space.wrap(bltin) def descr_function_repr(self): return self.space.wrap('<built-in function %s>' % (self.name,)) def is_builtin_code(w_func): from pypy.interpreter.gateway import BuiltinCode if isinstance(w_func, Method): w_func = w_func.w_function if isinstance(w_func, Function): code = w_func.getcode() else: code = None return isinstance(code, BuiltinCode)
41.477686
177
0.600064
""" Function objects. In PyPy there is no difference between built-in and user-defined function objects; the difference lies in the code object found in their func_code attribute. """ from pypy.rlib.unroll import unrolling_iterable from pypy.interpreter.error import OperationError from pypy.interpreter.baseobjspace import Wrappable from pypy.interpreter.eval import Code from pypy.interpreter.argument import Arguments, ArgumentsFromValuestack funccallunrolling = unrolling_iterable(range(4)) class Function(Wrappable): """A function is a code object captured with some environment: an object space, a dictionary of globals, default arguments, and an arbitrary 'closure' passed to the code object.""" def __init__(self, space, code, w_globals=None, defs_w=[], closure=None, forcename=None, creator_nametoken=None): self.space = space self.name = forcename or code.co_name self.w_doc = None self.code = code self.w_func_globals = w_globals self.closure = closure self.defs_w = defs_w self.w_func_dict = None # filled out below if needed self.w_module = None self.creator_nametoken = creator_nametoken def __repr__(self): # return "function %s.%s" % (self.space, self.name) # maybe we want this shorter: name = getattr(self, 'name', '?') return "<%s %s>" % (self.__class__.__name__, name) def call_args(self, args): # delegate activation to code return self.code.funcrun(self, args) def call_obj_args(self, w_obj, args): # delegate activation to code return self.code.funcrun_obj(self, w_obj, args) def getcode(self): return self.code def funccall(self, *args_w): # speed hack from pypy.interpreter import gateway from pypy.interpreter.pycode import PyCode code = self.getcode() # hook for the jit nargs = len(args_w) fast_natural_arity = code.fast_natural_arity if nargs == fast_natural_arity: if nargs == 0: assert isinstance(code, gateway.BuiltinCode0) return code.fastcall_0(self.space, self) elif nargs == 1: assert isinstance(code, gateway.BuiltinCode1) return code.fastcall_1(self.space, self, args_w[0]) elif nargs == 2: assert isinstance(code, gateway.BuiltinCode2) return code.fastcall_2(self.space, self, args_w[0], args_w[1]) elif nargs == 3: assert isinstance(code, gateway.BuiltinCode3) return code.fastcall_3(self.space, self, args_w[0], args_w[1], args_w[2]) elif nargs == 4: assert isinstance(code, gateway.BuiltinCode4) return code.fastcall_4(self.space, self, args_w[0], args_w[1], args_w[2], args_w[3]) elif (nargs|PyCode.FLATPYCALL) == fast_natural_arity: assert isinstance(code, PyCode) if nargs < 5: new_frame = self.space.createframe(code, self.w_func_globals, self.closure) for i in funccallunrolling: if i < nargs: new_frame.fastlocals_w[i] = args_w[i] return new_frame.run() elif nargs >= 1 and fast_natural_arity == -1: assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) return code.funcrun_obj(self, args_w[0], Arguments(self.space, list(args_w[1:]))) return self.call_args(Arguments(self.space, list(args_w))) def funccall_valuestack(self, nargs, frame): # speed hack from pypy.interpreter import gateway from pypy.interpreter.pycode import PyCode code = self.getcode() # hook for the jit fast_natural_arity = code.fast_natural_arity if nargs == fast_natural_arity: if nargs == 0: assert isinstance(code, gateway.BuiltinCode0) return code.fastcall_0(self.space, self) elif nargs == 1: assert isinstance(code, gateway.BuiltinCode1) return code.fastcall_1(self.space, self, frame.peekvalue(0)) elif nargs == 2: assert isinstance(code, gateway.BuiltinCode2) return code.fastcall_2(self.space, self, 
frame.peekvalue(1), frame.peekvalue(0)) elif nargs == 3: assert isinstance(code, gateway.BuiltinCode3) return code.fastcall_3(self.space, self, frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) elif nargs == 4: assert isinstance(code, gateway.BuiltinCode4) return code.fastcall_4(self.space, self, frame.peekvalue(3), frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) elif (nargs|PyCode.FLATPYCALL) == fast_natural_arity: assert isinstance(code, PyCode) return self._flat_pycall(code, nargs, frame) elif fast_natural_arity == -1 and nargs >= 1: assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) w_obj = frame.peekvalue(nargs-1) args = frame.make_arguments(nargs-1) try: return code.funcrun_obj(self, w_obj, args) finally: if isinstance(args, ArgumentsFromValuestack): args.frame = None args = frame.make_arguments(nargs) try: return self.call_args(args) finally: if isinstance(args, ArgumentsFromValuestack): args.frame = None def _flat_pycall(self, code, nargs, frame): # code is a PyCode new_frame = self.space.createframe(code, self.w_func_globals, self.closure) for i in xrange(nargs): w_arg = frame.peekvalue(nargs-1-i) new_frame.fastlocals_w[i] = w_arg return new_frame.run() def getdict(self): if self.w_func_dict is None: self.w_func_dict = self.space.newdict() return self.w_func_dict def setdict(self, space, w_dict): if not space.is_true(space.isinstance( w_dict, space.w_dict )): raise OperationError( space.w_TypeError, space.wrap("setting function's dictionary to a non-dict") ) self.w_func_dict = w_dict def descr_function__new__(space, w_subtype, w_code, w_globals, w_name=None, w_argdefs=None, w_closure=None): code = space.interp_w(Code, w_code) if not space.is_true(space.isinstance(w_globals, space.w_dict)): raise OperationError(space.w_TypeError, space.wrap("expected dict")) if not space.is_w(w_name, space.w_None): name = space.str_w(w_name) else: name = None if not space.is_w(w_argdefs, space.w_None): defs_w = space.unpackiterable(w_argdefs) else: defs_w = [] nfreevars = 0 from pypy.interpreter.pycode import PyCode if isinstance(code, PyCode): nfreevars = len(code.co_freevars) if space.is_w(w_closure, space.w_None) and nfreevars == 0: closure = None elif not space.is_w(space.type(w_closure), space.w_tuple): raise OperationError(space.w_TypeError, space.wrap("invalid closure")) else: from pypy.interpreter.nestedscope import Cell closure_w = space.unpackiterable(w_closure) n = len(closure_w) if nfreevars == 0: raise OperationError(space.w_ValueError, space.wrap("no closure needed")) elif nfreevars != n: raise OperationError(space.w_ValueError, space.wrap("closure is wrong size")) closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w] func = space.allocate_instance(Function, w_subtype) Function.__init__(func, space, code, w_globals, defs_w, closure, name) return space.wrap(func) def descr_function_call(self, __args__): return self.call_args(__args__) def descr_function_repr(self): return self.getrepr(self.space, 'function %s' % (self.name,)) _all = {'': None} def _freeze_(self): from pypy.interpreter.gateway import BuiltinCode if isinstance(self.code, BuiltinCode): identifier = self.code.identifier if Function._all.get(identifier, self) is not self: print "builtin code identifier %s used twice: %s and %s" % ( identifier, self, Function._all[identifier]) Function._all[identifier] = self return False def find(identifier): return Function._all[identifier] find = staticmethod(find) def descr_function__reduce__(self, space): from pypy.interpreter.gateway 
import BuiltinCode from pypy.interpreter.mixedmodule import MixedModule w_mod = space.getbuiltinmodule('_pickle_support') mod = space.interp_w(MixedModule, w_mod) code = self.code if isinstance(code, BuiltinCode): new_inst = mod.get('builtin_function') return space.newtuple([new_inst, space.newtuple([space.wrap(code.identifier)])]) new_inst = mod.get('func_new') w = space.wrap if self.closure is None: w_closure = space.w_None else: w_closure = space.newtuple([w(cell) for cell in self.closure]) if self.w_doc is None: w_doc = space.w_None else: w_doc = self.w_doc if self.w_func_globals is None: w_func_globals = space.w_None else: w_func_globals = self.w_func_globals if self.w_func_dict is None: w_func_dict = space.w_None else: w_func_dict = self.w_func_dict nt = space.newtuple tup_base = [] tup_state = [ w(self.name), w_doc, w(self.code), w_func_globals, w_closure, nt(self.defs_w[:]), w_func_dict, self.w_module, ] return nt([new_inst, nt(tup_base), nt(tup_state)]) def descr_function__setstate__(self, space, w_args): from pypy.interpreter.pycode import PyCode args_w = space.unpackiterable(w_args) (w_name, w_doc, w_code, w_func_globals, w_closure, w_defs_w, w_func_dict, w_module) = args_w self.space = space self.name = space.str_w(w_name) self.code = space.interp_w(Code, w_code) if not space.is_w(w_closure, space.w_None): from pypy.interpreter.nestedscope import Cell closure_w = space.unpackiterable(w_closure) self.closure = [space.interp_w(Cell, w_cell) for w_cell in closure_w] else: self.closure = None if space.is_w(w_doc, space.w_None): w_doc = None self.w_doc = w_doc if space.is_w(w_func_globals, space.w_None): w_func_globals = None self.w_func_globals = w_func_globals if space.is_w(w_func_dict, space.w_None): w_func_dict = None self.w_func_dict = w_func_dict self.defs_w = space.unpackiterable(w_defs_w) self.w_module = w_module def fget_func_defaults(space, self): values_w = self.defs_w if not values_w: return space.w_None return space.newtuple(values_w[:]) def fset_func_defaults(space, self, w_defaults): if space.is_w(w_defaults, space.w_None): self.defs_w = [] return if not space.is_true( space.isinstance( w_defaults, space.w_tuple ) ): raise OperationError( space.w_TypeError, space.wrap("func_defaults must be set to a tuple object or None") ) self.defs_w = space.unpackiterable( w_defaults ) def fdel_func_defaults(space, self): self.defs_w = [] def fget_func_doc(space, self): if self.w_doc is None: self.w_doc = self.code.getdocstring(space) return self.w_doc def fset_func_doc(space, self, w_doc): self.w_doc = w_doc def fget_func_name(space, self): return space.wrap(self.name) def fset_func_name(space, self, w_name): try: self.name = space.str_w(w_name) except OperationError, e: if e.match(space, space.w_TypeError): raise OperationError(space.w_TypeError, space.wrap("func_name must be set " "to a string object")) raise def fdel_func_doc(space, self): self.w_doc = space.w_None def fget___module__(space, self): if self.w_module is None: if self.w_func_globals is not None and not space.is_w(self.w_func_globals, space.w_None): self.w_module = space.call_method( self.w_func_globals, "get", space.wrap("__name__") ) else: self.w_module = space.w_None return self.w_module def fset___module__(space, self, w_module): self.w_module = w_module def fdel___module__(space, self): self.w_module = space.w_None def fget_func_code(space, self): from pypy.module.__builtin__.namespace_helpers import SLOTNAME_ALLTOKENS, SLOTNAME_NAMETOKEN, throw_access_exceptions, print_access_exceptions, 
_currentframe_has_access from sys import stderr if _currentframe_has_access(space, self.creator_nametoken): return space.wrap(self.code) else: if print_access_exceptions: print >> stderr, "\033[1;31mAccess Error:\033[1;m " + self.name + ".func_code" if throw_access_exceptions: pass else: return space.w_None def fset_func_code(space, self, w_code): from pypy.interpreter.pycode import PyCode code = space.interp_w(Code, w_code) closure_len = 0 if self.closure: closure_len = len(self.closure) if isinstance(code, PyCode) and closure_len != len(code.co_freevars): raise OperationError(space.w_ValueError, space.wrap("%s() requires a code object with %s free vars, not %s " % (self.name, closure_len, len(code.co_freevars)))) self.code = code def fget_func_closure(space, self): from pypy.module.__builtin__.namespace_helpers import SLOTNAME_ALLTOKENS, SLOTNAME_NAMETOKEN, throw_access_exceptions, print_access_exceptions, _currentframe_has_access from sys import stderr if self.closure is not None: if _currentframe_has_access(space, self.creator_nametoken): w_res = space.newtuple( [ space.wrap(i) for i in self.closure ] ) else: if print_access_exceptions: print >> stderr, "\033[1;31mAccess Error:\033[1;m " + self.name + ".func_closure" if throw_access_exceptions: pass else: w_res = space.w_None else: w_res = space.w_None return w_res def descr_function_get(space, w_function, w_obj, w_cls=None): """functionobject.__get__(obj[, type]) -> method""" # useful logic: w_function can be any callable. It is used by Method too. asking_for_bound = (space.is_w(w_cls, space.w_None) or not space.is_w(w_obj, space.w_None) or space.is_w(w_cls, space.type(space.w_None))) if asking_for_bound: return space.wrap(Method(space, w_function, w_obj, w_cls)) else: return space.wrap(Method(space, w_function, None, w_cls)) class Method(Wrappable): """A method is a function bound to a specific instance or class.""" def __init__(self, space, w_function, w_instance, w_class): self.space = space self.w_function = w_function self.w_instance = w_instance # or None self.w_class = w_class # possibly space.w_None def descr_method__new__(space, w_subtype, w_function, w_instance, w_class=None): if space.is_w( w_instance, space.w_None ): w_instance = None method = space.allocate_instance(Method, w_subtype) Method.__init__(method, space, w_function, w_instance, w_class) return space.wrap(method) def __repr__(self): if self.w_instance: pre = "bound" else: pre = "unbound" return "%s method %s" % (pre, self.w_function.getname(self.space, '?')) def call_args(self, args): space = self.space if self.w_instance is not None: # bound method return space.call_obj_args(self.w_function, self.w_instance, args) # unbound method w_firstarg = args.firstarg() if w_firstarg is not None and ( space.abstract_isinstance_w(w_firstarg, self.w_class)): pass # ok else: myname = self.getname(space,"") clsdescr = self.w_class.getname(space,"") if clsdescr: clsdescr+=" " if w_firstarg is None: instdescr = "nothing" else: instname = space.abstract_getclass(w_firstarg).getname(space,"") if instname: instname += " " instdescr = "%sinstance" %instname msg = ("unbound method %s() must be called with %s" "instance as first argument (got %s instead)") % (myname, clsdescr, instdescr) raise OperationError(space.w_TypeError, space.wrap(msg)) return space.call_args(self.w_function, args) def descr_method_get(self, w_obj, w_cls=None): space = self.space if self.w_instance is not None: return space.wrap(self) # already bound else: # only allow binding to a more specific class than 
before if (w_cls is not None and not space.is_w(w_cls, space.w_None) and not space.abstract_issubclass_w(w_cls, self.w_class)): return space.wrap(self) # subclass test failed else: return descr_function_get(space, self.w_function, w_obj, w_cls) def descr_method_call(self, __args__): return self.call_args(__args__) def descr_method_repr(self): space = self.space name = self.w_function.getname(self.space, '?') # XXX do we handle all cases sanely here? if space.is_w(self.w_class, space.w_None): w_class = space.type(self.w_instance) else: w_class = self.w_class typename = w_class.getname(self.space, '?') if self.w_instance is None: s = "<unbound method %s.%s>" % (typename, name) return space.wrap(s) else: objrepr = space.str_w(space.repr(self.w_instance)) info = 'bound method %s.%s of %s' % (typename, name, objrepr) # info = "method %s of %s object" % (name, typename) return self.w_instance.getrepr(self.space, info) def descr_method_getattribute(self, w_attr): space = self.space if space.str_w(w_attr) != '__doc__': try: return space.call_method(space.w_object, '__getattribute__', space.wrap(self), w_attr) except OperationError, e: if not e.match(space, space.w_AttributeError): raise # fall-back to the attribute of the underlying 'im_func' return space.getattr(self.w_function, w_attr) def descr_method_eq(self, w_other): space = self.space other = space.interpclass_w(w_other) if not isinstance(other, Method): return space.w_False if self.w_instance is None: if other.w_instance is not None: return space.w_False else: if other.w_instance is None: return space.w_False if not space.eq_w(self.w_instance, other.w_instance): return space.w_False return space.eq(self.w_function, other.w_function) def descr_method_hash(self): space = self.space w_result = space.hash(self.w_function) if self.w_instance is not None: w_result = space.xor(w_result, space.hash(self.w_instance)) return w_result def descr_method__reduce__(self, space): from pypy.interpreter.mixedmodule import MixedModule from pypy.interpreter.gateway import BuiltinCode w_mod = space.getbuiltinmodule('_pickle_support') mod = space.interp_w(MixedModule, w_mod) new_inst = mod.get('method_new') w = space.wrap w_instance = self.w_instance or space.w_None function = space.interpclass_w(self.w_function) if isinstance(function, Function) and isinstance(function.code, BuiltinCode): new_inst = mod.get('builtin_method_new') if space.is_w(w_instance, space.w_None): tup = [self.w_class, space.wrap(function.name)] else: tup = [w_instance, space.wrap(function.name)] elif space.is_w( self.w_class, space.w_None ): tup = [self.w_function, w_instance] else: tup = [self.w_function, w_instance, self.w_class] return space.newtuple([new_inst, space.newtuple(tup)]) class StaticMethod(Wrappable): """The staticmethod objects.""" def __init__(self, w_function): self.w_function = w_function def descr_staticmethod_get(self, w_obj, w_cls=None): """staticmethod(x).__get__(obj[, type]) -> x""" return self.w_function def descr_staticmethod__new__(space, w_type, w_function): return space.wrap(StaticMethod(w_function)) class ClassMethod(Wrappable): """The classmethod objects.""" def __init__(self, w_function): self.w_function = w_function def descr_classmethod_get(self, space, w_obj, w_klass=None): if space.is_w(w_klass, space.w_None): w_klass = space.type(w_obj) return space.wrap(Method(space, self.w_function, w_klass, space.w_None)) def descr_classmethod__new__(space, w_type, w_function): if not space.is_true(space.callable(w_function)): typename = 
space.type(w_function).getname(space, '?') raise OperationError(space.w_TypeError, space.wrap( "'%s' object is not callable" % typename)) return space.wrap(ClassMethod(w_function)) class BuiltinFunction(Function): def __init__(self, func): assert isinstance(func, Function) Function.__init__(self, func.space, func.code, func.w_func_globals, func.defs_w, func.closure, func.name) self.w_doc = func.w_doc self.w_func_dict = func.w_func_dict self.w_module = func.w_module def descr_builtinfunction__new__(space, w_subtype, w_func): func = space.interp_w(Function, w_func) bltin = space.allocate_instance(BuiltinFunction, w_subtype) BuiltinFunction.__init__(bltin, func) return space.wrap(bltin) def descr_function_repr(self): return self.space.wrap('<built-in function %s>' % (self.name,)) def is_builtin_code(w_func): from pypy.interpreter.gateway import BuiltinCode if isinstance(w_func, Method): w_func = w_func.w_function if isinstance(w_func, Function): code = w_func.getcode() else: code = None return isinstance(code, BuiltinCode)
false
true
f7001b697392ceebda04fd774fb9d56f47820f4b
17,633
py
Python
venv/lib/python3.7/site-packages/torch/utils/benchmark/utils/timer.py
GOOGLE-M/SGC
78ad8d02b80808302e38559e2d0f430f66a809bd
[ "MIT" ]
null
null
null
venv/lib/python3.7/site-packages/torch/utils/benchmark/utils/timer.py
GOOGLE-M/SGC
78ad8d02b80808302e38559e2d0f430f66a809bd
[ "MIT" ]
null
null
null
venv/lib/python3.7/site-packages/torch/utils/benchmark/utils/timer.py
GOOGLE-M/SGC
78ad8d02b80808302e38559e2d0f430f66a809bd
[ "MIT" ]
null
null
null
"""Timer class based on the timeit.Timer class, but torch aware.""" import enum import timeit import textwrap from typing import Any, Callable, Dict, List, NoReturn, Optional, Type, Union import numpy as np import torch from torch.utils.benchmark.utils import common, cpp_jit from torch.utils.benchmark.utils._stubs import TimerClass, TimeitModuleType from torch.utils.benchmark.utils.valgrind_wrapper import timer_interface as valgrind_timer_interface __all__ = ["Timer", "timer", "Language"] if torch.has_cuda and torch.cuda.is_available(): def timer() -> float: torch.cuda.synchronize() return timeit.default_timer() else: timer = timeit.default_timer class Language(enum.Enum): PYTHON = 0 CPP = 1 class CPPTimer: def __init__( self, stmt: str, setup: str, timer: Callable[[], float], globals: Dict[str, Any], ) -> None: if timer is not timeit.default_timer: raise NotImplementedError( "PyTorch was built with CUDA and a GPU is present; however " "Timer does not yet support GPU measurements. If your " "code is CPU only, pass `timer=timeit.default_timer` to the " "Timer's constructor to indicate this. (Note that this will " "produce incorrect results if the GPU is in fact used, as " "Timer will not synchronize CUDA.)" ) if globals: raise ValueError("C++ timing does not support globals.") self._stmt: str = textwrap.dedent(stmt) self._setup: str = textwrap.dedent(setup) self._timeit_module: Optional[TimeitModuleType] = None def timeit(self, number: int) -> float: if self._timeit_module is None: self._timeit_module = cpp_jit.compile_timeit_template( self._stmt, self._setup, ) return self._timeit_module.timeit(number) class Timer(object): """Helper class for measuring execution time of PyTorch statements. For a full tutorial on how to use this class, see: https://pytorch.org/tutorials/recipes/recipes/benchmark.html The PyTorch Timer is based on `timeit.Timer` (and in fact uses `timeit.Timer` internally), but with several key differences: 1) Runtime aware: Timer will perform warmups (important as some elements of PyTorch are lazily initialized), set threadpool size so that comparisons are apples-to-apples, and synchronize asynchronous CUDA functions when necessary. 2) Focus on replicates: When measuring code, and particularly complex kernels / models, run-to-run variation is a significant confounding factor. It is expected that all measurements should include replicates to quantify noise and allow median computation, which is more robust than mean. To that effect, this class deviates from the `timeit` API by conceptually merging `timeit.Timer.repeat` and `timeit.Timer.autorange`. (Exact algorithms are discussed in method docstrings.) The `timeit` method is replicated for cases where an adaptive strategy is not desired. 3) Optional metadata: When defining a Timer, one can optionally specify `label`, `sub_label`, `description`, and `env`. (Defined later) These fields are included in the representation of result object and by the `Compare` class to group and display results for comparison. 4) Instruction counts In addition to wall times, Timer can run a statement under Callgrind and report instructions executed. Directly analogous to `timeit.Timer` constructor arguments: `stmt`, `setup`, `timer`, `globals` PyTorch Timer specific constructor arguments: `label`, `sub_label`, `description`, `env`, `num_threads` Args: stmt: Code snippet to be run in a loop and timed. setup: Optional setup code. Used to define variables used in `stmt` timer: Callable which returns the current time. 
If PyTorch was built without CUDA or there is no GPU present, this defaults to `timeit.default_timer`; otherwise it will synchronize CUDA before measuring the time. globals: A dict which defines the global variables when `stmt` is being executed. This is the other method for providing variables which `stmt` needs. label: String which summarizes `stmt`. For instance, if `stmt` is "torch.nn.functional.relu(torch.add(x, 1, out=out))" one might set label to "ReLU(x + 1)" to improve readability. sub_label: Provide supplemental information to disambiguate measurements with identical stmt or label. For instance, in our example above sub_label might be "float" or "int", so that it is easy to differentiate: "ReLU(x + 1): (float)" "ReLU(x + 1): (int)" when printing Measurements or summarizing using `Compare`. description: String to distinguish measurements with identical label and sub_label. The principal use of `description` is to signal to `Compare` the columns of data. For instance one might set it based on the input size to create a table of the form: :: | n=1 | n=4 | ... ------------- ... ReLU(x + 1): (float) | ... | ... | ... ReLU(x + 1): (int) | ... | ... | ... using `Compare`. It is also included when printing a Measurement. env: This tag indicates that otherwise identical tasks were run in different environments, and are therefore not equivilent, for instance when A/B testing a change to a kernel. `Compare` will treat Measurements with different `env` specification as distinct when merging replicate runs. num_threads: The size of the PyTorch threadpool when executing `stmt`. Single threaded performace is important as both a key inference workload and a good indicator of intrinsic algorithmic efficiency, so the default is set to one. This is in contrast to the default PyTorch threadpool size which tries to utilize all cores. """ _timer_cls: Type[TimerClass] = timeit.Timer def __init__( self, stmt: str = "pass", setup: str = "pass", timer: Callable[[], float] = timer, globals: Optional[Dict[str, Any]] = None, label: Optional[str] = None, sub_label: Optional[str] = None, description: Optional[str] = None, env: Optional[str] = None, num_threads: int = 1, language: Union[Language, str] = Language.PYTHON, ): if not isinstance(stmt, str): raise ValueError("Currently only a `str` stmt is supported.") # We copy `globals` to prevent mutations from leaking. # (For instance, `eval` adds the `__builtins__` key) self._globals = dict(globals or {}) if language in (Language.PYTHON, "py", "python"): # Include `torch` if not specified as a convenience feature. self._globals.setdefault("torch", torch) self._language: Language = Language.PYTHON elif language in (Language.CPP, "cpp", "c++"): assert self._timer_cls is timeit.Timer, "_timer_cls has already been swapped." self._timer_cls = CPPTimer setup = ("" if setup == "pass" else setup) self._language = Language.CPP else: raise ValueError(f"Invalid language `{language}`.") # Convenience adjustment so that multi-line code snippets defined in # functions do not IndentationError (Python) or look odd (C++). The # leading newline removal is for the initial newline that appears when # defining block strings. For instance: # textwrap.dedent(""" # print("This is a stmt") # """) # produces '\nprint("This is a stmt")\n'. # # Stripping this down to 'print("This is a stmt")' doesn't change # what gets executed, but it makes __repr__'s nicer. 
stmt = textwrap.dedent(stmt) stmt = (stmt[1:] if stmt and stmt[0] == "\n" else stmt).rstrip() setup = textwrap.dedent(setup) setup = (setup[1:] if setup and setup[0] == "\n" else setup).rstrip() self._timer = self._timer_cls( stmt=stmt, setup=setup, timer=timer, globals=valgrind_timer_interface.CopyIfCallgrind.unwrap_all(self._globals), ) self._task_spec = common.TaskSpec( stmt=stmt, setup=setup, label=label, sub_label=sub_label, description=description, env=env, num_threads=num_threads, ) def timeit(self, number: int = 1000000) -> common.Measurement: """Mirrors the semantics of timeit.Timer.timeit(). Execute the main statement (`stmt`) `number` times. https://docs.python.org/3/library/timeit.html#timeit.Timer.timeit """ with common.set_torch_threads(self._task_spec.num_threads): # Warmup self._timer.timeit(number=max(int(number // 100), 1)) return common.Measurement( number_per_run=number, raw_times=[self._timer.timeit(number=number)], task_spec=self._task_spec ) def repeat(self, repeat: int = -1, number: int = -1) -> None: raise NotImplementedError("See `Timer.blocked_autorange.`") def autorange(self, callback: Optional[Callable[[int, float], NoReturn]] = None) -> None: raise NotImplementedError("See `Timer.blocked_autorange.`") def _threaded_measurement_loop( self, number: int, time_hook: Callable[[], float], stop_hook: Callable[[List[float]], bool], min_run_time: float, max_run_time: Optional[float] = None, callback: Optional[Callable[[int, float], NoReturn]] = None ) -> List[float]: total_time = 0.0 can_stop = False times: List[float] = [] with common.set_torch_threads(self._task_spec.num_threads): while (total_time < min_run_time) or (not can_stop): time_spent = time_hook() times.append(time_spent) total_time += time_spent if callback: callback(number, time_spent) can_stop = stop_hook(times) if max_run_time and total_time > max_run_time: break return times def _estimate_block_size(self, min_run_time: float) -> int: with common.set_torch_threads(self._task_spec.num_threads): # Estimate the block size needed for measurement to be negligible # compared to the inner loop. This also serves as a warmup. overhead = np.median([self._timer.timeit(0) for _ in range(5)]) number = 1 while True: time_taken = self._timer.timeit(number) relative_overhead = overhead / time_taken if relative_overhead <= 1e-4 and time_taken >= min_run_time / 1000: break if time_taken > min_run_time: break number *= 10 return number def adaptive_autorange( self, threshold: float = 0.1, *, min_run_time: float = 0.01, max_run_time: float = 10.0, callback: Optional[Callable[[int, float], NoReturn]] = None, ) -> common.Measurement: number = self._estimate_block_size(min_run_time=0.05) def time_hook() -> float: return self._timer.timeit(number) def stop_hook(times: List[float]) -> bool: if len(times) > 3: return common.Measurement( number_per_run=number, raw_times=times, task_spec=self._task_spec ).meets_confidence(threshold=threshold) return False times = self._threaded_measurement_loop( number, time_hook, stop_hook, min_run_time, max_run_time, callback=callback) return common.Measurement( number_per_run=number, raw_times=times, task_spec=self._task_spec ) def blocked_autorange( self, callback: Optional[Callable[[int, float], NoReturn]] = None, min_run_time: float = 0.2, ) -> common.Measurement: """Measure many replicates while keeping timer overhead to a minimum. 
At a high level, blocked_autorange executes the following pseudo-code:: `setup` total_time = 0 while total_time < min_run_time start = timer() for _ in range(block_size): `stmt` total_time += (timer() - start) Note the variable `block_size` in the inner loop. The choice of block size is important to measurement quality, and must balance two competing objectives: 1) A small block size results in more replicates and generally better statistics. 2) A large block size better amortizes the cost of `timer` invocation, and results in a less biased measurement. This is important because CUDA syncronization time is non-trivial (order single to low double digit microseconds) and would otherwise bias the measurement. blocked_autorange sets block_size by running a warmup period, increasing block size until timer overhead is less than 0.1% of the overall computation. This value is then used for the main measurement loop. Returns: A `Measurement` object that contains measured runtimes and repetition counts, and can be used to compute statistics. (mean, median, etc.) """ number = self._estimate_block_size(min_run_time) def time_hook() -> float: return self._timer.timeit(number) def stop_hook(times: List[float]) -> bool: return True times = self._threaded_measurement_loop( number, time_hook, stop_hook, min_run_time=min_run_time, callback=callback) return common.Measurement( number_per_run=number, raw_times=times, task_spec=self._task_spec ) def collect_callgrind( self, number: int = 100, collect_baseline: bool = True ) -> valgrind_timer_interface.CallgrindStats: """Collect instruction counts using Callgrind. Unlike wall times, instruction counts are deterministic (modulo non-determinism in the program itself and small amounts of jitter from the Python interpreter.) This makes them ideal for detailed performance analysis. This method runs `stmt` in a separate process so that Valgrind can instrument the program. Performance is severely degraded due to the instrumentation, howevever this is ameliorated by the fact that a small number of iterations is generally sufficient to obtain good measurements. In order to to use this method `valgrind`, `callgrind_control`, and `callgrind_annotate` must be installed. Because there is a process boundary between the caller (this process) and the `stmt` execution, `globals` cannot contain arbitrary in-memory data structures. (Unlike timing methods) Instead, globals are restricted to builtins, `nn.Modules`'s, and TorchScripted functions/modules to reduce the surprise factor from serialization and subsequent deserialization. The `GlobalsBridge` class provides more detail on this subject. Take particular care with nn.Modules: they rely on pickle and you may need to add an import to `setup` for them to transfer properly. By default, a profile for an empty statement will be collected and cached to indicate how many instructions are from the Python loop which drives `stmt`. Returns: A `CallgrindStats` object which provides instruction counts and some basic facilities for analyzing and manipulating results. """ if not isinstance(self._task_spec.stmt, str): raise ValueError("`collect_callgrind` currently only supports string `stmt`") # Check that the statement is valid. It doesn't guarantee success, but it's much # simpler and quicker to raise an exception for a faulty `stmt` or `setup` in # the parent process rather than the valgrind subprocess. 
self._timer.timeit(1) is_python = (self._language == Language.PYTHON) assert is_python or not self._globals return valgrind_timer_interface.wrapper_singleton().collect_callgrind( task_spec=self._task_spec, globals=self._globals, number=number, collect_baseline=collect_baseline and is_python, is_python=is_python)
40.166287
100
0.61969
import enum import timeit import textwrap from typing import Any, Callable, Dict, List, NoReturn, Optional, Type, Union import numpy as np import torch from torch.utils.benchmark.utils import common, cpp_jit from torch.utils.benchmark.utils._stubs import TimerClass, TimeitModuleType from torch.utils.benchmark.utils.valgrind_wrapper import timer_interface as valgrind_timer_interface __all__ = ["Timer", "timer", "Language"] if torch.has_cuda and torch.cuda.is_available(): def timer() -> float: torch.cuda.synchronize() return timeit.default_timer() else: timer = timeit.default_timer class Language(enum.Enum): PYTHON = 0 CPP = 1 class CPPTimer: def __init__( self, stmt: str, setup: str, timer: Callable[[], float], globals: Dict[str, Any], ) -> None: if timer is not timeit.default_timer: raise NotImplementedError( "PyTorch was built with CUDA and a GPU is present; however " "Timer does not yet support GPU measurements. If your " "code is CPU only, pass `timer=timeit.default_timer` to the " "Timer's constructor to indicate this. (Note that this will " "produce incorrect results if the GPU is in fact used, as " "Timer will not synchronize CUDA.)" ) if globals: raise ValueError("C++ timing does not support globals.") self._stmt: str = textwrap.dedent(stmt) self._setup: str = textwrap.dedent(setup) self._timeit_module: Optional[TimeitModuleType] = None def timeit(self, number: int) -> float: if self._timeit_module is None: self._timeit_module = cpp_jit.compile_timeit_template( self._stmt, self._setup, ) return self._timeit_module.timeit(number) class Timer(object): _timer_cls: Type[TimerClass] = timeit.Timer def __init__( self, stmt: str = "pass", setup: str = "pass", timer: Callable[[], float] = timer, globals: Optional[Dict[str, Any]] = None, label: Optional[str] = None, sub_label: Optional[str] = None, description: Optional[str] = None, env: Optional[str] = None, num_threads: int = 1, language: Union[Language, str] = Language.PYTHON, ): if not isinstance(stmt, str): raise ValueError("Currently only a `str` stmt is supported.") # We copy `globals` to prevent mutations from leaking. # (For instance, `eval` adds the `__builtins__` key) self._globals = dict(globals or {}) if language in (Language.PYTHON, "py", "python"): # Include `torch` if not specified as a convenience feature. self._globals.setdefault("torch", torch) self._language: Language = Language.PYTHON elif language in (Language.CPP, "cpp", "c++"): assert self._timer_cls is timeit.Timer, "_timer_cls has already been swapped." self._timer_cls = CPPTimer setup = ("" if setup == "pass" else setup) self._language = Language.CPP else: raise ValueError(f"Invalid language `{language}`.") # Convenience adjustment so that multi-line code snippets defined in # functions do not IndentationError (Python) or look odd (C++). The # leading newline removal is for the initial newline that appears when # defining block strings. For instance: # textwrap.dedent(""" # print("This is a stmt") # """) # produces '\nprint("This is a stmt")\n'. 
# # Stripping this down to 'print("This is a stmt")' doesn't change what gets executed; it only normalizes how the snippet is stored. stmt = textwrap.dedent(stmt) stmt = (stmt[1:] if stmt and stmt[0] == "\n" else stmt).rstrip() setup = textwrap.dedent(setup) setup = (setup[1:] if setup and setup[0] == "\n" else setup).rstrip() self._timer = self._timer_cls( stmt=stmt, setup=setup, timer=timer, globals=valgrind_timer_interface.CopyIfCallgrind.unwrap_all(self._globals), ) self._task_spec = common.TaskSpec( stmt=stmt, setup=setup, label=label, sub_label=sub_label, description=description, env=env, num_threads=num_threads, ) def timeit(self, number: int = 1000000) -> common.Measurement: with common.set_torch_threads(self._task_spec.num_threads): # Warmup self._timer.timeit(number=max(int(number // 100), 1)) return common.Measurement( number_per_run=number, raw_times=[self._timer.timeit(number=number)], task_spec=self._task_spec ) def repeat(self, repeat: int = -1, number: int = -1) -> None: raise NotImplementedError("See `Timer.blocked_autorange.`") def autorange(self, callback: Optional[Callable[[int, float], NoReturn]] = None) -> None: raise NotImplementedError("See `Timer.blocked_autorange.`") def _threaded_measurement_loop( self, number: int, time_hook: Callable[[], float], stop_hook: Callable[[List[float]], bool], min_run_time: float, max_run_time: Optional[float] = None, callback: Optional[Callable[[int, float], NoReturn]] = None ) -> List[float]: total_time = 0.0 can_stop = False times: List[float] = [] with common.set_torch_threads(self._task_spec.num_threads): while (total_time < min_run_time) or (not can_stop): time_spent = time_hook() times.append(time_spent) total_time += time_spent if callback: callback(number, time_spent) can_stop = stop_hook(times) if max_run_time and total_time > max_run_time: break return times def _estimate_block_size(self, min_run_time: float) -> int: with common.set_torch_threads(self._task_spec.num_threads): # Estimate the block size needed for measurement to be negligible # compared to the inner loop. This also serves as a warmup.
overhead = np.median([self._timer.timeit(0) for _ in range(5)]) number = 1 while True: time_taken = self._timer.timeit(number) relative_overhead = overhead / time_taken if relative_overhead <= 1e-4 and time_taken >= min_run_time / 1000: break if time_taken > min_run_time: break number *= 10 return number def adaptive_autorange( self, threshold: float = 0.1, *, min_run_time: float = 0.01, max_run_time: float = 10.0, callback: Optional[Callable[[int, float], NoReturn]] = None, ) -> common.Measurement: number = self._estimate_block_size(min_run_time=0.05) def time_hook() -> float: return self._timer.timeit(number) def stop_hook(times: List[float]) -> bool: if len(times) > 3: return common.Measurement( number_per_run=number, raw_times=times, task_spec=self._task_spec ).meets_confidence(threshold=threshold) return False times = self._threaded_measurement_loop( number, time_hook, stop_hook, min_run_time, max_run_time, callback=callback) return common.Measurement( number_per_run=number, raw_times=times, task_spec=self._task_spec ) def blocked_autorange( self, callback: Optional[Callable[[int, float], NoReturn]] = None, min_run_time: float = 0.2, ) -> common.Measurement: number = self._estimate_block_size(min_run_time) def time_hook() -> float: return self._timer.timeit(number) def stop_hook(times: List[float]) -> bool: return True times = self._threaded_measurement_loop( number, time_hook, stop_hook, min_run_time=min_run_time, callback=callback) return common.Measurement( number_per_run=number, raw_times=times, task_spec=self._task_spec ) def collect_callgrind( self, number: int = 100, collect_baseline: bool = True ) -> valgrind_timer_interface.CallgrindStats: if not isinstance(self._task_spec.stmt, str): raise ValueError("`collect_callgrind` currently only supports string `stmt`") # Check that the statement is valid. It doesn't guarantee success, but it's much # simpler and quicker to raise an exception for a faulty `stmt` or `setup` in # the parent process rather than the valgrind subprocess. self._timer.timeit(1) is_python = (self._language == Language.PYTHON) assert is_python or not self._globals return valgrind_timer_interface.wrapper_singleton().collect_callgrind( task_spec=self._task_spec, globals=self._globals, number=number, collect_baseline=collect_baseline and is_python, is_python=is_python)
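For comparison with blocked_autorange, a short hedged sketch of `adaptive_autorange` as implemented above: its stop_hook keeps collecting blocks until the accumulated Measurement satisfies meets_confidence(threshold), or until max_run_time elapses (the workload here is illustrative):

# Sketch: adaptive_autorange stops once the measurement meets the
# requested confidence threshold, or when max_run_time is exceeded.
import torch
from torch.utils.benchmark import Timer

t = Timer(stmt="torch.relu(x)", setup="x = torch.randn(1024)")
m = t.adaptive_autorange(threshold=0.1, max_run_time=5.0)
print(m)  # the repr summarizes mean/median and the number of runs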
true
true
f7001bd6600b878e6c19cae22c37a1c835b9236c
19,315
py
Python
Ranger/src/Range/Range.py
er432/Ranger
a583b332ffe0e5db9f60a5716c9a5504d91fbd39
[ "BSD-3-Clause" ]
2
2015-03-22T00:31:28.000Z
2021-01-31T16:24:42.000Z
Ranger/src/Range/Range.py
er432/Ranger
a583b332ffe0e5db9f60a5716c9a5504d91fbd39
[ "BSD-3-Clause" ]
1
2015-10-06T00:43:51.000Z
2015-10-06T02:36:36.000Z
Ranger/src/Range/Range.py
er432/Ranger
a583b332ffe0e5db9f60a5716c9a5504d91fbd39
[ "BSD-3-Clause" ]
2
2016-04-10T08:02:23.000Z
2020-12-11T06:17:41.000Z
from Ranger.src.Range.Cut import Cut class Range(object): """ Class used to represent a range along some 1-D domain. The range is represented by 2 cutpoints and can be unbounded by specifying an aboveAll or belowAll Cut. """ def __init__(self, lowerCut, upperCut): """ Instantiates a Range Parameters ---------- lowerCut : Cut object Specifies the lower cut for the range upperCut : Cut object Specifies the upper cut for the range Raises ------ ValueError If bound(s) are not Cut objects or lower > upper """ if not all(map(lambda x: isinstance(x, Cut), (lowerCut,upperCut))): raise ValueError("Bounds must be Cut objects") elif lowerCut > upperCut: raise ValueError("Lower bound cannot be greater than upper bound") self.lowerCut = lowerCut self.upperCut = upperCut def __repr__(self): try: return_str = '[' if self.isLowerBoundClosed() else '(' except TypeError: return_str = '(' return_str += (str(self.lowerCut.point) if not self.lowerCut.belowAll \ else '') return_str += ' , ' return_str += (str(self.upperCut.point) if not self.upperCut.aboveAll \ else '') try: return_str += ']' if self.isUpperBoundClosed() else ')' except TypeError: return_str += ')' return return_str def __hash__(self): return (hash(self.lowerCut)*31 + hash(self.upperCut)) def __eq__(self, other): if not isinstance(other, Range): return False else: return ((self.lowerCut == other.lowerCut) and \ (self.upperCut == other.upperCut)) def __ne__(self, other): return not self.__eq__(other) def contains(self, val): """ Returns True if the range contains the value Parameters ---------- val : Comparable object of the appropriate type for the range Value to query whether in the range Raises ------ ValueError If the value type is not compatible with the cutpoint type Returns ------- True if the range contains the value """ return (self.lowerCut < val and \ self.upperCut > val) def containsAll(self, vals): """ Returns True if the range contains all values in some iterable Parameters ---------- vals : Iterable of comparable object of appropriate type for range Values to query against the range Raises ------ ValueError If there is a value type not compatible with the cutpoint type Returns ------- True if the range contains all values """ for val in vals: if not self.contains(val): return False return True def getDistanceFromPoint(self, val, distFunc = lambda x1, x2: abs(x1-x2)): """ Returns the minimum distance of a Range from a Point, returning 0 if there is an overlap. Note that both upper and lower bounds must be closed for this function to work Parameters ---------- val : comparable, compatible with cutpoint type The value of the point where the distance is desired distFunc : callable Function that calculates the distance between two points in the domain of the Range Raises ------ TypeError If the upper and/or lower bounds of this Range are not closed or if distFunc is not compatible with the type Returns ------- The minimum distance between the Range and the Point. Returns 0 if there is an overlap """ if not all((self.isLowerBoundClosed(), self.isUpperBoundClosed())): raise TypeError("Range is not closed") if self.contains(val): return 0.
else: return min(distFunc(self.lowerCut.point, val), distFunc(self.upperCut.point, val)) def getDistanceFromRange(self, other, distFunc = lambda x1,x2: abs(x1-x2)): """ Returns the minimum distance of a Range from another Range, returning 0 if there is any overlap Note that both Ranges must be closed for this function to work Parameters ---------- other : Range, compatible with this Range's domain The Range to compare to distFunc : callable Function that calculates the distance between two points in the domain of the Range Raises ------ TypeError If the upper and/or lower bounds of this Range are not closed or if distFunc is not compatible with the type Returns ------- Minimum distance between the ranges """ if not isinstance(other, Range): raise TypeError("other is not a Range") if not all((self.isLowerBoundClosed(), self.isUpperBoundClosed(), other.isLowerBoundClosed(), other.isUpperBoundClosed())): raise TypeError("Not all Ranges closed") if self.isConnected(other): return 0. else: return min(distFunc(self.lowerCut.point, other.upperCut.point), distFunc(other.lowerCut.point, self.upperCut.point)) def hasLowerBound(self): """ Returns True if the range has a lower endpoint (not unbounded at the lower end) Returns ------- True if the range has a lower endpoint """ return (not self.lowerCut.belowAll) def hasUpperBound(self): """ Returns True if the range has an upper endpoint (not unbounded at the upper end) Returns ------- True if the range has an upper endpoint """ return (not self.upperCut.aboveAll) def lowerEndpoint(self): """ Returns the lower endpoint of the range if it exists. Otherwise raises a TypeError Raises ------ TypeError If the range is unbounded below Returns ------- The lower endpoint of the range """ if self.lowerCut.point is None: raise TypeError("Range unbounded below") else: return self.lowerCut.point def upperEndpoint(self): """ Returns the upper endpoint of the range if it exists.
Otherwise raises a TypeError Raises ------ TypeError If the range is unbounded above Returns ------- The upper endpoint of the range """ if self.upperCut.point is None: raise TypeError("Range unbounded above") else: return self.upperCut.point def isLowerBoundClosed(self): """ Returns whether the lower bound is closed (if there is a lower bound) Raises ------ TypeError If the range is unbounded below Returns ------- True if the lower bound is closed """ if self.lowerCut.point is None: raise TypeError("Range unbounded below") else: return self.lowerCut.below def isUpperBoundClosed(self): """ Returns whether the upper bound is closed (if there is an upper bound) Raises ------ TypeError If the range is unbounded above Returns ------- True if the upper bound is closed """ if self.upperCut.point is None: raise TypeError("Range unbounded above") else: return (not self.upperCut.below) def isEmpty(self): """ Returns True if the range is of form [v, v) or (v, v] Returns ------- True if the range is of the form [v,v) or (v,v] """ return self.lowerCut == self.upperCut def encloses(self, other): """ Returns True if the bounds of the other range do not extend outside the bounds of this range Examples: [3,6] encloses [4,5] (3,6) encloses (3,6) [3,6] encloses [4,4] (3,6] does not enclose [3,6] [4,5] does not enclose (3,6) Parameters ---------- other : A Range The range to compare to Raises ------ ValueError If object passed in is not a Range Returns ------- True if the bounds of the other range do not extend outside the bounds of this range """ if not isinstance(other, Range): raise ValueError("Range required") return ((self.lowerCut <= other.lowerCut) and \ (self.upperCut >= other.upperCut)) def isConnected(self, other): """ Returns True if there is a (possibly empty) range that is enclosed by both this range and other Examples: [2,4] and [5,7] are not connected [2,4] and [3,5] are connected [2,4] and [4,6] are connected [3,5] and (5,10) are connected Parameters ---------- other : A range The range to compare to Raises ------ ValueError If object passed in is not a Range Returns ------- True if there is a (possibly empty) range that is enclosed by both this range and other """ if not isinstance(other, Range): raise ValueError("Range required") return ((self.lowerCut <= other.upperCut) and \ (other.lowerCut <= self.upperCut)) def intersection(self, other): """ Returns the maximal range enclosed by both this range and the other range, if such a range exists Examples: Intersection of [1,5] and [3,7] is [3,5] Intersection of [1,5] and [5,7] is [5,5] Parameters ---------- other : A range The range to compare to Raises ------ ValueError If object passed in is not a Range or if there is no intersection Returns ------- The intersection range """ if not isinstance(other, Range): raise ValueError("Range required") if ((self.lowerCut >= other.lowerCut) and \ (self.upperCut <= other.upperCut)): return Range(self.lowerCut, self.upperCut) elif ((self.lowerCut <= other.lowerCut) and \ (self.upperCut >= other.upperCut)): return Range(other.lowerCut, other.upperCut) else: newLower = self.lowerCut if (self.lowerCut >= other.lowerCut) else \ other.lowerCut newUpper = self.upperCut if (self.upperCut <= other.upperCut) else \ other.upperCut return Range(newLower, newUpper) def span(self, other): """ Returns the minimal range that encloses both this range and the other. 
Note that if the input ranges are not connected, the span can contain values that are not contained within either input range Examples: Span of [1,3] and [5,7] is [1,7] Parameters ---------- other : A range A range to span with Raises ------ ValueError If object passed in is not a Range Returns ------- The minimal range enclosing both this range and the other range """ if ((self.lowerCut <= other.lowerCut) and \ (self.upperCut >= other.upperCut)): return Range(self.lowerCut, self.upperCut) elif ((self.lowerCut >= other.lowerCut) and \ (self.upperCut <= other.upperCut)): return Range(other.lowerCut, other.upperCut) else: newLower = self.lowerCut if (self.lowerCut <= other.lowerCut) else \ other.lowerCut newUpper = self.upperCut if (self.upperCut >= other.upperCut) else \ other.upperCut return Range(newLower, newUpper) ################## # Static methods # ################## @staticmethod def _validate_cutpoints(*pts): if not all(map(lambda x: (hasattr(x, "__lt__") and \ hasattr(x, "__gt__")) or hasattr(x,'__cmp__'), pts)): raise ValueError("Cutpoint type(s) not comparable") if len(pts) == 2: if not (issubclass(type(pts[0]),type(pts[1])) or \ issubclass(type(pts[1]),type(pts[0]))): raise ValueError("Cutpoints are not compatible") return True @staticmethod def _get_type(*pts): if len(pts) == 1: return type(pts[0]) elif len(pts) == 2: if issubclass(type(pts[0]),type(pts[1])): return type(pts[1]) elif issubclass(type(pts[1]),type(pts[0])): return type(pts[0]) else: raise ValueError("Cutpoints are not compatible") @staticmethod def closed(lower, upper): """ Creates a range including the endpoints (i.e. [lower, upper]) Parameters ---------- lower : comparable, of same type as or subclass of upper type The lower bound upper : comparable, of same type as or subclass of lower type The upper bound Raises ------ ValueError If type(s) are not comparable or compatible Returns ------- A Range object [lower, upper] """ # Ensure cutpoints are of compatible, appropriate types Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) return Range(Cut.belowValue(lower, theType=theType), Cut.aboveValue(upper, theType=theType)) @staticmethod def closedOpen(lower, upper): """ Creates a range including the lower endpoint (i.e. [lower, upper)) Parameters ---------- lower : comparable, of same type as or subclass of upper type The lower bound upper : comparable, of same type as or subclass of lower type The upper bound Raises ------ ValueError If type(s) are not comparable or compatible Returns ------- A Range object [lower, upper) """ # Ensure cutpoints are of compatible, appropriate types Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) return Range(Cut.belowValue(lower, theType=theType), Cut.belowValue(upper, theType=theType)) @staticmethod def openClosed(lower, upper): """ Creates a range including the upper endpoint (i.e. (lower, upper]) Parameters ---------- lower : comparable, of same type as or subclass of upper type The lower bound upper : comparable, of same type as or subclass of lower type The upper bound Raises ------ ValueError If type(s) are not comparable or compatible Returns ------- A Range object (lower, upper] """ # Ensure cutpoints are of compatible, appropriate types Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) return Range(Cut.aboveValue(lower, theType=theType), Cut.aboveValue(upper, theType=theType)) @staticmethod def open(lower, upper): """ Creates a range excluding the endpoints (i.e.
(lower, upper)) Parameters ---------- lower : comparable, of same type as or subclass of upper type The lower bound upper : comparable, of same type as or subclass of lower type The upper bound Raises ------ ValueError If type(s) are not comparable or compatible TypeError If constructing a range of type (v,v), which is invalid Returns ------- A Range object (lower, upper) """ # Ensure cutpoints are of compatible, appropriate types Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) if lower == upper: raise TypeError("Range of type (v,v) is not valid") return Range(Cut.aboveValue(lower, theType=theType), Cut.belowValue(upper, theType=theType)) @staticmethod def lessThan(val): """ Makes range including all values less than some value (i.e. (-inf, val)) Parameters ---------- val : comparable The upper bound Raises ------ ValueError If type not comparable Returns ------- A Range object (-inf, val) """ Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.belowAll(theType=theType), Cut.belowValue(val, theType=theType)) @staticmethod def atMost(val): """ Makes range including all values less than or equal to some value (i.e. (-inf, val]) Parameters ---------- val : comparable The upper bound Raises ------ ValueError If type not comparable Returns ------- A Range object (-inf, val] """ Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.belowAll(theType=theType), Cut.aboveValue(val, theType=theType)) @staticmethod def greaterThan(val): """ Makes range including all values greater than some value (i.e. (val, inf)) Parameters ---------- val : comparable The lower bound Raises ------ ValueError If type not comparable Returns ------- A Range object (val, inf) """ Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.aboveValue(val,theType=theType), Cut.aboveAll(theType=theType)) @staticmethod def atLeast(val): """ Makes range including all values greater than or equal to some value (i.e. [val, inf)) Parameters ---------- val : comparable The lower bound Raises ------ ValueError If type not comparable Returns ------- A Range object [val, inf) """ Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.belowValue(val, theType=theType), Cut.aboveAll(theType=theType))
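A brief usage sketch of the factory methods and set operations defined above; the import path mirrors this module's own convention (`Ranger.src.Range.Cut`) and may differ in an installed package, and the values are illustrative:

from Ranger.src.Range.Range import Range

a = Range.closed(1, 5)      # [1 , 5]
b = Range.closedOpen(3, 7)  # [3 , 7)

print(a.contains(5))        # True: the upper endpoint is closed
print(a.isConnected(b))     # True: the ranges overlap
print(a.intersection(b))    # [3 , 5]
print(a.span(Range.closed(8, 9)))  # [1 , 9], even though the inputs are disjoint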
31.92562
81
0.546932
from Ranger.src.Range.Cut import Cut class Range(object): def __init__(self, lowerCut, upperCut): if not all(map(lambda x: isinstance(x, Cut), (lowerCut,upperCut))): raise ValueError("Bounds must be Cut objects") elif lowerCut > upperCut: raise ValueError("Lower bound cannot be greater than upper bound") self.lowerCut = lowerCut self.upperCut = upperCut def __repr__(self): try: return_str = '[' if self.isLowerBoundClosed() else '(' except TypeError: return_str = '(' return_str += (str(self.lowerCut.point) if not self.lowerCut.belowAll \ else '') return_str += ' , ' return_str += (str(self.upperCut.point) if not self.upperCut.aboveAll \ else '') try: return_str += ']' if self.isUpperBoundClosed() else ')' except TypeError: return_str += ')' return return_str def __hash__(self): return (hash(self.lowerCut)*31 + hash(self.upperCut)) def __eq__(self, other): if not isinstance(other, Range): return False else: return ((self.lowerCut == other.lowerCut) and \ (self.upperCut == other.upperCut)) def __ne__(self, other): return not self.__eq__(other) def contains(self, val): return (self.lowerCut < val and \ self.upperCut > val) def containsAll(self, vals): for val in vals: if not self.contains(val): return False return True def getDistanceFromPoint(self, val, distFunc = lambda x1, x2: abs(x1-x2)): if not all((self.isLowerBoundClosed(), self.isUpperBoundClosed())): raise TypeError("Range is not closed") if self.contains(val): return 0. else: return min(distFunc(self.lowerCut.point, val), distFunc(self.upperCut.point, val)) def getDistanceFromRange(self, other, distFunc = lambda x1,x2: abs(x1-x2)): if not isinstance(other, Range): raise TypeError("other is not a Range") if not all((self.isLowerBoundClosed(), self.isUpperBoundClosed(), other.isLowerBoundClosed(), other.isUpperBoundClosed())): raise TypeError("Not all Ranges closed") if self.isConnected(other): return 0. 
else: return min(distFunc(self.lowerCut.point, other.upperCut.point), distFunc(other.lowerCut.point, self.upperCut.point)) def hasLowerBound(self): return (not self.lowerCut.belowAll) def hasUpperBound(self): return (not self.upperCut.aboveAll) def lowerEndpoint(self): if self.lowerCut.point is None: raise TypeError("Range unbounded below") else: return self.lowerCut.point def upperEndpoint(self): if self.upperCut.point is None: raise TypeError("Range unbounded above") else: return self.upperCut.point def isLowerBoundClosed(self): if self.lowerCut.point is None: raise TypeError("Range unbounded below") else: return self.lowerCut.below def isUpperBoundClosed(self): if self.upperCut.point is None: raise TypeError("Range unbounded above") else: return (not self.upperCut.below) def isEmpty(self): return self.lowerCut == self.upperCut def encloses(self, other): if not isinstance(other, Range): raise ValueError("Range required") return ((self.lowerCut <= other.lowerCut) and \ (self.upperCut >= other.upperCut)) def isConnected(self, other): if not isinstance(other, Range): raise ValueError("Range required") return ((self.lowerCut <= other.upperCut) and \ (other.lowerCut <= self.upperCut)) def intersection(self, other): if not isinstance(other, Range): raise ValueError("Range required") if ((self.lowerCut >= other.lowerCut) and \ (self.upperCut <= other.upperCut)): return Range(self.lowerCut, self.upperCut) elif ((self.lowerCut <= other.lowerCut) and \ (self.upperCut >= other.upperCut)): return Range(other.lowerCut, other.upperCut) else: newLower = self.lowerCut if (self.lowerCut >= other.lowerCut) else \ other.lowerCut newUpper = self.upperCut if (self.upperCut <= other.upperCut) else \ other.upperCut return Range(newLower, newUpper) def span(self, other): if ((self.lowerCut <= other.lowerCut) and \ (self.upperCut >= other.upperCut)): return Range(self.lowerCut, self.upperCut) elif ((self.lowerCut >= other.lowerCut) and \ (self.upperCut <= other.upperCut)): return Range(other.lowerCut, other.upperCut) else: newLower = self.lowerCut if (self.lowerCut <= other.lowerCut) else \ other.lowerCut newUpper = self.upperCut if (self.upperCut >= other.upperCut) else \ other.upperCut return Range(newLower, newUpper) @staticmethod def _validate_cutpoints(*pts): if not all(map(lambda x: (hasattr(x, "__lt__") and \ hasattr(x, "__gt__")) or hasattr(x,'__cmp__'), pts)): raise ValueError("Cutpoint type(s) not comparable") if len(pts) == 2: if not (issubclass(type(pts[0]),type(pts[1])) or \ issubclass(type(pts[1]),type(pts[0]))): raise ValueError("Cutpoints are not compatible") return True @staticmethod def _get_type(*pts): if len(pts) == 1: return type(pts[0]) elif len(pts) == 2: if issubclass(type(pts[0]),type(pts[1])): return type(pts[1]) elif issubclass(type(pts[1]),type(pts[0])): return type(pts[0]) else: raise ValueError("Cutpoints are not compatible") @staticmethod def closed(lower, upper): Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) return Range(Cut.belowValue(lower, theType=theType), Cut.aboveValue(upper, theType=theType)) @staticmethod def closedOpen(lower, upper): Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) return Range(Cut.belowValue(lower, theType=theType), Cut.belowValue(upper, theType=theType)) @staticmethod def openClosed(lower, upper): Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) return Range(Cut.aboveValue(lower, theType=theType), Cut.aboveValue(upper, theType=theType)) @staticmethod def 
open(lower, upper): Range._validate_cutpoints(lower, upper) theType = Range._get_type(lower,upper) if lower == upper: raise TypeError("Range of type (v,v) is not valid") return Range(Cut.aboveValue(lower, theType=theType), Cut.belowValue(upper, theType=theType)) @staticmethod def lessThan(val): Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.belowAll(theType=theType), Cut.belowValue(val, theType=theType)) @staticmethod def atMost(val): Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.belowAll(theType=theType), Cut.aboveValue(val, theType=theType)) @staticmethod def greaterThan(val): Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.aboveValue(val,theType=theType), Cut.aboveAll(theType=theType)) @staticmethod def atLeast(val): Range._validate_cutpoints(val) theType = Range._get_type(val) return Range(Cut.belowValue(val, theType=theType), Cut.aboveAll(theType=theType))
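The distance helpers above require both bounds of every range involved to be closed; a short hedged sketch under the same assumed import path, with illustrative values:

from Ranger.src.Range.Range import Range

a = Range.closed(0, 10)
b = Range.closed(15, 20)

print(a.getDistanceFromPoint(12))  # 2: nearest endpoint is 10
print(a.getDistanceFromRange(b))   # 5: gap between 10 and 15
print(a.getDistanceFromPoint(5))   # 0.0: the point falls inside the range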
true
true
f7001ca968913bd67dab95f067b10995bada69eb
62
py
Python
pyrelational/data/__init__.py
RelationRx/pyrelational
41ededeff84158bd88b76d39006764de3388c821
[ "Apache-2.0" ]
42
2022-02-09T16:36:37.000Z
2022-03-25T00:25:34.000Z
pyrelational/data/__init__.py
RelationRx/pyrelational
41ededeff84158bd88b76d39006764de3388c821
[ "Apache-2.0" ]
4
2022-03-22T13:22:38.000Z
2022-03-25T16:14:40.000Z
pyrelational/data/__init__.py
RelationRx/pyrelational
41ededeff84158bd88b76d39006764de3388c821
[ "Apache-2.0" ]
3
2022-02-15T17:50:30.000Z
2022-03-10T18:14:16.000Z
from pyrelational.data.data_manager import GenericDataManager
31
61
0.903226
from pyrelational.data.data_manager import GenericDataManager
true
true
f7001d1417417c71799f65be7cf142530523b8ed
7,625
py
Python
selfdrive/car/nissan/values.py
duyetb3/openpilot
1e906366f8439e3ef3f4f8fc01e2f2ec0f951a86
[ "MIT" ]
3
2021-11-28T00:43:46.000Z
2021-12-21T11:32:21.000Z
selfdrive/car/nissan/values.py
byte-iot/openpilot
7fc1b7dc0a5be75a3ae5d7217b9e887b34947c91
[ "MIT" ]
1
2022-02-27T14:38:59.000Z
2022-02-27T14:38:59.000Z
selfdrive/car/nissan/values.py
byte-iot/openpilot
7fc1b7dc0a5be75a3ae5d7217b9e887b34947c91
[ "MIT" ]
5
2022-03-24T16:18:47.000Z
2022-03-30T02:18:49.000Z
from selfdrive.car import dbc_dict from cereal import car Ecu = car.CarParams.Ecu class CarControllerParams: ANGLE_DELTA_BP = [0., 5., 15.] ANGLE_DELTA_V = [5., .8, .15] # windup limit ANGLE_DELTA_VU = [5., 3.5, 0.4] # unwind limit LKAS_MAX_TORQUE = 1 # A value of 1 is easy to overpower STEER_THRESHOLD = 1.0 class CAR: XTRAIL = "NISSAN X-TRAIL 2017" LEAF = "NISSAN LEAF 2018" # Leaf with ADAS ECU found behind instrument cluster instead of glovebox # Currently the only known difference between them is the inverted seatbelt signal. LEAF_IC = "NISSAN LEAF 2018 Instrument Cluster" ROGUE = "NISSAN ROGUE 2019" ALTIMA = "NISSAN ALTIMA 2020" FINGERPRINTS = { CAR.XTRAIL: [ { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 548: 8, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 768: 2, 783: 3, 851: 8, 855: 8, 1041: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1111: 4, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 2, 1497: 3, 1821: 8, 1823: 8, 1837: 8, 2015: 8, 2016: 8, 2024: 8 }, { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 527: 1, 548: 8, 637: 4, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 768: 6, 783: 3, 851: 8, 855: 8, 1041: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1111: 4, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 8, 1497: 3, 1534: 6, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 2015: 8, 2016: 8, 2024: 8 }, ], CAR.LEAF: [ { 2: 5, 42: 6, 264: 3, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 724: 6, 758: 3, 761: 2, 783: 3, 852: 8, 853: 8, 856: 8, 861: 8, 944: 1, 976: 6, 1008: 7, 1011: 7, 1057: 3, 1227: 8, 1228: 8, 1261: 5, 1342: 1, 1354: 8, 1361: 8, 1459: 8, 1477: 8, 1497: 3, 1549: 8, 1573: 6, 1821: 8, 1837: 8, 1856: 8, 1859: 8, 1861: 8, 1864: 8, 1874: 8, 1888: 8, 1891: 8, 1893: 8, 1906: 8, 1947: 8, 1949: 8, 1979: 8, 1981: 8, 2016: 8, 2017: 8, 2021: 8, 643: 5, 1792: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8 }, # 2020 Leaf SV Plus { 2: 5, 42: 8, 264: 3, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 643: 5, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 724: 6, 758: 3, 761: 2, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 783: 3, 852: 8, 853: 8, 856: 8, 861: 8, 943: 8, 944: 1, 976: 6, 1008: 7, 1009: 8, 1010: 8, 1011: 7, 1012: 8, 1013: 8, 1019: 8, 1020: 8, 1021: 8, 1022: 8, 1057: 3, 1227: 8, 1228: 8, 1261: 5, 1342: 1, 1354: 8, 1361: 8, 1402: 8, 1459: 8, 1477: 8, 1497: 3, 1549: 8, 1573: 6, 1821: 8, 1837: 8 }, ], CAR.LEAF_IC: [ { 2: 5, 42: 6, 264: 3, 282: 8, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 643: 5, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 756: 5, 758: 3, 761: 2, 783: 3, 830: 2, 852: 8, 853: 8, 856: 8, 861: 8, 943: 8, 944: 1, 1001: 6, 1057: 3, 1227: 8, 1228: 8, 1229: 8, 1342: 1, 1354: 8, 1361: 8, 1459: 8, 1477: 8, 1497: 3, 1514: 6, 1549: 8, 1573: 6, 1792: 8, 1821: 8, 1822: 8, 1837: 8, 1838: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8 }, ], CAR.ROGUE: [ { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 
397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 548: 8, 634: 7, 643: 5, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 783: 3, 851: 8, 855: 8, 1041: 8, 1042: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1110: 7, 1111: 7, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 2, 1497: 3, 1534: 7, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1839: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8 }, ], CAR.ALTIMA: [ { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 438: 8, 451: 8, 517: 8, 520: 2, 522: 8, 523: 6, 539: 8, 541: 7, 542: 8, 543: 8, 544: 8, 545: 8, 546: 8, 547: 8, 548: 8, 570: 8, 576: 8, 577: 8, 582: 8, 583: 8, 584: 8, 586: 8, 587: 8, 588: 8, 589: 8, 590: 8, 591: 8, 592: 8, 600: 8, 601: 8, 610: 8, 611: 8, 612: 8, 614: 8, 615: 8, 616: 8, 617: 8, 622: 8, 623: 8, 634: 7, 638: 8, 645: 8, 648: 5, 654: 6, 658: 8, 659: 8, 660: 8, 661: 8, 665: 8, 666: 8, 674: 2, 675: 8, 676: 8, 682: 8, 683: 8, 684: 8, 685: 8, 686: 8, 687: 8, 689: 8, 690: 8, 703: 8, 708: 7, 709: 7, 711: 7, 712: 7, 713: 7, 714: 8, 715: 8, 716: 8, 717: 7, 718: 7, 719: 7, 720: 7, 723: 8, 726: 7, 727: 7, 728: 7, 735: 8, 746: 8, 748: 6, 749: 6, 750: 8, 758: 3, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 779: 7, 781: 7, 782: 7, 783: 3, 851: 8, 855: 5, 1001: 6, 1041: 8, 1042: 8, 1055: 3, 1100: 7, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1110: 7, 1111: 7, 1144: 7, 1145: 7, 1227: 8, 1228: 8, 1229: 8, 1232: 8, 1247: 4, 1258: 8, 1259: 8, 1266: 8, 1273: 7, 1306: 1, 1314: 8, 1323: 8, 1324: 8, 1342: 1, 1376: 8, 1401: 8, 1454: 8, 1497: 3, 1514: 6, 1526: 8, 1527: 5, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8 }, ] } FW_VERSIONS = { CAR.ALTIMA: { (Ecu.fwdCamera, 0x707, None): [ b'284N86CA1D', ], (Ecu.eps, 0x742, None): [ b'6CA2B\xa9A\x02\x02G8A89P90D6A\x00\x00\x01\x80', ], (Ecu.engine, 0x7e0, None): [ b'237109HE2B', ], (Ecu.gateway, 0x18dad0f1, None): [ b'284U29HE0A', ], }, CAR.LEAF_IC: { (Ecu.fwdCamera, 0x707, None): [ b'5SH1BDB\x04\x18\x00\x00\x00\x00\x00_-?\x04\x91\xf2\x00\x00\x00\x80', b'5SK0ADB\x04\x18\x00\x00\x00\x00\x00_(5\x07\x9aQ\x00\x00\x00\x80', ], (Ecu.esp, 0x740, None): [ b'476605SH1D', b'476605SK2A', ], (Ecu.eps, 0x742, None): [ b'5SH2A\x99A\x05\x02N123F\x15\x81\x00\x00\x00\x00\x00\x00\x00\x80', b'5SK3A\x99A\x05\x02N123F\x15u\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.gateway, 0x18dad0f1, None): [ b'284U25SH3A', b'284U25SK2D', ], }, CAR.XTRAIL: { (Ecu.fwdCamera, 0x707, None): [ b'284N86FR2A', ], (Ecu.esp, 0x740, None): [ b'6FU1BD\x11\x02\x00\x02e\x95e\x80iX#\x01\x00\x00\x00\x00\x00\x80', b'6FU0AD\x11\x02\x00\x02e\x95e\x80iQ#\x01\x00\x00\x00\x00\x00\x80', ], (Ecu.eps, 0x742, None): [ b'6FP2A\x99A\x05\x02N123F\x18\x02\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.combinationMeter, 0x743, None): [ b'6FR2A\x18B\x05\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.engine, 0x7e0, None): [ b'6FU9B\xa0A\x06\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80', b'6FR9A\xa0A\x06\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.gateway, 0x18dad0f1, None): [ b'284U26FR0E', ], }, } DBC = { CAR.XTRAIL: dbc_dict('nissan_x_trail_2017', None), CAR.LEAF: dbc_dict('nissan_leaf_2018', None), CAR.LEAF_IC: dbc_dict('nissan_leaf_2018', None), CAR.ROGUE: 
dbc_dict('nissan_x_trail_2017', None), CAR.ALTIMA: dbc_dict('nissan_x_trail_2017', None), }
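Each entry in FINGERPRINTS maps a platform to one or more observed CAN fingerprints of the form {message address: payload length}. Below is a hedged sketch of how such a table can be matched against observed traffic; openpilot's real fingerprinting logic lives elsewhere and is more involved (candidate elimination over time, ignore lists), so this function is illustrative only:

from typing import Dict, List

def match_fingerprint(observed: Dict[int, int],
                      fingerprints: Dict[str, List[Dict[int, int]]]) -> List[str]:
    # Return car names whose recorded fingerprint is consistent with the
    # observed {CAN address: payload length} mapping.
    candidates = []
    for car, fps in fingerprints.items():
        for fp in fps:
            # Every observed message must appear with the same payload length.
            if all(fp.get(addr) == length for addr, length in observed.items()):
                candidates.append(car)
                break
    return candidates

# e.g. match_fingerprint({2: 5, 42: 6, 346: 6}, FINGERPRINTS)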
62.5
1,341
0.550033
from selfdrive.car import dbc_dict from cereal import car Ecu = car.CarParams.Ecu class CarControllerParams: ANGLE_DELTA_BP = [0., 5., 15.] ANGLE_DELTA_V = [5., .8, .15] ANGLE_DELTA_VU = [5., 3.5, 0.4] LKAS_MAX_TORQUE = 1 STEER_THRESHOLD = 1.0 class CAR: XTRAIL = "NISSAN X-TRAIL 2017" LEAF = "NISSAN LEAF 2018" LEAF_IC = "NISSAN LEAF 2018 Instrument Cluster" ROGUE = "NISSAN ROGUE 2019" ALTIMA = "NISSAN ALTIMA 2020" FINGERPRINTS = { CAR.XTRAIL: [ { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 548: 8, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 768: 2, 783: 3, 851: 8, 855: 8, 1041: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1111: 4, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 2, 1497: 3, 1821: 8, 1823: 8, 1837: 8, 2015: 8, 2016: 8, 2024: 8 }, { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 527: 1, 548: 8, 637: 4, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 768: 6, 783: 3, 851: 8, 855: 8, 1041: 8, 1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1111: 4, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 8, 1497: 3, 1534: 6, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 2015: 8, 2016: 8, 2024: 8 }, ], CAR.LEAF: [ { 2: 5, 42: 6, 264: 3, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 724: 6, 758: 3, 761: 2, 783: 3, 852: 8, 853: 8, 856: 8, 861: 8, 944: 1, 976: 6, 1008: 7, 1011: 7, 1057: 3, 1227: 8, 1228: 8, 1261: 5, 1342: 1, 1354: 8, 1361: 8, 1459: 8, 1477: 8, 1497: 3, 1549: 8, 1573: 6, 1821: 8, 1837: 8, 1856: 8, 1859: 8, 1861: 8, 1864: 8, 1874: 8, 1888: 8, 1891: 8, 1893: 8, 1906: 8, 1947: 8, 1949: 8, 1979: 8, 1981: 8, 2016: 8, 2017: 8, 2021: 8, 643: 5, 1792: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8 }, { 2: 5, 42: 8, 264: 3, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 643: 5, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 724: 6, 758: 3, 761: 2, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 783: 3, 852: 8, 853: 8, 856: 8, 861: 8, 943: 8, 944: 1, 976: 6, 1008: 7, 1009: 8, 1010: 8, 1011: 7, 1012: 8, 1013: 8, 1019: 8, 1020: 8, 1021: 8, 1022: 8, 1057: 3, 1227: 8, 1228: 8, 1261: 5, 1342: 1, 1354: 8, 1361: 8, 1402: 8, 1459: 8, 1477: 8, 1497: 3, 1549: 8, 1573: 6, 1821: 8, 1837: 8 }, ], CAR.LEAF_IC: [ { 2: 5, 42: 6, 264: 3, 282: 8, 361: 8, 372: 8, 384: 8, 389: 8, 403: 8, 459: 7, 460: 4, 470: 8, 520: 1, 569: 8, 581: 8, 634: 7, 640: 8, 643: 5, 644: 8, 645: 8, 646: 5, 658: 8, 682: 8, 683: 8, 689: 8, 756: 5, 758: 3, 761: 2, 783: 3, 830: 2, 852: 8, 853: 8, 856: 8, 861: 8, 943: 8, 944: 1, 1001: 6, 1057: 3, 1227: 8, 1228: 8, 1229: 8, 1342: 1, 1354: 8, 1361: 8, 1459: 8, 1477: 8, 1497: 3, 1514: 6, 1549: 8, 1573: 6, 1792: 8, 1821: 8, 1822: 8, 1837: 8, 1838: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8 }, ], CAR.ROGUE: [ { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 520: 2, 523: 6, 548: 8, 634: 7, 643: 5, 645: 8, 658: 8, 665: 8, 666: 8, 674: 2, 682: 8, 683: 8, 689: 8, 723: 8, 758: 3, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 783: 3, 851: 8, 855: 8, 1041: 8, 1042: 8, 
1055: 2, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1110: 7, 1111: 7, 1227: 8, 1228: 8, 1247: 4, 1266: 8, 1273: 7, 1342: 1, 1376: 6, 1401: 8, 1474: 2, 1497: 3, 1534: 7, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1839: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8 }, ], CAR.ALTIMA: [ { 2: 5, 42: 6, 346: 6, 347: 5, 348: 8, 349: 7, 361: 8, 386: 8, 389: 8, 397: 8, 398: 8, 403: 8, 438: 8, 451: 8, 517: 8, 520: 2, 522: 8, 523: 6, 539: 8, 541: 7, 542: 8, 543: 8, 544: 8, 545: 8, 546: 8, 547: 8, 548: 8, 570: 8, 576: 8, 577: 8, 582: 8, 583: 8, 584: 8, 586: 8, 587: 8, 588: 8, 589: 8, 590: 8, 591: 8, 592: 8, 600: 8, 601: 8, 610: 8, 611: 8, 612: 8, 614: 8, 615: 8, 616: 8, 617: 8, 622: 8, 623: 8, 634: 7, 638: 8, 645: 8, 648: 5, 654: 6, 658: 8, 659: 8, 660: 8, 661: 8, 665: 8, 666: 8, 674: 2, 675: 8, 676: 8, 682: 8, 683: 8, 684: 8, 685: 8, 686: 8, 687: 8, 689: 8, 690: 8, 703: 8, 708: 7, 709: 7, 711: 7, 712: 7, 713: 7, 714: 8, 715: 8, 716: 8, 717: 7, 718: 7, 719: 7, 720: 7, 723: 8, 726: 7, 727: 7, 728: 7, 735: 8, 746: 8, 748: 6, 749: 6, 750: 8, 758: 3, 772: 8, 773: 6, 774: 7, 775: 8, 776: 6, 777: 7, 778: 6, 779: 7, 781: 7, 782: 7, 783: 3, 851: 8, 855: 5, 1001: 6, 1041: 8, 1042: 8, 1055: 3, 1100: 7, 1104: 4, 1105: 6, 1107: 4, 1108: 8, 1110: 7, 1111: 7, 1144: 7, 1145: 7, 1227: 8, 1228: 8, 1229: 8, 1232: 8, 1247: 4, 1258: 8, 1259: 8, 1266: 8, 1273: 7, 1306: 1, 1314: 8, 1323: 8, 1324: 8, 1342: 1, 1376: 8, 1401: 8, 1454: 8, 1497: 3, 1514: 6, 1526: 8, 1527: 5, 1792: 8, 1821: 8, 1823: 8, 1837: 8, 1872: 8, 1937: 8, 1953: 8, 1968: 8, 1988: 8, 2000: 8, 2001: 8, 2004: 8, 2005: 8, 2015: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8 }, ] } FW_VERSIONS = { CAR.ALTIMA: { (Ecu.fwdCamera, 0x707, None): [ b'284N86CA1D', ], (Ecu.eps, 0x742, None): [ b'6CA2B\xa9A\x02\x02G8A89P90D6A\x00\x00\x01\x80', ], (Ecu.engine, 0x7e0, None): [ b'237109HE2B', ], (Ecu.gateway, 0x18dad0f1, None): [ b'284U29HE0A', ], }, CAR.LEAF_IC: { (Ecu.fwdCamera, 0x707, None): [ b'5SH1BDB\x04\x18\x00\x00\x00\x00\x00_-?\x04\x91\xf2\x00\x00\x00\x80', b'5SK0ADB\x04\x18\x00\x00\x00\x00\x00_(5\x07\x9aQ\x00\x00\x00\x80', ], (Ecu.esp, 0x740, None): [ b'476605SH1D', b'476605SK2A', ], (Ecu.eps, 0x742, None): [ b'5SH2A\x99A\x05\x02N123F\x15\x81\x00\x00\x00\x00\x00\x00\x00\x80', b'5SK3A\x99A\x05\x02N123F\x15u\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.gateway, 0x18dad0f1, None): [ b'284U25SH3A', b'284U25SK2D', ], }, CAR.XTRAIL: { (Ecu.fwdCamera, 0x707, None): [ b'284N86FR2A', ], (Ecu.esp, 0x740, None): [ b'6FU1BD\x11\x02\x00\x02e\x95e\x80iX#\x01\x00\x00\x00\x00\x00\x80', b'6FU0AD\x11\x02\x00\x02e\x95e\x80iQ#\x01\x00\x00\x00\x00\x00\x80', ], (Ecu.eps, 0x742, None): [ b'6FP2A\x99A\x05\x02N123F\x18\x02\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.combinationMeter, 0x743, None): [ b'6FR2A\x18B\x05\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.engine, 0x7e0, None): [ b'6FU9B\xa0A\x06\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80', b'6FR9A\xa0A\x06\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80', ], (Ecu.gateway, 0x18dad0f1, None): [ b'284U26FR0E', ], }, } DBC = { CAR.XTRAIL: dbc_dict('nissan_x_trail_2017', None), CAR.LEAF: dbc_dict('nissan_leaf_2018', None), CAR.LEAF_IC: dbc_dict('nissan_leaf_2018', None), CAR.ROGUE: dbc_dict('nissan_x_trail_2017', None), CAR.ALTIMA: dbc_dict('nissan_x_trail_2017', None), }
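ANGLE_DELTA_BP and ANGLE_DELTA_V / ANGLE_DELTA_VU above are speed breakpoints (m/s) and per-frame steering-angle deltas for winding up and unwinding. A hedged sketch of how openpilot-style controllers typically interpolate such tables to rate-limit an angle command; the function name and wiring are illustrative, and the real logic lives in the Nissan car controller, not in this file:

import numpy as np

ANGLE_DELTA_BP = [0., 5., 15.]   # vehicle speed breakpoints (m/s)
ANGLE_DELTA_V = [5., .8, .15]    # max per-frame delta away from center (windup)
ANGLE_DELTA_VU = [5., 3.5, 0.4]  # max per-frame delta back toward center (unwind)

def limit_angle(desired: float, last: float, v_ego: float) -> float:
    # Same sign and growing magnitude means winding up; otherwise unwinding.
    if desired * last > 0. and abs(desired) > abs(last):
        lim = np.interp(v_ego, ANGLE_DELTA_BP, ANGLE_DELTA_V)
    else:
        lim = np.interp(v_ego, ANGLE_DELTA_BP, ANGLE_DELTA_VU)
    return float(np.clip(desired, last - lim, last + lim))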
true
true
f7001d4c7d2dcb48ce84c4cb33d95e2e8be5f394
608
py
Python
elastic_agent_setup/download.py
MSAdministrator/elastic-agent-setup
5cb8202aaed281f73706556a86657f5525495b56
[ "MIT" ]
1
2022-02-04T16:59:53.000Z
2022-02-04T16:59:53.000Z
elastic_agent_setup/download.py
MSAdministrator/elastic-agent-setup
5cb8202aaed281f73706556a86657f5525495b56
[ "MIT" ]
1
2021-07-21T11:30:13.000Z
2021-11-09T14:30:10.000Z
elastic_agent_setup/download.py
MSAdministrator/elastic-agent-setup
5cb8202aaed281f73706556a86657f5525495b56
[ "MIT" ]
null
null
null
from .core import Core, Settings class Download(Core): host = 'https://artifacts.elastic.co/downloads/beats/elastic-agent/{endpoint}' endpoint = Settings.download_endpoint kwargs = { 'stream': True } def parse_response(self, response): self.__logger.debug('Saving file to download path: {}'.format(Settings.download_path)) with open(Settings.download_path, 'wb+') as f: for chunk in response.raw.stream(1024, decode_content=False): if chunk: f.write(chunk) self.__logger.debug('File saved successfully')
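parse_response above streams the HTTP body to disk in 1024-byte chunks rather than buffering it in memory; the request itself is issued by the Core base class. Here is a self-contained sketch of the same chunked-streaming pattern using `requests` directly (URL and path are placeholders):

import requests

def stream_to_file(url: str, path: str, chunk_size: int = 1024) -> None:
    # stream=True defers the body download so it can be written chunk by chunk.
    with requests.get(url, stream=True) as response:
        response.raise_for_status()
        with open(path, "wb") as f:
            for chunk in response.iter_content(chunk_size=chunk_size):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)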
32
94
0.641447
from .core import Core, Settings class Download(Core): host = 'https://artifacts.elastic.co/downloads/beats/elastic-agent/{endpoint}' endpoint = Settings.download_endpoint kwargs = { 'stream': True } def parse_response(self, response): self.__logger.debug('Saving file to download path: {}'.format(Settings.download_path)) with open(Settings.download_path, 'wb+') as f: for chunk in response.raw.stream(1024, decode_content=False): if chunk: f.write(chunk) self.__logger.debug('File saved successfully')
true
true
f7001e1d779abcb0aeb35035bb723969df9248a9
16,479
py
Python
mmdet/models/detectors/csp.py
mohammedshariqnawaz/Pedestron
9785feb94f00e07ae24a662525b4678f12d0fdc8
[ "Apache-2.0" ]
4
2022-03-09T11:34:30.000Z
2022-03-30T07:29:21.000Z
mmdet/models/detectors/csp.py
mohammedshariqnawaz/Pedestron
9785feb94f00e07ae24a662525b4678f12d0fdc8
[ "Apache-2.0" ]
null
null
null
mmdet/models/detectors/csp.py
mohammedshariqnawaz/Pedestron
9785feb94f00e07ae24a662525b4678f12d0fdc8
[ "Apache-2.0" ]
1
2022-03-30T07:29:24.000Z
2022-03-30T07:29:24.000Z
from .single_stage import SingleStageDetector from ..registry import DETECTORS from mmdet.core import bbox2result import torch.nn as nn import torch from .. import builder import numpy as np import cv2 from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler @DETECTORS.register_module class CSP(SingleStageDetector): def __init__(self, backbone, neck, bbox_head, refine_roi_extractor=None, refine_head=None, train_cfg=None, test_cfg=None, pretrained=None, detached=True, return_feature_maps=False): super(CSP, self).__init__(backbone, neck, bbox_head, train_cfg, test_cfg, pretrained) if refine_head is not None: self.refine_roi_extractor = builder.build_roi_extractor( refine_roi_extractor) self.refine_head = builder.build_head(refine_head) self.return_feature_maps = return_feature_maps self.train_cfg = train_cfg self.test_cfg = test_cfg self.detached = detached def show_input_debug(self, img, classification_maps, scale_maps, offset_maps): img_numpy = img.cpu().numpy().copy()[0] # img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] img_numpy = np.transpose(img_numpy, [1, 2, 0]) + [102.9801, 115.9465, 122.7717] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [8, 16, 32, 64, 128] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() # cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][2] cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][:80] scale_numpy = scale_maps[0][i].cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[0][i].cpu().numpy().copy()[0][:2] cs, ys, xs = cls_numpy.nonzero() print(len(ys)) for c, x, y in zip(cs, xs, ys): cv2.imshow(str(c), classification_maps[0][i].cpu().numpy().copy()[0][80+c]) realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy - height/2) top_x = int(realx) down_y = int(realy + height/2) down_x = int(realx) top_left = (int(top_x - height * 0.1), int(top_y)) down_right = (int(down_x + height * 0.1), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 5*int(c)), 2) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) def show_input_debug_caltech(self, img, classification_maps, scale_maps, offset_maps): for j in range(img.shape[0]): img_numpy = img.cpu().numpy().copy()[j] img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [4] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2] ignore_numpy = classification_maps[j][i].cpu().numpy().copy()[0][1] cv2.imshow('ignore', ignore_numpy) scale_numpy = scale_maps[j][i].cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2] ys, xs = cls_numpy.nonzero() print(len(ys)) for x, y in zip(xs, ys): # cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][c]) realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy - height/2) top_x = int(realx) down_y = int(realy + height/2) down_x = int(realx) top_left = (int(top_x - height * 0.1), int(top_y)) down_right = (int(down_x + height * 0.1), 
down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 125), 2) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) def show_input_debug_head(self, img, classification_maps, scale_maps, offset_maps): for j in range(img.shape[0]): img_numpy = img.cpu().numpy().copy()[j] img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [4] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2] ignore_numpy = classification_maps[j][i].cpu().numpy().copy()[0][1] cv2.imshow('ignore', ignore_numpy) scale_numpy = scale_maps[j][i].exp().cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2] ys, xs = cls_numpy.nonzero() for x, y in zip(xs, ys): # cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][c]) realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy) top_x = int(realx) down_y = int(realy + height) down_x = int(realx) top_left = (int(top_x - height * 0.41/2), int(top_y)) down_right = (int(down_x + height * 0.41/2), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 125), 2) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) def show_mot_input_debug(self, img, classification_maps, scale_maps, offset_maps): for j in range(img.shape[0]): img_numpy = img.cpu().numpy().copy()[j] img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] # img_numpy = np.transpose(img_numpy, [1, 2, 0]) + [102.9801, 115.9465, 122.7717] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [4] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() # cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][2] cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2] instance_numpy = classification_maps[j][i].cpu().numpy().copy()[0][3] scale_numpy = scale_maps[j][i].cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2] ys, xs = cls_numpy.nonzero() for x, y in zip(xs, ys): c=0 cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][2]) realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy - height/2) top_x = int(realx) down_y = int(realy + height/2) down_x = int(realx) top_left = (int(top_x - height * 0.1), int(top_y)) down_right = (int(down_x + height * 0.1), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 5*int(c)), 2) instance = instance_numpy[y, x] cv2.putText(img_now, str(instance), top_left, cv2.FONT_HERSHEY_COMPLEX, 1, 255) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) @property def refine(self): return hasattr(self, 'refine_head') and self.refine_head is not None def forward_train(self, img, img_metas, gt_bboxes, gt_labels, gt_bboxes_ignore=None, classification_maps=None, scale_maps=None, offset_maps=None): # for tracking data, batches are produced by the dataset instead of the data loader if type(img) == list: img=img[0] img_metas=img_metas[0] gt_bboxes=gt_bboxes[0] gt_labels=gt_labels[0]
gt_bboxes_ignore = gt_bboxes_ignore[0] classification_maps = classification_maps[0] scale_maps = scale_maps[0] offset_maps = offset_maps[0] losses = dict() x = self.extract_feat(img) # self.show_input_debug(img, classification_maps, scale_maps, offset_maps) # self.show_input_debug_caltech(img, classification_maps, scale_maps, offset_maps) # self.show_mot_input_debug(img, classification_maps, scale_maps, offset_maps) # self.show_input_debug_head(img, classification_maps, scale_maps, offset_maps) outs = self.bbox_head(x) loss_inputs = outs + (gt_bboxes, gt_labels, classification_maps, scale_maps, offset_maps, img_metas, self.train_cfg.csp_head if self.refine else self.train_cfg) losses_bbox = self.bbox_head.loss( *loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) losses.update(losses_bbox) if self.refine: if self.detached: x = tuple([i.detach() for i in x]) bbox_inputs = outs + (img_metas, self.train_cfg.csp_head, False) bbox_list = self.bbox_head.get_bboxes(*bbox_inputs, no_strides=False) # no_strides so boxes are not upscaled yet bbox_list = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)[0] for det_bboxes, det_labels in bbox_list ] bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner) bbox_sampler = build_sampler( self.train_cfg.rcnn.sampler, context=self) num_imgs = img.size(0) if gt_bboxes_ignore is None: gt_bboxes_ignore = [None for _ in range(num_imgs)] sampling_results = [] for i in range(num_imgs): if bbox_list[i].shape[0] == 0 or gt_bboxes[i].shape[0] == 0: continue bbox = torch.tensor(bbox_list[i]).float().cuda() assign_result = bbox_assigner.assign( bbox, gt_bboxes[i], gt_bboxes_ignore[i], gt_labels[i]) sampling_result = bbox_sampler.sample( assign_result, bbox, gt_bboxes[i], gt_labels[i]) sampling_results.append(sampling_result) samp_list = [res.bboxes for res in sampling_results] if len(samp_list) == 0: losses.update(dict(loss_refine_cls=torch.tensor(0).float().cuda(), acc=torch.tensor(0).float().cuda())) return losses rois = bbox2roi(samp_list).float() if self.refine_head.loss_opinion is not None: pred_scores = torch.cat([torch.tensor(bbox[:, 4]).float().cuda() for bbox in bbox_list], dim=0) pred_rois = bbox2roi([torch.tensor(bbox).float().cuda() for bbox in bbox_list]) pred_feats = self.refine_roi_extractor( x, pred_rois) pred_scores_refine = self.refine_head(pred_feats) loss_opinion = self.refine_head.compute_opinion_loss(pred_scores, pred_scores_refine) losses.update(loss_opinion) bbox_feats = self.refine_roi_extractor( x, rois) cls_score = self.refine_head(bbox_feats) bbox_targets = self.refine_head.get_target( sampling_results, gt_bboxes, gt_labels, self.train_cfg.rcnn) loss_refine = self.refine_head.loss(cls_score, *bbox_targets[:2]) losses.update(dict(loss_refine_cls=loss_refine["loss_cls"], distL1=loss_refine["dist"])) return losses def simple_test_accuracy(self, img, img_meta): gts = img_meta[0]["gts"] x = self.extract_feat(img) if self.detached: x = (x[0].detach(),) rois = bbox2roi(gts) if rois.shape[0] == 0: return 0, 0 roi_feats = self.refine_roi_extractor( x, rois) cls_score = self.refine_head.get_scores(roi_feats) return (cls_score > 0.5).float().sum(), rois.size(0) def simple_test(self, img, img_meta, rescale=False, return_id=False): x = self.extract_feat(img) outs = self.bbox_head(x) bbox_inputs = outs + (img_meta, self.test_cfg.csp_head if self.refine else self.test_cfg, False) # TODO: Handle rescaling if self.return_feature_maps: return self.bbox_head.get_bboxes_features(*bbox_inputs) bbox_list = self.bbox_head.get_bboxes(*bbox_inputs,
no_strides=False) im_scale = img_meta[0]["scale_factor"] if "id" in img_meta[0]: img_id = img_meta[0]["id"] else: img_id = 0 if self.refine: if self.detached: x = (x[0].detach(),) bbox_list = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)[0] for det_bboxes, det_labels in bbox_list ] refine_cfg = self.test_cfg.get('rcnn', None) bbox_list = [torch.tensor(bbox).float().cuda() for bbox in bbox_list] rois = bbox2roi(bbox_list) bbox_list = [bbox/im_scale for bbox in bbox_list] if rois.shape[0] == 0: cls_score = None else: roi_feats = self.refine_roi_extractor( x, rois) cls_score = self.refine_head.get_scores(roi_feats) res_buffer = [] if cls_score is not None: if refine_cfg is not None: res_buffer = self.refine_head.suppress_boxes(rois, cls_score, img_meta, cfg=refine_cfg) else: res_buffer = self.refine_head.combine_scores(bbox_list, cls_score) if return_id: return res_buffer, img_id return res_buffer bbox_results = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) for det_bboxes, det_labels in bbox_list ] if return_id: return bbox_results[0], img_id return bbox_results[0] def foward_features(self, features): bbox_list = self.bbox_head.get_bboxes(*features) bbox_results = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) for det_bboxes, det_labels in bbox_list ] return bbox_results[0]
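The debug helpers in this detector all decode detections the same way: a responding location (x, y) on the center heatmap, the scale map (height, multiplied by the stride), and the sub-pixel offset map together yield a box with a fixed pedestrian aspect ratio (0.41 in show_input_debug_head). A hedged numpy sketch of that decoding follows; the real decoding happens inside the head's get_bboxes, and the threshold and names here are illustrative:

import numpy as np

def decode_csp(center, scale, offset, stride=4, score_thr=0.5, wh_ratio=0.41):
    # center and scale are (H, W) maps, offset is (2, H, W); returns an
    # N x 5 array of [x1, y1, x2, y2, score] in input-image pixels.
    ys, xs = np.where(center > score_thr)
    boxes = []
    for x, y in zip(xs, ys):
        h = scale[y, x] * stride                   # box height in image pixels
        cy = (y + 0.5 + offset[0, y, x]) * stride  # sub-pixel center y
        cx = (x + 0.5 + offset[1, y, x]) * stride  # sub-pixel center x
        w = wh_ratio * h                           # fixed pedestrian aspect ratio
        boxes.append([cx - w / 2, cy - h / 2, cx + w / 2, cy + h / 2, center[y, x]])
    return np.asarray(boxes, dtype=np.float32)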
46.948718
168
0.535955
from .single_stage import SingleStageDetector from ..registry import DETECTORS from mmdet.core import bbox2result import torch.nn as nn import torch from .. import builder import numpy as np import cv2 from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler @DETECTORS.register_module class CSP(SingleStageDetector): def __init__(self, backbone, neck, bbox_head, refine_roi_extractor=None, refine_head=None, train_cfg=None, test_cfg=None, pretrained=None, detached=True, return_feature_maps=False): super(CSP, self).__init__(backbone, neck, bbox_head, train_cfg, test_cfg, pretrained) if refine_head is not None: self.refine_roi_extractor = builder.build_roi_extractor( refine_roi_extractor) self.refine_head = builder.build_head(refine_head) self.return_feature_maps = return_feature_maps self.train_cfg = train_cfg self.test_cfg = test_cfg self.detached = detached def show_input_debug(self, img, classification_maps, scale_maps, offset_maps): img_numpy = img.cpu().numpy().copy()[0] img_numpy = np.transpose(img_numpy, [1, 2, 0]) + [102.9801, 115.9465, 122.7717] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [8, 16, 32, 64, 128] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() cls_numpy = classification_maps[0][i].cpu().numpy().copy()[0][:80] scale_numpy = scale_maps[0][i].cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[0][i].cpu().numpy().copy()[0][:2] cs, ys, xs = cls_numpy.nonzero() print(len(ys)) for c, x, y in zip(cs, xs, ys): cv2.imshow(str(c), classification_maps[0][i].cpu().numpy().copy()[0][80+c]) realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy - height/2) top_x = int(realx) down_y = int(realy + height/2) down_x = int(realx) top_left = (int(top_x - height * 0.1), int(top_y)) down_right = (int(down_x + height * 0.1), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 5*int(c)), 2) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) def show_input_debug_caltech(self, img, classification_maps, scale_maps, offset_maps): for j in range(img.shape[0]): img_numpy = img.cpu().numpy().copy()[j] img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [4] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2] ignore_numpy = classification_maps[j][i].cpu().numpy().copy()[0][1] cv2.imshow('ignore', ignore_numpy) scale_numpy = scale_maps[j][i].cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2] ys, xs = cls_numpy.nonzero() print(len(ys)) for x, y in zip(xs, ys): realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy - height/2) top_x = int(realx) down_y = int(realy + height/2) down_x = int(realx) top_left = (int(top_x - height * 0.1), int(top_y)) down_right = (int(down_x + height * 0.1), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 125), 2) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) def show_input_debug_head(self, img, classification_maps, scale_maps, offset_maps): for j in 
range(img.shape[0]): img_numpy = img.cpu().numpy().copy()[j] img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [4] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2] ignore_numpy = classification_maps[j][i].cpu().numpy().copy()[0][1] cv2.imshow('ignore', ignore_numpy) scale_numpy = scale_maps[j][i].exp().cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2] ys, xs = cls_numpy.nonzero() for x, y in zip(xs, ys): realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy) top_x = int(realx) down_y = int(realy + height) down_x = int(realx) top_left = (int(top_x - height * 0.41/2), int(top_y)) down_right = (int(down_x + height * 0.41/2), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 125), 2) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) def show_mot_input_debug(self, img, classification_maps, scale_maps, offset_maps): for j in range(img.shape[0]): img_numpy = img.cpu().numpy().copy()[j] img_numpy = np.transpose(img_numpy, [1, 2, 0]) * [58.395, 57.12, 57.375] + [123.675, 116.28, 103.53] img_numpy = img_numpy[:, :, ::-1] img_numpy = img_numpy.astype(np.uint8) strides = [4] img_nows = [] for i, stride in enumerate(strides): img_now = img_numpy.copy() cls_numpy = classification_maps[j][i].cpu().numpy().copy()[0][2] instance_numpy = classification_maps[j][i].cpu().numpy().copy()[0][3] scale_numpy = scale_maps[j][i].cpu().numpy().copy()[0][0] * stride offset_numpy = offset_maps[j][i].cpu().numpy().copy()[0][:2] ys, xs = cls_numpy.nonzero() for x, y in zip(xs, ys): c=0 cv2.imshow(str(c), classification_maps[j][i].cpu().numpy().copy()[0][2]) realx = x realy = y height = scale_numpy[y, x] realy = realy + 0.5 + offset_numpy[0][y, x] realx = realx + 0.5 + offset_numpy[1][y, x] realy = realy * stride realx = realx * stride top_y = int(realy - height/2) top_x = int(realx) down_y = int(realy + height/2) down_x = int(realx) top_left = (int(top_x - height * 0.1), int(top_y)) down_right = (int(down_x + height * 0.1), down_y) cv2.rectangle(img_now, top_left, down_right, (255, 255, 5*int(c)), 2) instance = instance_numpy[y, x] cv2.putText(img_now, str(instance), top_left, cv2.FONT_HERSHEY_COMPLEX, 1, 255) img_nows.append(img_now) cv2.imshow(str(i) +'img', img_now) cv2.waitKey(0) @property def refine(self): return hasattr(self, 'refine_head') and self.refine_head is not None def forward_train(self, img, img_metas, gt_bboxes, gt_labels, gt_bboxes_ignore=None, classification_maps=None, scale_maps=None, offset_maps=None): if type(img) == list: img=img[0] img_metas=img_metas[0] gt_bboxes=gt_bboxes[0] gt_labels=gt_labels[0] gt_bboxes_ignore = gt_bboxes_ignore[0] classification_maps = classification_maps[0] scale_maps = scale_maps[0] offset_maps = offset_maps[0] losses = dict() x = self.extract_feat(img) outs = self.bbox_head(x) loss_inputs = outs + (gt_bboxes, gt_labels, classification_maps, scale_maps, offset_maps, img_metas, self.train_cfg.csp_head if self.refine else self.train_cfg) losses_bbox = self.bbox_head.loss( *loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) losses.update(losses_bbox) if self.refine: if self.detached: x = tuple([i.detach() for i in x]) 
bbox_inputs = outs + (img_metas, self.train_cfg.csp_head, False) bbox_list = self.bbox_head.get_bboxes(*bbox_inputs, no_strides=False) bbox_list = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)[0] for det_bboxes, det_labels in bbox_list ] bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner) bbox_sampler = build_sampler( self.train_cfg.rcnn.sampler, context=self) num_imgs = img.size(0) if gt_bboxes_ignore is None: gt_bboxes_ignore = [None for _ in range(num_imgs)] sampling_results = [] for i in range(num_imgs): if bbox_list[i].shape[0] == 0 or gt_bboxes[i].shape[0] == 0: continue bbox = torch.tensor(bbox_list[i]).float().cuda() assign_result = bbox_assigner.assign( bbox, gt_bboxes[i], gt_bboxes_ignore[i], gt_labels[i]) sampling_result = bbox_sampler.sample( assign_result, bbox, gt_bboxes[i], gt_labels[i]) sampling_results.append(sampling_result) samp_list = [res.bboxes for res in sampling_results] if len(samp_list) == 0: losses.update(dict(loss_refine_cls=torch.tensor(0).float().cuda(), acc=torch.tensor(0).float().cuda())) return losses rois = bbox2roi(samp_list).float() if self.refine_head.loss_opinion is not None: pred_scores = torch.cat([torch.tensor(bbox[:, 4]).float().cuda() for bbox in bbox_list], dim=0) pred_rois = bbox2roi([torch.tensor(bbox).float().cuda() for bbox in bbox_list]) pred_feats = self.refine_roi_extractor( x, pred_rois) pred_scores_refine = self.refine_head(pred_feats) loss_opinion = self.refine_head.compute_opinion_loss(pred_scores, pred_scores_refine) losses.update(loss_opinion) bbox_feats = self.refine_roi_extractor( x, rois) cls_score = self.refine_head(bbox_feats) bbox_targets = self.refine_head.get_target( sampling_results, gt_bboxes, gt_labels, self.train_cfg.rcnn) loss_refine = self.refine_head.loss(cls_score, *bbox_targets[:2]) losses.update(dict(loss_refine_cls=loss_refine["loss_cls"], distL1=loss_refine["dist"])) return losses def simple_test_accuracy(self, img, img_meta): gts = img_meta[0]["gts"] x = self.extract_feat(img) if self.detached: x = (x[0].detach(),) rois = bbox2roi(gts) if rois.shape[0] == 0: return 0, 0 roi_feats = self.refine_roi_extractor( x, rois) cls_score = self.refine_head.get_scores(roi_feats) return (cls_score > 0.5).float().sum(), rois.size(0) def simple_test(self, img, img_meta, rescale=False, return_id=False): x = self.extract_feat(img) outs = self.bbox_head(x) bbox_inputs = outs + (img_meta, self.test_cfg.csp_head if self.refine else self.test_cfg, False) if self.return_feature_maps: return self.bbox_head.get_bboxes_features(*bbox_inputs) bbox_list = self.bbox_head.get_bboxes(*bbox_inputs, no_strides=False) im_scale = img_meta[0]["scale_factor"] if "id" in img_meta[0]: img_id = img_meta[0]["id"] else: img_id = 0 if self.refine: if self.detached: x = (x[0].detach(),) bbox_list = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes)[0] for det_bboxes, det_labels in bbox_list ] refine_cfg = self.test_cfg.get('rcnn', None) bbox_list = [torch.tensor(bbox).float().cuda() for bbox in bbox_list] rois = bbox2roi(bbox_list) bbox_list = [bbox/im_scale for bbox in bbox_list] if rois.shape[0] == 0: cls_score = None else: roi_feats = self.refine_roi_extractor( x, rois) cls_score = self.refine_head.get_scores(roi_feats) res_buffer = [] if cls_score is not None: if refine_cfg is not None: res_buffer = self.refine_head.suppress_boxes(rois, cls_score, img_meta, cfg=refine_cfg) else: res_buffer = self.refine_head.combine_scores(bbox_list, cls_score) if return_id: return res_buffer, img_id return res_buffer 
bbox_results = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) for det_bboxes, det_labels in bbox_list ] if return_id: return bbox_results[0], img_id return bbox_results[0] def foward_features(self, features): bbox_list = self.bbox_head.get_bboxes(*features) bbox_results = [ bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) for det_bboxes, det_labels in bbox_list ] return bbox_results[0]
true
true
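The CSP record above spends most of its debug methods decoding center/scale/offset maps back into pixel boxes. A minimal standalone sketch of that decoding, not taken from the record itself: the function name, map shapes, and score threshold are illustrative assumptions; the (y + 0.5 + offset) * stride arithmetic, the exp'd scale map, and the 0.41 pedestrian aspect ratio mirror the show_input_debug_head logic shown above.
import numpy as np

def decode_csp_maps(cls_map, scale_map, offset_map, stride=4, score_thr=0.5):
    # cls_map:    (H, W) center heatmap scores            (assumed shape)
    # scale_map:  (H, W) log-height map, exp'd to pixels  (as in the head variant)
    # offset_map: (2, H, W) sub-pixel y/x offsets
    boxes = []
    ys, xs = np.where(cls_map > score_thr)
    for y, x in zip(ys, xs):
        height = np.exp(scale_map[y, x]) * stride
        # Recover the real center from the grid cell plus the learned offset.
        cy = (y + 0.5 + offset_map[0, y, x]) * stride
        cx = (x + 0.5 + offset_map[1, y, x]) * stride
        w = 0.41 * height  # fixed pedestrian aspect ratio used in the debug code
        boxes.append((cx - w / 2, cy - height / 2, cx + w / 2, cy + height / 2))
    return boxes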
f7001e81c88f6887d923756d3bef3ec22c9f9ddb
3,110
py
Python
examples/rfm9x_simpletest.py
lgnashold/Adafruit_CircuitPython_RFM9x
9350b5d151444499ff022969d9299c8a202d8b7f
[ "MIT" ]
null
null
null
examples/rfm9x_simpletest.py
lgnashold/Adafruit_CircuitPython_RFM9x
9350b5d151444499ff022969d9299c8a202d8b7f
[ "MIT" ]
null
null
null
examples/rfm9x_simpletest.py
lgnashold/Adafruit_CircuitPython_RFM9x
9350b5d151444499ff022969d9299c8a202d8b7f
[ "MIT" ]
null
null
null
# Simple demo of sending and receiving data with the RFM95 LoRa radio. # Author: Tony DiCola import board import busio import digitalio import adafruit_rfm9x # Define radio parameters. RADIO_FREQ_MHZ = 915.0 # Frequency of the radio in MHz. Must match your # module! Can be a value like 915.0, 433.0, etc. # Define pins connected to the chip, use these if wiring up the breakout according to the guide: CS = digitalio.DigitalInOut(board.D5) RESET = digitalio.DigitalInOut(board.D6) # Or uncomment and instead use these if using a Feather M0 RFM9x board and the appropriate # CircuitPython build: # CS = digitalio.DigitalInOut(board.RFM9X_CS) # RESET = digitalio.DigitalInOut(board.RFM9X_RST) # Define the onboard LED LED = digitalio.DigitalInOut(board.D13) LED.direction = digitalio.Direction.OUTPUT # Initialize SPI bus. spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO) # Initialize RFM radio rfm9x = adafruit_rfm9x.RFM9x(spi, CS, RESET, RADIO_FREQ_MHZ) # Note that the radio is configured in LoRa mode so you can't control sync # word, encryption, frequency deviation, or other settings! # You can however adjust the transmit power (in dB). The default is 13 dB but # high power radios like the RFM95 can go up to 23 dB: rfm9x.tx_power = 23 # Send a packet. Note you can only send a packet up to 252 bytes in length. # This is a limitation of the radio packet size, so if you need to send larger # amounts of data you will need to break it into smaller send calls. Each send # call will wait for the previous one to finish before continuing. rfm9x.send(bytes("Hello world!\r\n", "utf-8")) print("Sent Hello World message!") # Wait to receive packets. Note that this library can't receive data at a fast # rate, in fact it can only receive and process one 252 byte packet at a time. # This means you should only use this for low bandwidth scenarios, like sending # and receiving a single message at a time. print("Waiting for packets...") while True: packet = rfm9x.receive() # Optionally change the receive timeout from its default of 0.5 seconds: # packet = rfm9x.receive(timeout=5.0) # If no packet was received during the timeout then None is returned. if packet is None: # Packet has not been received LED.value = False print("Received nothing! Listening again...") else: # Received a packet! LED.value = True # Print out the raw bytes of the packet: print("Received (raw bytes): {0}".format(packet)) # And decode to ASCII text and print it too. Note that you always # receive raw bytes and need to convert to a text format like ASCII # if you intend to do string processing on your data. Make sure the # sending side is sending ASCII data before you try to decode! packet_text = str(packet, "ascii") print("Received (ASCII): {0}".format(packet_text)) # Also read the RSSI (signal strength) of the last received message and # print it. rssi = rfm9x.last_rssi print("Received signal strength: {0} dB".format(rssi))
40.921053
96
0.717685
import board import busio import digitalio import adafruit_rfm9x RADIO_FREQ_MHZ = 915.0 CS = digitalio.DigitalInOut(board.D5) RESET = digitalio.DigitalInOut(board.D6) LED = digitalio.DigitalInOut(board.D13) LED.direction = digitalio.Direction.OUTPUT spi = busio.SPI(board.SCK, MOSI=board.MOSI, MISO=board.MISO) rfm9x = adafruit_rfm9x.RFM9x(spi, CS, RESET, RADIO_FREQ_MHZ) # word, encryption, frequency deviation, or other settings! # You can however adjust the transmit power (in dB). The default is 13 dB but # high power radios like the RFM95 can go up to 23 dB: rfm9x.tx_power = 23 # Send a packet. Note you can only send a packet up to 252 bytes in length. # This is a limitation of the radio packet size, so if you need to send larger # amounts of data you will need to break it into smaller send calls. Each send # call will wait for the previous one to finish before continuing. rfm9x.send(bytes("Hello world!\r\n", "utf-8")) print("Sent Hello World message!") # Wait to receive packets. Note that this library can't receive data at a fast print("Waiting for packets...") while True: packet = rfm9x.receive() if packet is None: LED.value = False print("Received nothing! Listening again...") else: LED.value = True print("Received (raw bytes): {0}".format(packet)) packet_text = str(packet, "ascii") print("Received (ASCII): {0}".format(packet_text)) rssi = rfm9x.last_rssi print("Received signal strength: {0} dB".format(rssi))
true
true
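The RFM9x example above decodes packets inline inside the loop. A small variant sketch that factors the receive-and-decode step into a helper: the function name and the 5-second timeout are illustrative choices, but the receive(timeout=...), str(packet, "ascii"), and last_rssi calls all appear in the original example.
def wait_for_text_packet(radio, timeout=5.0):
    # Block up to `timeout` seconds; return (text, rssi) or (None, None).
    packet = radio.receive(timeout=timeout)
    if packet is None:
        return None, None
    try:
        text = str(packet, "ascii")
    except UnicodeDecodeError:
        # Non-ASCII payload: fall back to the raw bytes' repr rather than crash.
        text = repr(packet)
    return text, radio.last_rssi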
f7001ef73a819ffe5a62565d38c655c3fc71664d
13,574
py
Python
clinical_outcome.py
MichaelAllen1966/stroke_outcome_algorithm
99050bf4e0b19c38c8973fe10234fee4f230a172
[ "MIT" ]
null
null
null
clinical_outcome.py
MichaelAllen1966/stroke_outcome_algorithm
99050bf4e0b19c38c8973fe10234fee4f230a172
[ "MIT" ]
null
null
null
clinical_outcome.py
MichaelAllen1966/stroke_outcome_algorithm
99050bf4e0b19c38c8973fe10234fee4f230a172
[ "MIT" ]
null
null
null
""" Class to hold clinical outcome model. Predicts probability of good outcome of patient(s) or group(s) of patients. Call `calculate_outcome_for_all(args)` from outside of the object Inputs ====== All inputs take np arrays (for multiple groups of patients). mimic: proportion of patients with stroke mimic ich: proportion of patients with intracerebral haemorrhage (ICH). Or probability of a patient having an ICH, when using for a single patient. nlvo: proportion of patients with non-large vessel occlusions (nLVO). Or probability of a patient having an NLVO, when using for a single patient. lvo: proportion of patients with large vessel occlusions (LVO). Or probability of a patient having a LVO, when using for a single patient. onset_to_needle: minutes from onset to thrombolysis onset_to_ouncture: minutes from onset to thrombectomy nlvo_eligible_for_treatment: proportion of patients with NLVO suitable for treatment with thrombolysis. Or probability of a patient with NVLO being eligible for treatment. lvo_eligible_for_treatment: proportion of patients with LVO suitable for treatment with thrombolysis and/or thrombectomy. Or probability of a patient with LVO being eligible for treatment. Returns ======= Probability of good outcome: The probability of having a good outcome (modified Rankin Scale 0-1) for the patient or group of patients (np array). References for decay of effect of thrombolysis and thrombectomy =============================================================== Decay of effect of thrombolysis without image selection of patients taken from: Emberson, Jonathan, Kennedy R. Lees, Patrick Lyden, Lisa Blackwell, Gregory Albers, Erich Bluhmki, Thomas Brott, et al (2014). “Effect of Treatment Delay, Age, and Stroke Severity on the Effects of Intravenous Thrombolysis with Alteplase for Acute Ischaemic Stroke: A Meta-Analysis of Individual Patient Data from Randomised Trials.” The Lancet 384: 1929–1935. https://doi.org/10.1016/S0140-6736(14)60584-5. * Time to no effect = 6.3hrs Decay of effect of thrombectomy without image selection of patients taken from: Fransen, Puck S. S., Olvert A. Berkhemer, Hester F. Lingsma, Debbie Beumer, Lucie A. van den Berg, Albert J. Yoo, Wouter J. Schonewille, et al. (2016) “Time to Reperfusion and Treatment Effect for Acute Ischemic Stroke: A Randomized Clinical Trial.” JAMA Neurology 73: 190–96. https://doi.org/10.1001/jamaneurol.2015.3886. * Time to no effect = 8hrs """ import numpy as np import pandas as pd class Clinical_outcome: def __init__(self): """Constructor for clinical outcome model """ self.name = "Clinical outcome model" self.thrombectomy_time_no_effect = 8 * 60 self.thrombolysis_time_no_effect = 6.3 * 60 self.maximum_permitted_time_to_thrombectomy = 360 self.maximum_permitted_time_to_thrombolysis = 270 def calculate_outcome_for_all(self, mimic, ich, nlvo, lvo, onset_to_needle, onset_to_puncture, nlvo_eligible_for_treatment, lvo_eligible_for_treatment, prop_thrombolysed_lvo_receiving_thrombectomy): """ Calculates the probability of good outcome for all patients admitted with acute stroke. Based on: Holodinsky JK, Williamson TS, Demchuk AM, et al. Modeling Stroke Patient Transport for All Patients With Suspected Large-Vessel Occlusion. JAMA Neurol. 2018;75(12):1477-1486. 
doi:10.1001/jamaneurol.2018.2424 Sums outcomes for: 1) mimics 2) ICH 3) non-LVO 4) LVO treated with thrombolysis 5) LVO treated with thrombectomy (if thrombolysis not successful in a drip and ship configuration) arguments --------- np arrays (each row is a given geographic area with different characteristics) mimic: proportion of patients with stroke mimic ich: proportion of patients with ICH nlvo: proportion of patients with non-lvo lvo: proportion of patients with lvo onset_to_needle: minutes from onset to thrombolysis onset_to_puncture: minutes from onset to thrombectomy nlvo_eligible_for_treatment: proportion of nlvo suitable for treatment lvo_eligible_for_treatment: proportion of lvo suitable for treatment returns ------- probability of good outcome for all (np array) """ # Get outcomes # ------------ outcomes = pd.DataFrame() # Calculate good outcomes for mimics outcomes['mimic'] = self._calculate_outcome_for_stroke_mimics( mimic.shape) # Calculate good outcomes for ich outcomes['ich'] = self._calculate_outcome_for_ICH(mimic.shape) # Calculate good outcomes for nlvo without treatment outcomes['nlvo_base'] = \ np.full(nlvo.shape, 0.4622) # Calculate good outcomes for nlvo with thrombolysis outcomes['nlvo_add_ivt'] = \ self._calculate_thrombolysis_outcome_for_nlvo(onset_to_needle) # Calculate good outcomes for lvo without treatment outcomes['lvo_base'] = \ np.full(nlvo.shape, 0.1328) # Calculate good outcomes for lvo with thrombolysis outcomes['lvo_add_ivt'] = \ self._calculate_thrombolysis_outcome_for_lvo(onset_to_needle) # Calculate good outcomes for lvo with thrombectomy outcomes['lvo_add_et'] = \ self._calculate_thrombectomy_outcome_for_lvo(onset_to_puncture) # Weight outcome results by proportion of patients # ------------------------------------------------ # 'Results' are good outcomes results = pd.DataFrame() # Results for mimic results['mimic'] = outcomes['mimic'] * mimic # Results for ich results['ich'] = outcomes['ich'] * ich # Results for nlvo results['nlvo_base'] = nlvo * outcomes['nlvo_base'] results['nlvo_ivt'] = \ nlvo * outcomes['nlvo_add_ivt'] * nlvo_eligible_for_treatment # Results for lvo results['lvo_base'] = lvo * outcomes['lvo_base'] results['lvo_ivt'] = \ lvo * outcomes['lvo_add_ivt'] * lvo_eligible_for_treatment # Adjust thrombectomy/thrombolysis ratio for LVO # Reduce thrombectomy treatment by LVO responding to IVT lvo_receiving_et = ((lvo * lvo_eligible_for_treatment * prop_thrombolysed_lvo_receiving_thrombectomy) - results['lvo_ivt']) results['lvo_et'] = lvo_receiving_et * outcomes['lvo_add_et'] p_good = results.sum(axis=1).values return p_good @staticmethod def _calculate_outcome_for_ICH(array_shape): """ Calculates the probability of good outcome for patients with intra- cranial haemorrhage (ICH). Sets all values to 0.24 Based on Holodinsky et al. (2018) Drip-and-Ship vs. Mothership: Modelling Stroke Patient Transport for All Suspected Large Vessel Occlusion Patients. JAMA Neuro (in press) arguments --------- array size returns ------- probability of good outcome for ICH (np array) """ # Create an array of required length and set all values to 0.24 p_good = np.zeros(array_shape) p_good[:] = 0.24 return p_good @staticmethod def _calculate_outcome_for_stroke_mimics(array_shape): """ Calculates the probability of good outcome for patients with stroke mimic Sets all values to 1 Based on Holodinsky et al. (2018) Drip-and-Ship vs. Mothership: Modelling Stroke Patient Transport for All Suspected Large Vessel Occlusion Patients.
JAMA Neuro (in press) arguments --------- array size returns ------- probability of good outcome for stroke mimics (np array) """ # Create an array of required length and set all values to 1 p_good = np.zeros(array_shape) p_good[:] = 1 return p_good def _calculate_thrombectomy_outcome_for_lvo(self, onset_to_puncture): """ Calculates the probability of additional good outcome for LVO patients receiving thrombectomy. arguments --------- onset_to_puncture : np array in minutes returns ------- probability of additional good outcome if given thrombectomy (np array) """ p_good_max = 0.5208 p_good_min = 0.1328 # Convert probability to odds odds_good_max = p_good_max / (1 - p_good_max) odds_good_min = p_good_min / (1 - p_good_min) # Calculate fraction of effective time used fraction_max_effect_time_used = \ onset_to_puncture / self.thrombectomy_time_no_effect # Calculate odds of good outcome with treatment odds_good = np.exp(np.log(odds_good_max) - ((np.log(odds_good_max) - np.log(odds_good_min)) * fraction_max_effect_time_used)) # Convert odds to probability prob_good = odds_good / (1 + odds_good) prob_good[prob_good < p_good_min] = p_good_min # Calculate probability of additional good outcome p_good_add = prob_good - p_good_min # Set additional good outcomes to zero if past permitted treatment time mask = onset_to_puncture > self.maximum_permitted_time_to_thrombectomy p_good_add[mask] = 0 # Ensure no negative outcomes mask = p_good_add < 0 p_good_add[mask] = 0 return p_good_add def _calculate_thrombolysis_outcome_for_lvo(self, onset_to_needle): """ Calculates the probability of additional good outcome for LVO patients receiving thrombolysis. Does not include baseline untreated good outcomes. arguments --------- onset_to_needle : np array in minutes returns ------- probability of additional good outcome if given thrombolysis (np array) """ p_good_max = 0.2441 p_good_min = 0.1328 # Convert probability to odds odds_good_max = p_good_max / (1 - p_good_max) odds_good_min = p_good_min / (1 - p_good_min) # Calculate fraction of effective time used fraction_max_effect_time_used = \ onset_to_needle / self.thrombolysis_time_no_effect # Calculate odds of good outcome with treatment odds_good = np.exp(np.log(odds_good_max) - ((np.log(odds_good_max) - np.log(odds_good_min)) * fraction_max_effect_time_used)) # Convert odds to probability prob_good = odds_good / (1 + odds_good) prob_good[prob_good < p_good_min] = p_good_min # Calculate probability of additional good outcome p_good_add = prob_good - p_good_min # Set additional good outcomes to zero if past permitted treatment time mask = onset_to_needle> self.maximum_permitted_time_to_thrombolysis p_good_add[mask] = 0 # Ensure no negative outcomes mask = p_good_add < 0 p_good_add[mask] = 0 # return outcome and proportion of treated who respond return p_good_add def _calculate_thrombolysis_outcome_for_nlvo(self, onset_to_needle): """ Calculates the probability of good outcome for non-LVO patients receiving thrombolysis.
arguments --------- onset_to_needle : np array in minutes returns ------- probability of good outcome if given thrombolysis (np array) """ p_good_max = 0.6444 p_good_min = 0.4622 # Convert probability to odds odds_good_max = p_good_max / (1 - p_good_max) odds_good_min = p_good_min / (1 - p_good_min) # Calculate fraction of effective time used fraction_max_effect_time_used = (onset_to_needle / self.thrombolysis_time_no_effect) # Calculate odds of good outcome with treatment odds_good = np.exp(np.log(odds_good_max) - ((np.log(odds_good_max) - np.log(odds_good_min)) * fraction_max_effect_time_used)) # Convert odds to probability prob_good = odds_good / (1 + odds_good) prob_good[prob_good < p_good_min] = p_good_min # Calculate probability of additional good outcome p_good_add = prob_good - p_good_min mask = onset_to_needle> self.maximum_permitted_time_to_thrombolysis p_good_add[mask] = 0 # Ensure no negative outcomes mask = p_good_add < 0 p_good_add[mask] = 0 # return outcome and proportion of treated who respond return p_good_add
33.516049
80
0.627229
import numpy as np import pandas as pd class Clinical_outcome: def __init__(self): self.name = "Clinical outcome model" self.thrombectomy_time_no_effect = 8 * 60 self.thrombolysis_time_no_effect = 6.3 * 60 self.maximum_permitted_time_to_thrombectomy = 360 self.maximum_permitted_time_to_thrombolysis = 270 def calculate_outcome_for_all(self, mimic, ich, nlvo, lvo, onset_to_needle, onset_to_puncture, nlvo_eligible_for_treatment, lvo_eligible_for_treatment, prop_thrombolysed_lvo_receiving_thrombectomy): outcomes = pd.DataFrame() outcomes['mimic'] = self._calculate_outcome_for_stroke_mimics( mimic.shape) outcomes['ich'] = self._calculate_outcome_for_ICH(mimic.shape) outcomes['nlvo_base'] = \ np.full(nlvo.shape, 0.4622) outcomes['nlvo_add_ivt'] = \ self._calculate_thrombolysis_outcome_for_nlvo(onset_to_needle) outcomes['lvo_base'] = \ np.full(nlvo.shape, 0.1328) outcomes['lvo_add_ivt'] = \ self._calculate_thrombolysis_outcome_for_lvo(onset_to_needle) outcomes['lvo_add_et'] = \ self._calculate_thrombectomy_outcome_for_lvo(onset_to_puncture) results = pd.DataFrame() results['mimic'] = outcomes['mimic'] * mimic results['ich'] = outcomes['ich'] * ich results['nlvo_base'] = nlvo * outcomes['nlvo_base'] results['nlvo_ivt'] = \ nlvo * outcomes['nlvo_add_ivt'] * nlvo_eligible_for_treatment results['lvo_base'] = lvo * outcomes['lvo_base'] results['lvo_ivt'] = \ lvo * outcomes['lvo_add_ivt'] * lvo_eligible_for_treatment lvo_receiving_et = ((lvo * lvo_eligible_for_treatment * prop_thrombolysed_lvo_receiving_thrombectomy) - results['lvo_ivt']) results['lvo_et'] = lvo_receiving_et * outcomes['lvo_add_et'] p_good = results.sum(axis=1).values return p_good @staticmethod def _calculate_outcome_for_ICH(array_shape): p_good = np.zeros(array_shape) p_good[:] = 0.24 return p_good @staticmethod def _calculate_outcome_for_stroke_mimics(array_shape): p_good = np.zeros(array_shape) p_good[:] = 1 return p_good def _calculate_thrombectomy_outcome_for_lvo(self, onset_to_puncture): p_good_max = 0.5208 p_good_min = 0.1328 odds_good_max = p_good_max / (1 - p_good_max) odds_good_min = p_good_min / (1 - p_good_min) fraction_max_effect_time_used = \ onset_to_puncture / self.thrombectomy_time_no_effect odds_good = np.exp(np.log(odds_good_max) - ((np.log(odds_good_max) - np.log(odds_good_min)) * fraction_max_effect_time_used)) prob_good = odds_good / (1 + odds_good) prob_good[prob_good < p_good_min] = p_good_min p_good_add = prob_good - p_good_min mask = onset_to_puncture > self.maximum_permitted_time_to_thrombectomy p_good_add[mask] = 0 mask = p_good_add < 0 p_good_add[mask] = 0 return p_good_add def _calculate_thrombolysis_outcome_for_lvo(self, onset_to_needle): p_good_max = 0.2441 p_good_min = 0.1328 odds_good_max = p_good_max / (1 - p_good_max) odds_good_min = p_good_min / (1 - p_good_min) fraction_max_effect_time_used = \ onset_to_needle / self.thrombolysis_time_no_effect odds_good = np.exp(np.log(odds_good_max) - ((np.log(odds_good_max) - np.log(odds_good_min)) * fraction_max_effect_time_used)) prob_good = odds_good / (1 + odds_good) prob_good[prob_good < p_good_min] = p_good_min p_good_add = prob_good - p_good_min mask = onset_to_needle> self.maximum_permitted_time_to_thrombolysis p_good_add[mask] = 0 mask = p_good_add < 0 p_good_add[mask] = 0 return p_good_add def _calculate_thrombolysis_outcome_for_nlvo(self, onset_to_needle): p_good_max = 0.6444 p_good_min = 0.4622 odds_good_max = p_good_max / (1 - p_good_max) odds_good_min = p_good_min / (1 - p_good_min) fraction_max_effect_time_used = (onset_to_needle / 
self.thrombolysis_time_no_effect) odds_good = np.exp(np.log(odds_good_max) - ((np.log(odds_good_max) - np.log(odds_good_min)) * fraction_max_effect_time_used)) prob_good = odds_good / (1 + odds_good) prob_good[prob_good < p_good_min] = p_good_min p_good_add = prob_good - p_good_min mask = onset_to_needle> self.maximum_permitted_time_to_thrombolysis p_good_add[mask] = 0 mask = p_good_add < 0 p_good_add[mask] = 0 return p_good_add
true
true
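All three treatment-effect methods in the clinical_outcome record share one curve: convert the t=0 and no-effect probabilities to odds, interpolate linearly in log-odds space by the fraction of the effective window used, then convert back. A standalone worked sketch of that formula; the function name is illustrative, but the constants (0.2441, 0.1328, 6.3 h) come from the LVO-thrombolysis method above.
import numpy as np

def treated_prob_good(onset_to_treatment, p_max, p_min, time_no_effect):
    # Log-odds interpolation between p_max (t=0) and p_min (t=time_no_effect).
    odds_max = p_max / (1 - p_max)
    odds_min = p_min / (1 - p_min)
    frac = onset_to_treatment / time_no_effect
    odds = np.exp(np.log(odds_max) - (np.log(odds_max) - np.log(odds_min)) * frac)
    return odds / (1 + odds)

p = treated_prob_good(np.array([60.0, 180.0, 378.0]), 0.2441, 0.1328, 6.3 * 60)
# -> roughly [0.223, 0.185, 0.133]; the model's additional good outcome
#    is p minus the 0.1328 untreated baseline, floored at zero.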
f7001f284c5642d54b622cdec48cd3926702afb2
2,137
py
Python
data/train/python/f7001f284c5642d54b622cdec48cd3926702afb2urls.py
harshp8l/deep-learning-lang-detection
2a54293181c1c2b1a2b840ddee4d4d80177efb33
[ "MIT" ]
84
2017-10-25T15:49:21.000Z
2021-11-28T21:25:54.000Z
data/train/python/f7001f284c5642d54b622cdec48cd3926702afb2urls.py
vassalos/deep-learning-lang-detection
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
[ "MIT" ]
5
2015-01-05T07:31:25.000Z
2021-01-03T00:56:57.000Z
data/train/python/f7001f284c5642d54b622cdec48cd3926702afb2urls.py
vassalos/deep-learning-lang-detection
cbb00b3e81bed3a64553f9c6aa6138b2511e544e
[ "MIT" ]
24
2017-11-22T08:31:00.000Z
2022-03-27T01:22:31.000Z
# -*- coding: utf-8 -*- from django.conf.urls.defaults import patterns, url from djangopypi.feeds import ReleaseFeed urlpatterns = patterns("djangopypi.views", url(r'^$', "root", name="djangopypi-root"), url(r'^packages/$','packages.index', name='djangopypi-package-index'), url(r'^simple/$','packages.simple_index', name='djangopypi-package-index-simple'), url(r'^search/$','packages.search',name='djangopypi-search'), url(r'^pypi/$', 'root', name='djangopypi-release-index'), url(r'^rss/$', ReleaseFeed(), name='djangopypi-rss'), url(r'^simple/(?P<package>[\w\d_\.\-]+)/$','packages.simple_details', name='djangopypi-package-simple'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/$','packages.details', name='djangopypi-package'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/rss/$', ReleaseFeed(), name='djangopypi-package-rss'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/doap.rdf$','packages.doap', name='djangopypi-package-doap'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/$','packages.manage', name='djangopypi-package-manage'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/versions/$','packages.manage_versions', name='djangopypi-package-manage-versions'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/$', 'releases.details',name='djangopypi-release'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/doap.rdf$', 'releases.doap',name='djangopypi-release-doap'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/manage/$', 'releases.manage',name='djangopypi-release-manage'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/metadata/$', 'releases.manage_metadata',name='djangopypi-release-manage-metadata'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/$', 'releases.manage_files',name='djangopypi-release-manage-files'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/upload/$', 'releases.upload_file',name='djangopypi-release-upload-file'), )
54.794872
88
0.595227
from django.conf.urls.defaults import patterns, url from djangopypi.feeds import ReleaseFeed urlpatterns = patterns("djangopypi.views", url(r'^$', "root", name="djangopypi-root"), url(r'^packages/$','packages.index', name='djangopypi-package-index'), url(r'^simple/$','packages.simple_index', name='djangopypi-package-index-simple'), url(r'^search/$','packages.search',name='djangopypi-search'), url(r'^pypi/$', 'root', name='djangopypi-release-index'), url(r'^rss/$', ReleaseFeed(), name='djangopypi-rss'), url(r'^simple/(?P<package>[\w\d_\.\-]+)/$','packages.simple_details', name='djangopypi-package-simple'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/$','packages.details', name='djangopypi-package'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/rss/$', ReleaseFeed(), name='djangopypi-package-rss'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/doap.rdf$','packages.doap', name='djangopypi-package-doap'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/$','packages.manage', name='djangopypi-package-manage'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/manage/versions/$','packages.manage_versions', name='djangopypi-package-manage-versions'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/$', 'releases.details',name='djangopypi-release'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/doap.rdf$', 'releases.doap',name='djangopypi-release-doap'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/manage/$', 'releases.manage',name='djangopypi-release-manage'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/metadata/$', 'releases.manage_metadata',name='djangopypi-release-manage-metadata'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/$', 'releases.manage_files',name='djangopypi-release-manage-files'), url(r'^pypi/(?P<package>[\w\d_\.\-]+)/(?P<version>[\w\d_\.\-]+)/files/upload/$', 'releases.upload_file',name='djangopypi-release-upload-file'), )
true
true
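Every route in the djangopypi URLconf above carries a name, so views and templates can resolve paths by name instead of hard-coding them. A hypothetical usage sketch, assuming the URLconf is mounted at the site root; the `django.core.urlresolvers` import matches the old `defaults`-era Django this file targets, and the package/version values are made up.
from django.core.urlresolvers import reverse

# Resolves to '/pypi/example-package/' via the 'djangopypi-package' route.
package_url = reverse('djangopypi-package', kwargs={'package': 'example-package'})

# Resolves to '/pypi/example-package/1.0/files/upload/'.
upload_url = reverse('djangopypi-release-upload-file',
                     kwargs={'package': 'example-package', 'version': '1.0'})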
f7001f45079e3103298a8ceb0386c7b776820464
129,647
py
Python
examples/earnings_surprises/earnings-converter.py
brettelliot/event-study
cffc6a80dbc4b33e68e863488428996af51cc991
[ "MIT" ]
2
2019-10-20T15:53:20.000Z
2020-05-30T22:17:20.000Z
examples/earnings_surprises/earnings-converter.py
brettelliot/event-study
cffc6a80dbc4b33e68e863488428996af51cc991
[ "MIT" ]
2
2021-03-31T18:50:01.000Z
2021-06-01T22:11:55.000Z
examples/earnings_surprises/earnings-converter.py
brettelliot/event-study
cffc6a80dbc4b33e68e863488428996af51cc991
[ "MIT" ]
1
2021-07-18T05:22:45.000Z
2021-07-18T05:22:45.000Z
import pandas as pd from pandas.compat import StringIO import numpy numpy.set_printoptions(threshold=numpy.nan) def main(): df = pd.read_csv(StringIO(earnings), sep=",", header=None, names=['symbol', 'exchange', 'eps_pct_diff_surp', 'asof_date']) df = df.sort_values(by=['asof_date']) print(df.head()) print(len(df)) df.to_csv('../../data/events/nyse_earnings_surprises_2013.csv', index=False) myString = ', '.join('"{0}"'.format(s) for s in df.symbol.unique()) myString = myString.replace(" ", "") print(myString) #earnings = 'CFN, NYSE, -21.82, 2013-02-09\nNDZ, NYSE, 30.77, 2013-01-29\nAZZ, NYSE, -1.64, 2013-01-10' earnings = 'CFN, NYSE, -21.82, 2013-02-09\n NDZ, NYSE, 30.77, 2013-01-29\n AZZ, NYSE, -1.64, 2013-01-10\n CLC, NYSE, 2.86, 2013-01-17\n CMC, NYSE, 64.71, 2013-01-08\n FC, NYSE, 15.38, 2013-01-04\n FDO, NYSE, -6.76, 2013-01-04\n FUL, NYSE, 14.29, 2013-01-17\n LEN, NYSE, 30.23, 2013-01-16\n LNN, NYSE, 53.33, 2013-01-09\n MKC, NYSE, -3.48, 2013-01-25\n RT, NYSE, 0.00, 2013-01-10\n MSM, NYSE, 1.00, 2013-01-11\n RPM, NYSE, -4.76, 2013-01-09\n SVU, NYSE, -50.00, 2013-01-11\n TISI, NYSE, 10.00, 2013-01-08\n TXI, NYSE, -5.88, 2013-01-10\n UNF, NYSE, 15.79, 2013-01-04\n WOR, NYSE, 12.20, 2013-01-04\n GBX, NYSE, 12.90, 2013-01-10\n SJR, NYSE, 11.11, 2013-01-10\n OMN, NYSE, -50.00, 2013-01-23\n MON, NYSE, 67.57, 2013-01-09\n GPN, NYSE, 6.90, 2013-01-09\n AYI, NYSE, -13.75, 2013-01-09\n STZ, NYSE, 14.55, 2013-01-10\n SNX, NYSE, 11.54, 2013-01-11\n TAL, NYSE, 600.00, 2013-01-23\n IHS, NYSE, 12.35, 2013-01-09\n EDU, NYSE, -150.00, 2013-01-30\n SAR, NYSE, 28.57, 2013-01-15\n ZEP, NYSE, 11.11, 2013-01-08\n MG, NYSE, 0.00, 2013-01-09\n MOS, NYSE, 7.14, 2013-01-04\n ABT, NYSE, 1.33, 2013-01-24\n ABX, NYSE, 1.83, 2013-02-15\n AB, NYSE, 21.21, 2013-02-13\n TAP, NYSE, 7.81, 2013-02-15\n ACO, NYSE, -15.91, 2013-01-26\n ADM, NYSE, -26.83, 2013-02-05\n AEM, NYSE, -13.33, 2013-02-14\n AEP, NYSE, 11.11, 2013-02-16\n AES, NYSE, 6.67, 2013-02-28\n AET, NYSE, -2.08, 2013-02-01\n AFL, NYSE, 0.00, 2013-02-06\n AGCO, NYSE, 1.02, 2013-02-06\n HES, NYSE, -2.44, 2013-01-31\n AIG, NYSE, 322.22, 2013-02-22\n AIN, NYSE, -9.68, 2013-02-07\n AJG, NYSE, 2.63, 2013-01-30\n ALU, NYSE, 0.00, 2013-02-08\n MATX, NYSE, 24.14, 2013-02-08\n ALK, NYSE, -4.11, 2013-01-25\n ALX, NYSE, -11.52, 2013-02-27\n BEAM, NYSE, 0.00, 2013-02-02\n AME, NYSE, 2.08, 2013-01-25\n TWX, NYSE, 6.36, 2013-02-07\n AVD, NYSE, 11.43, 2013-03-01\n AMN, NYSE, 36.36, 2013-02-22\n AN, NYSE, 3.08, 2013-02-01\n AON, NYSE, 1.60, 2013-02-02\n AP, NYSE, 77.78, 2013-02-05\n APA, NYSE, -1.30, 2013-02-15\n APC, NYSE, 30.00, 2013-02-05\n APD, NYSE, 0.78, 2013-01-24\n APH, NYSE, 4.44, 2013-01-18\n ARG, NYSE, -3.70, 2013-01-25\n AAN, NYSE, -4.00, 2013-02-08\n ARW, NYSE, 13.89, 2013-02-08\n ASGN, NYSE, -25.00, 2013-02-15\n ASH, NYSE, -17.65, 2013-01-30\n ASR, NYSE, 56.88, 2013-02-26\n GAS, NYSE, -9.90, 2013-02-07\n ATO, NYSE, -5.13, 2013-02-07\n ATW, NYSE, 17.02, 2013-01-31\n AU, NYSE, -67.44, 2013-02-21\n AVP, NYSE, 37.04, 2013-02-13\n AVT, NYSE, 21.69, 2013-01-25\n AVY, NYSE, 10.20, 2013-01-31\n AXP, NYSE, 0.00, 2013-01-18\n B, NYSE, 7.84, 2013-02-23\n BA, NYSE, 7.56, 2013-01-31\n BAC, NYSE, 50.00, 2013-01-18\n BAX, NYSE, 0.00, 2013-01-25\n BC, NYSE, 122.22, 2013-01-25\n OMX, NYSE, 6.67, 2013-02-21\n BCE, NYSE, -2.99, 2013-02-08\n BCR, NYSE, 1.80, 2013-02-01\n BCS, NYSE, 40.74, 2013-02-13\n BDX, NYSE, 9.76, 2013-02-06\n BEN, NYSE, 1.68, 2013-02-02\n BGG, NYSE, 250.00, 2013-01-25\n BHE, NYSE, 10.00, 2013-02-05\n BHI, NYSE, 1.64, 2013-01-24\n BID, NYSE, 0.92, 
2013-03-01\n BIO, NYSE, 15.67, 2013-02-27\n BK, NYSE, 0.00, 2013-01-16\n BKH, NYSE, 9.68, 2013-02-01\n WRB, NYSE, 28.00, 2013-01-29\n BLC, NYSE, 5.71, 2013-02-09\n BLL, NYSE, -3.03, 2013-02-01\n BLX, NYSE, 20.75, 2013-02-08\n BMI, NYSE, -11.36, 2013-02-07\n BMS, NYSE, 4.00, 2013-02-01\n BMY, NYSE, 9.30, 2013-01-25\n BOH, NYSE, 1.12, 2013-01-31\n BXS, NYSE, -25.00, 2013-01-24\n BPL, NYSE, 18.52, 2013-02-09\nBRK.A, NYSE, 175.73, 2013-03-02\n BRO, NYSE, 7.41, 2013-02-02\n BSX, NYSE, 63.64, 2013-01-30\n BT, NYSE, -89.22, 2013-02-02\n MTRN, NYSE, 17.14, 2013-03-01\n CACI, NYSE, 3.66, 2013-01-31\n CAT, NYSE, -13.10, 2013-01-29\n CB, NYSE, 10.00, 2013-01-30\n CBI, NYSE, 9.64, 2013-02-28\n CBM, NYSE, 100.00, 2013-02-07\n CBU, NYSE, -3.70, 2013-01-23\n CBT, NYSE, -28.57, 2013-01-31\n CCC, NYSE, 35.71, 2013-02-22\n CCE, NYSE, 4.65, 2013-02-08\n C, NYSE, -20.69, 2013-01-18\n CCK, NYSE, -7.27, 2013-01-31\n CCU, NYSE, -12.21, 2013-02-01\n CDE, NYSE, -15.15, 2013-02-22\n CDI, NYSE, 8.70, 2013-02-27\n CAH, NYSE, 9.41, 2013-02-06\n CFR, NYSE, 5.38, 2013-01-31\n CHD, NYSE, 0.00, 2013-02-06\n CKP, NYSE, -50.00, 2013-03-06\n CPK, NYSE, 18.60, 2013-03-08\n CI, NYSE, 6.08, 2013-02-08\n CIA, NYSE, -100.00, 2013-03-12\n CKH, NYSE, -93.55, 2013-02-28\n CL, NYSE, 0.71, 2013-02-01\n CLF, NYSE, -25.45, 2013-02-13\n CLH, NYSE, -25.00, 2013-02-21\n CLX, NYSE, 11.11, 2013-02-05\n CMA, NYSE, 7.81, 2013-01-17\n CMO, NYSE, -6.06, 2013-01-31\n CRK, NYSE, -77.42, 2013-02-12\n CMS, NYSE, 4.17, 2013-02-22\n CNA, NYSE, -150.00, 2013-02-12\n CNW, NYSE, -10.34, 2013-02-07\n CHG, NYSE, -4.12, 2013-02-27\n CNL, NYSE, 12.50, 2013-02-20\n COG, NYSE, 14.29, 2013-02-22\n COT, NYSE, -66.67, 2013-02-16\n CP, NYSE, -0.78, 2013-01-30\n CPF, NYSE, 11.54, 2013-02-01\n CQB, NYSE, -17.65, 2013-03-12\n CR, NYSE, -5.15, 2013-01-29\nCRD.B, NYSE, 52.38, 2013-02-14\n CRS, NYSE, 1.64, 2013-02-01\n CSC, NYSE, 22.22, 2013-02-06\n CSL, NYSE, 6.49, 2013-02-09\n CTB, NYSE, 35.29, 2013-02-26\n CTL, NYSE, -1.47, 2013-02-14\n CTS, NYSE, -21.74, 2013-01-29\n CUB, NYSE, -32.86, 2013-02-12\n CMI, NYSE, 14.94, 2013-02-07\n CUZ, NYSE, 40.00, 2013-02-14\n CVC, NYSE, -400.00, 2013-03-01\n CVH, NYSE, 35.82, 2013-02-07\n CW, NYSE, 4.40, 2013-02-21\n CWT, NYSE, 33.33, 2013-02-28\n CX, NYSE, -258.33, 2013-02-08\n CYN, NYSE, -13.00, 2013-01-25\n D, NYSE, 1.47, 2013-02-01\n DBD, NYSE, -8.16, 2013-02-13\n DCO, NYSE, -23.81, 2013-03-05\n DD, NYSE, 22.22, 2013-01-23\n CVA, NYSE, -13.04, 2013-02-07\n DHR, NYSE, 0.00, 2013-01-30\n DIS, NYSE, 2.60, 2013-02-06\n DLX, NYSE, 11.76, 2013-01-25\n DNB, NYSE, -1.24, 2013-02-12\n RRD, NYSE, 16.22, 2013-02-27\n DOV, NYSE, 1.87, 2013-01-25\n DOW, NYSE, -2.94, 2013-02-01\n DRE, NYSE, 0.00, 2013-01-31\n DHI, NYSE, 42.86, 2013-01-30\n UFS, NYSE, -7.09, 2013-02-02\n DTE, NYSE, 0.00, 2013-02-21\n DUK, NYSE, 7.69, 2013-02-14\n DVN, NYSE, 2.63, 2013-02-21\n DV, NYSE, 55.36, 2013-02-07\n EAT, NYSE, 0.00, 2013-01-23\n ECL, NYSE, 0.00, 2013-02-27\n ED, NYSE, -6.85, 2013-02-01\n EDE, NYSE, 27.78, 2013-02-15\n EFX, NYSE, 4.00, 2013-02-07\n EGN, NYSE, -15.58, 2013-01-24\n EGP, NYSE, 0.00, 2013-02-13\n ELY, NYSE, 2.00, 2013-01-31\n EMC, NYSE, 6.98, 2013-01-30\n EMR, NYSE, 0.00, 2013-02-06\n EOG, NYSE, 19.26, 2013-02-14\n EQT, NYSE, 14.29, 2013-01-25\n ESE, NYSE, -44.44, 2013-02-08\n ESV, NYSE, 7.87, 2013-02-21\n ETN, NYSE, -10.87, 2013-02-06\n ETR, NYSE, 21.99, 2013-02-09\n EXAR, NYSE, -14.29, 2013-01-24\n F, NYSE, 19.23, 2013-01-30\n OPY, NYSE, 115.79, 2013-02-02\n CLGX, NYSE, -3.12, 2013-02-22\n FNB, NYSE, 4.55, 2013-01-24\n FCF, NYSE, -18.18, 
2013-01-31\n FBP, NYSE, -30.00, 2013-02-06\n FICO, NYSE, 6.94, 2013-01-31\n FLO, NYSE, 12.00, 2013-02-08\n FMC, NYSE, 0.00, 2013-02-07\n FOE, NYSE, -250.00, 2013-03-06\n S, NYSE, 4.35, 2013-02-08\n NEE, NYSE, 9.57, 2013-01-30\n FRT, NYSE, 0.91, 2013-02-13\n FRX, NYSE, -61.54, 2013-01-16\n FUN, NYSE, -433.33, 2013-02-20\n FUR, NYSE, -48.15, 2013-03-08\n GBL, NYSE, -28.72, 2013-02-06\n GVA, NYSE, -29.03, 2013-03-01\n BGC, NYSE, -3.45, 2013-02-26\n GD, NYSE, -26.84, 2013-01-24\n GE, NYSE, 2.33, 2013-01-19\n RHP, NYSE, -50.00, 2013-02-13\n AXLL, NYSE, 95.08, 2013-02-13\n GGG, NYSE, 13.33, 2013-01-29\n GHM, NYSE, -22.22, 2013-02-02\n GIB, NYSE, -4.35, 2013-01-31\n GLT, NYSE, -25.71, 2013-02-08\n GLW, NYSE, 3.03, 2013-01-30\n GSK, NYSE, 8.33, 2013-02-07\n GLF, NYSE, -160.71, 2013-02-26\n GNI, NYSE, -14.44, 2013-01-30\n GPC, NYSE, 0.00, 2013-02-20\n GRA, NYSE, 4.72, 2013-02-07\n GTY, NYSE, -10.34, 2013-03-01\n GWW, NYSE, -7.28, 2013-01-25\n HAE, NYSE, 4.17, 2013-01-31\n HAL, NYSE, 3.28, 2013-01-26\n HAR, NYSE, -32.95, 2013-02-01\n HVT, NYSE, 30.43, 2013-02-26\n HRC, NYSE, 6.82, 2013-01-24\n HCC, NYSE, 43.75, 2013-02-13\n HCN, NYSE, 1.19, 2013-02-26\n HCP, NYSE, 1.41, 2013-02-13\n HOG, NYSE, 0.00, 2013-01-30\n HE, NYSE, 21.88, 2013-02-16\n HL, NYSE, -25.00, 2013-02-26\n HMA, NYSE, -5.00, 2013-02-15\n HMC, NYSE, -29.58, 2013-02-01\n HMN, NYSE, 91.43, 2013-02-06\n HFC, NYSE, -8.97, 2013-02-27\n HOT, NYSE, 7.69, 2013-02-08\n HP, NYSE, 8.53, 2013-02-01\n HLS, NYSE, 40.63, 2013-02-19\n HRS, NYSE, 4.17, 2013-01-30\n HSC, NYSE, -3.23, 2013-02-15\n HSY, NYSE, -1.33, 2013-02-01\n HUBB, NYSE, 0.00, 2013-01-25\n HUM, NYSE, 11.21, 2013-02-05\n HXL, NYSE, -5.26, 2013-01-24\n IBM, NYSE, 2.67, 2013-01-23\n IDA, NYSE, 10.00, 2013-02-22\n IEX, NYSE, 2.99, 2013-02-05\n IFF, NYSE, -1.19, 2013-02-08\n DIN, NYSE, 1.22, 2013-02-28\n INT, NYSE, 0.00, 2013-02-22\n IP, NYSE, 6.15, 2013-01-30\n IPG, NYSE, 3.70, 2013-02-23\n IO, NYSE, 30.77, 2013-02-14\n IR, NYSE, 8.57, 2013-02-02\n IRF, NYSE, 6.38, 2013-01-29\n ITW, NYSE, -1.11, 2013-01-30\n IVC, NYSE, -56.00, 2013-02-09\n JEC, NYSE, 0.00, 2013-01-24\n JNJ, NYSE, 1.71, 2013-01-23\n JNY, NYSE, 75.00, 2013-02-14\n K, NYSE, 3.08, 2013-02-06\n KAMN, NYSE, 0.00, 2013-02-26\n KDN, NYSE, 0.00, 2013-02-22\n KEX, NYSE, 9.30, 2013-01-31\n KEY, NYSE, -4.55, 2013-01-25\n KIM, NYSE, 6.45, 2013-02-06\n KMB, NYSE, 0.74, 2013-01-26\n KEM, NYSE, 53.33, 2013-02-01\n KMT, NYSE, -21.88, 2013-01-25\n KO, NYSE, 2.27, 2013-02-13\n KSU, NYSE, 10.98, 2013-01-23\n LDL, NYSE, -10.53, 2013-02-27\n LDR, NYSE, 10.42, 2013-02-12\n LEE, NYSE, 25.00, 2013-01-23\n LEG, NYSE, 10.34, 2013-02-05\n LLY, NYSE, 8.97, 2013-01-30\n LM, NYSE, 29.63, 2013-02-02\n LNC, NYSE, 3.77, 2013-02-07\n LPX, NYSE, -10.00, 2013-02-09\n LXU, NYSE, 145.00, 2013-03-01\n LTC, NYSE, -1.72, 2013-02-22\n L, NYSE, -37.93, 2013-02-12\n LUK, NYSE, 210.17, 2013-02-26\n LUV, NYSE, 28.57, 2013-01-25\n LUX, NYSE, 4.35, 2013-03-01\n MKL, NYSE, 314.07, 2013-02-05\n MAN, NYSE, 18.18, 2013-01-31\n MTW, NYSE, 12.50, 2013-02-01\n SM, NYSE, 95.65, 2013-02-21\n MAS, NYSE, 500.00, 2013-02-12\n MTZ, NYSE, 2.22, 2013-03-01\n MCD, NYSE, 3.76, 2013-01-24\n MDC, NYSE, 40.48, 2013-02-01\n MDP, NYSE, 1.14, 2013-01-25\n MDR, NYSE, 13.04, 2013-03-01\n MDU, NYSE, 2.56, 2013-02-05\n MED, NYSE, 12.00, 2013-03-08\n CVS, NYSE, 2.73, 2013-02-07\n MFC, NYSE, -12.50, 2013-02-08\n MGA, NYSE, 36.84, 2013-03-02\n MGM, NYSE, 0.00, 2013-02-21\n MLR, NYSE, -11.76, 2013-03-07\n MLI, NYSE, 14.29, 2013-02-06\n MMC, NYSE, 0.00, 2013-02-13\n MMM, NYSE, 0.00, 2013-01-25\n MSA, 
NYSE, 3.64, 2013-02-14\n MNR, NYSE, 38.46, 2013-02-08\n MO, NYSE, 1.85, 2013-02-01\n MOD, NYSE, -75.00, 2013-02-02\nMOG.A, NYSE, -8.54, 2013-01-26\n MHK, NYSE, 7.45, 2013-02-22\n MSI, NYSE, 7.61, 2013-01-24\n MCY, NYSE, -168.00, 2013-02-05\n MRK, NYSE, 2.47, 2013-02-02\n MRO, NYSE, -19.12, 2013-02-07\n POWR, NYSE, 18.18, 2013-03-08\n MTG, NYSE, -37.87, 2013-03-01\n MTB, NYSE, 2.76, 2013-01-17\n MTX, NYSE, 6.38, 2013-02-01\n MUR, NYSE, 59.23, 2013-01-31\n MYE, NYSE, -7.14, 2013-02-14\n NBL, NYSE, 54.21, 2013-02-08\n NBR, NYSE, 3.45, 2013-02-20\n NE, NYSE, -19.35, 2013-01-24\n NEM, NYSE, 13.27, 2013-02-22\n NFG, NYSE, 6.58, 2013-02-08\n NHI, NYSE, 1.20, 2013-02-15\n NI, NYSE, 0.00, 2013-02-20\n NJR, NYSE, -17.48, 2013-02-08\n THC, NYSE, -24.64, 2013-02-27\n NNN, NYSE, 4.55, 2013-02-08\n NOC, NYSE, 18.39, 2013-01-31\n NPK, NYSE, -11.23, 2013-02-16\n NR, NYSE, 0.00, 2013-02-15\n NSC, NYSE, 9.24, 2013-01-23\n NUE, NYSE, 55.17, 2013-01-30\n NVR, NYSE, 8.22, 2013-01-25\n NWL, NYSE, 2.38, 2013-02-02\n NWN, NYSE, -4.55, 2013-03-02\n NYT, NYSE, 3.23, 2013-02-08\n OCR, NYSE, 1.18, 2013-02-20\n OGE, NYSE, 14.71, 2013-02-28\n OHI, NYSE, 3.57, 2013-02-12\n OI, NYSE, 8.11, 2013-01-31\n OII, NYSE, 2.78, 2013-02-14\n OKE, NYSE, 17.78, 2013-02-26\n OLN, NYSE, 2.94, 2013-01-29\n BRS, NYSE, 32.95, 2013-02-05\n OLP, NYSE, 0.00, 2013-03-15\n OMC, NYSE, 3.67, 2013-02-13\n OMI, NYSE, -12.77, 2013-02-12\n ORB, NYSE, 31.82, 2013-02-15\n ORI, NYSE, -28.57, 2013-01-25\n OSK, NYSE, 93.55, 2013-01-26\n OXY, NYSE, 10.24, 2013-02-01\n PHX, NYSE, -18.75, 2013-02-08\n FCFS, NYSE, 2.20, 2013-01-24\n PBI, NYSE, 7.69, 2013-02-01\n PCG, NYSE, 3.51, 2013-02-22\n PCL, NYSE, 68.97, 2013-01-29\n PCP, NYSE, -3.23, 2013-01-25\n TPC, NYSE, 0.00, 2013-02-22\n PDS, NYSE, 250.00, 2013-02-15\n PEG, NYSE, 5.13, 2013-02-22\n PEI, NYSE, 0.00, 2013-02-26\n PEP, NYSE, 3.81, 2013-02-15\n PFE, NYSE, 6.82, 2013-01-30\n PG, NYSE, 9.91, 2013-01-26\n PGR, NYSE, 0.00, 2013-01-19\n PH, NYSE, 6.25, 2013-01-19\n PHG, NYSE, -4.17, 2013-01-30\n PHM, NYSE, 9.68, 2013-02-01\n PKD, NYSE, -150.00, 2013-02-22\n PKY, NYSE, 17.39, 2013-02-12\n PNC, NYSE, 24.82, 2013-01-18\n PNM, NYSE, 18.18, 2013-03-02\n PNR, NYSE, 6.82, 2013-01-30\n PNW, NYSE, 41.18, 2013-02-23\n POM, NYSE, -5.00, 2013-03-02\n POT, NYSE, -11.86, 2013-02-01\n PPG, NYSE, -0.65, 2013-01-15\n PPL, NYSE, 6.52, 2013-02-15\n PRGO, NYSE, 3.82, 2013-02-02\n PL, NYSE, 11.36, 2013-02-07\n PSB, NYSE, 5.04, 2013-02-20\n CSH, NYSE, 12.61, 2013-01-25\n PWR, NYSE, 36.11, 2013-02-22\n PX, NYSE, 0.00, 2013-01-24\n KWR, NYSE, 26.32, 2013-03-07\n R, NYSE, 6.36, 2013-02-01\n RBC, NYSE, 2.70, 2013-02-05\n RDC, NYSE, 28.57, 2013-03-01\n HTSI, NYSE, -20.69, 2013-02-01\n RES, NYSE, 8.33, 2013-01-24\n RGS, NYSE, -76.92, 2013-02-01\n RGR, NYSE, 36.99, 2013-02-28\n RHI, NYSE, 2.44, 2013-01-30\n RJF, NYSE, 0.00, 2013-01-24\n RLI, NYSE, 102.27, 2013-01-24\n ROG, NYSE, -8.62, 2013-02-20\n ROK, NYSE, -2.38, 2013-01-31\n ROL, NYSE, -5.88, 2013-01-24\n ROP, NYSE, 1.37, 2013-01-29\n RTI, NYSE, 25.00, 2013-02-07\n RTN, NYSE, 23.08, 2013-01-25\n RYL, NYSE, 12.00, 2013-01-30\n BSAC, NYSE, -1.96, 2013-02-05\n T, NYSE, -6.38, 2013-01-25\n SCG, NYSE, 0.00, 2013-02-22\n SCHW, NYSE, 0.00, 2013-01-17\n SCL, NYSE, -5.56, 2013-02-20\n SMG, NYSE, 0.88, 2013-02-07\n SEE, NYSE, 17.24, 2013-02-20\n SF, NYSE, 5.17, 2013-02-26\n SFE, NYSE, -121.74, 2013-03-08\n SHW, NYSE, -0.87, 2013-02-01\n STC, NYSE, 29.27, 2013-02-15\n SJI, NYSE, -6.67, 2013-03-01\n JOE, NYSE, -1000.00, 2013-03-01\n SJW, NYSE, 72.22, 2013-02-20\n SLB, NYSE, 0.00, 
2013-01-19\n HSH, NYSE, 29.17, 2013-02-01\n AOS, NYSE, 12.35, 2013-01-25\n SNA, NYSE, 4.38, 2013-02-08\n PII, NYSE, 0.81, 2013-01-30\n SNV, NYSE, 0.00, 2013-01-23\n SO, NYSE, 12.82, 2013-01-31\n SON, NYSE, 3.70, 2013-02-14\n SPA, NYSE, 30.00, 2013-02-06\n TRV, NYSE, 500.00, 2013-01-23\n SR, NYSE, 14.68, 2013-02-06\n NVE, NYSE, 0.00, 2013-02-23\n SCI, NYSE, 10.00, 2013-02-13\n SSP, NYSE, -3.85, 2013-02-27\n STT, NYSE, 11.00, 2013-01-19\n STI, NYSE, 6.56, 2013-01-19\n STJ, NYSE, 2.22, 2013-01-24\n STL, NYSE, 14.29, 2013-01-24\n STR, NYSE, 8.57, 2013-02-21\n STE, NYSE, 3.57, 2013-02-07\n SYK, NYSE, 0.88, 2013-01-24\n SUN, NYSE, -4.88, 2013-03-30\n SUP, NYSE, -61.54, 2013-03-02\n SWK, NYSE, 3.01, 2013-01-25\n SWN, NYSE, 2.33, 2013-02-21\n SWS, NYSE, 0.00, 2013-02-07\n SWX, NYSE, -2.44, 2013-02-27\n SWY, NYSE, 23.68, 2013-02-22\n SXI, NYSE, 1.10, 2013-02-02\n SYY, NYSE, 19.51, 2013-02-05\n TNC, NYSE, 6.90, 2013-02-20\n TCB, NYSE, -16.67, 2013-01-31\n TCO, NYSE, 5.15, 2013-02-14\n TDS, NYSE, -725.00, 2013-02-27\n TDW, NYSE, 38.64, 2013-02-02\n TDY, NYSE, 8.33, 2013-01-25\n TE, NYSE, 0.00, 2013-02-06\n TER, NYSE, 600.00, 2013-01-24\n TEVA, NYSE, -0.75, 2013-02-08\n TEX, NYSE, -51.28, 2013-02-20\n TFX, NYSE, 1.79, 2013-02-22\n TEN, NYSE, -2.94, 2013-02-01\n TKR, NYSE, 25.00, 2013-01-25\n TMK, NYSE, 1.53, 2013-02-05\n TMO, NYSE, 6.25, 2013-02-01\n TOT, NYSE, -1.12, 2013-02-14\n TM, NYSE, -44.72, 2013-02-06\n TR, NYSE, 37.50, 2013-02-14\n TRN, NYSE, 7.14, 2013-02-21\n TRP, NYSE, -15.09, 2013-02-13\n TRR, NYSE, 566.67, 2013-02-07\n TSO, NYSE, -2.90, 2013-02-07\n TSS, NYSE, -3.03, 2013-01-23\n TTI, NYSE, -21.05, 2013-03-01\n TXT, NYSE, -1.75, 2013-01-24\n TYL, NYSE, 10.71, 2013-02-07\n TSN, NYSE, 23.08, 2013-02-02\n UDR, NYSE, 2.94, 2013-02-06\n UFI, NYSE, -42.86, 2013-01-23\n UGI, NYSE, -15.89, 2013-02-01\n UAM, NYSE, 45.45, 2013-02-20\n UHS, NYSE, 9.89, 2013-03-01\n UHT, NYSE, 268.42, 2013-02-28\n UIL, NYSE, -9.68, 2013-02-22\n UNH, NYSE, 0.00, 2013-01-18\n KMPR, NYSE, -250.00, 2013-02-08\n UNM, NYSE, 5.13, 2013-02-06\n UNP, NYSE, 1.39, 2013-01-25\n UNT, NYSE, 2.06, 2013-02-20\n URS, NYSE, -1.04, 2013-02-26\n USG, NYSE, -67.86, 2013-02-07\n MUX, NYSE, -600.00, 2013-03-09\n USM, NYSE, -1100.00, 2013-02-27\n USPH, NYSE, 3.03, 2013-03-08\n UTL, NYSE, 3.13, 2013-01-31\n UTX, NYSE, 26.47, 2013-01-24\n VMI, NYSE, 8.48, 2013-02-13\n VAR, NYSE, 3.49, 2013-01-24\n VFC, NYSE, 1.32, 2013-02-16\n CBS, NYSE, -8.57, 2013-02-15\n VLO, NYSE, 57.98, 2013-01-30\n VMC, NYSE, -81.82, 2013-02-15\n VLY, NYSE, 0.00, 2013-01-31\n VNO, NYSE, 6.09, 2013-02-27\n VSH, NYSE, 37.50, 2013-02-06\n WTS, NYSE, 5.17, 2013-02-20\n WBS, NYSE, 6.12, 2013-01-19\n WEC, NYSE, 4.88, 2013-01-31\n WFC, NYSE, 3.41, 2013-01-14\n WG, NYSE, 57.14, 2013-03-07\n WGL, NYSE, 9.62, 2013-02-07\n WHR, NYSE, 3.15, 2013-02-01\n WMB, NYSE, -3.85, 2013-02-21\n WMK, NYSE, 20.29, 2013-03-06\n WNC, NYSE, 3.23, 2013-02-06\n TEG, NYSE, -5.32, 2013-03-01\n WR, NYSE, 80.00, 2013-03-01\n WRE, NYSE, 2.17, 2013-02-14\n WRI, NYSE, 4.44, 2013-02-15\n WPP, NYSE, -175.00, 2013-02-12\n WSO, NYSE, -12.77, 2013-02-15\n WST, NYSE, 8.93, 2013-02-22\n WWW, NYSE, 200.00, 2013-02-20\n WY, NYSE, 36.84, 2013-01-26\n X, NYSE, 45.33, 2013-01-30\n XL, NYSE, 138.24, 2013-02-08\n XOM, NYSE, 10.00, 2013-02-02\n XRX, NYSE, 7.14, 2013-01-25\n Y, NYSE, 54.64, 2013-02-22\n HRG, NYSE, -50.00, 2013-02-09\n CRY, NYSE, 33.33, 2013-02-15\n CHK, NYSE, 85.71, 2013-02-22\n DDR, NYSE, 0.00, 2013-02-13\n ELS, NYSE, 0.00, 2013-01-29\n ALG, NYSE, 37.93, 2013-03-07\n ETH, NYSE, 5.41, 2013-01-23\n ATR, 
NYSE, 0.00, 2013-02-08\n GGP, NYSE, 6.90, 2013-02-05\n MSL, NYSE, -10.00, 2013-01-30\n RCL, NYSE, 66.67, 2013-02-05\n CWEI, NYSE, -34.04, 2013-02-22\n HR, NYSE, 0.00, 2013-02-21\n RGA, NYSE, 35.56, 2013-02-01\n RIG, NYSE, 12.35, 2013-03-02\n SKT, NYSE, 2.22, 2013-02-13\n TWI, NYSE, -80.85, 2013-02-26\n BDN, NYSE, 17.86, 2013-02-07\n KGC, NYSE, -4.55, 2013-02-14\n YPF, NYSE, 26.67, 2013-03-13\n CPT, NYSE, 1.04, 2013-02-01\n SGY, NYSE, 67.27, 2013-02-26\n BFS, NYSE, -11.48, 2013-03-08\n BWA, NYSE, 3.57, 2013-02-15\n EQR, NYSE, 0.00, 2013-02-06\n CLP, NYSE, -81.25, 2013-02-08\n KOF, NYSE, -7.78, 2013-02-28\n OKS, NYSE, 3.13, 2013-02-26\n SQM, NYSE, -15.63, 2013-03-06\n BYD, NYSE, -138.46, 2013-03-05\n CBL, NYSE, 8.77, 2013-02-06\n DECK, NYSE, 7.36, 2013-03-01\n IT, NYSE, 6.78, 2013-02-08\n GFI, NYSE, -36.36, 2013-02-15\n HST, NYSE, 8.11, 2013-02-22\n LXP, NYSE, 0.00, 2013-02-22\n OMG, NYSE, -533.33, 2013-02-20\n REG, NYSE, 8.62, 2013-01-31\n TUC, NYSE, -5.56, 2013-03-08\n AF, NYSE, 7.14, 2013-01-24\n BFR, NYSE, 13.33, 2013-02-09\n HHS, NYSE, 26.32, 2013-02-01\n MHO, NYSE, -3.45, 2013-02-01\n NFX, NYSE, -36.36, 2013-02-20\n SPG, NYSE, 13.93, 2013-02-05\n SU, NYSE, -14.20, 2013-02-06\n SUI, NYSE, -2.44, 2013-02-22\n TV, NYSE, 5.13, 2013-02-26\n CGI, NYSE, 0.00, 2013-01-24\n CYT, NYSE, 77.42, 2013-02-01\n EMN, NYSE, 0.00, 2013-02-01\n GRT, NYSE, 0.00, 2013-02-15\n MAA, NYSE, -1.74, 2013-02-07\n PLT, NYSE, 0.00, 2013-01-30\n BZH, NYSE, 24.27, 2013-01-29\n ELX, NYSE, 0.00, 2013-02-01\n AGM, NYSE, -5.41, 2013-03-19\n MLM, NYSE, -13.21, 2013-02-13\n AKS, NYSE, 14.29, 2013-01-30\n ALB, NYSE, 18.18, 2013-01-23\n VRX, NYSE, -4.00, 2013-03-01\n CBR, NYSE, 140.00, 2013-02-22\n MAC, NYSE, 3.45, 2013-02-07\n RKT, NYSE, 5.47, 2013-01-23\n RYN, NYSE, 3.51, 2013-01-25\n ADC, NYSE, 1.96, 2013-02-28\nBRK.B, NYSE, 0.88, 2013-03-02\n EXP, NYSE, 0.00, 2013-02-07\n GGB, NYSE, -66.67, 2013-02-22\n SSD, NYSE, -100.00, 2013-02-08\n ESS, NYSE, 4.02, 2013-02-01\n FR, NYSE, 0.00, 2013-02-21\n HIW, NYSE, 0.00, 2013-02-13\n IMAX, NYSE, 58.33, 2013-02-22\n AIV, NYSE, 4.00, 2013-02-08\n FCH, NYSE, 50.00, 2013-02-20\n ITGR, NYSE, 6.00, 2013-02-26\n GEO, NYSE, 7.32, 2013-02-22\n CLI, NYSE, 4.76, 2013-02-08\n DAR, NYSE, -20.00, 2013-02-28\n RS, NYSE, 9.28, 2013-02-22\n CPE, NYSE, -66.67, 2013-03-15\n KNX, NYSE, 4.76, 2013-01-31\n O, NYSE, 3.70, 2013-02-15\n PKX, NYSE, -15.35, 2013-03-02\n COF, NYSE, -12.35, 2013-01-18\n CYD, NYSE, -23.14, 2013-02-28\n IRS, NYSE, 57.50, 2013-02-20\n MCK, NYSE, -13.50, 2013-02-01\n SWC, NYSE, 116.67, 2013-02-28\n STM, NYSE, -22.22, 2013-01-31\n TEO, NYSE, 28.36, 2013-03-01\n TRK, NYSE, 400.00, 2013-03-07\n GFF, NYSE, 300.00, 2013-01-31\n LMT, NYSE, -0.56, 2013-01-25\n APU, NYSE, -13.89, 2013-02-01\n AGU, NYSE, 6.93, 2013-02-22\n LH, NYSE, -4.35, 2013-02-09\n DDD, NYSE, 0.00, 2013-02-26\n WEX, NYSE, 0.94, 2013-02-07\n AFG, NYSE, 3.08, 2013-02-12\n RMD, NYSE, 3.92, 2013-01-25\n WAB, NYSE, 2.29, 2013-02-20\n CIB, NYSE, 20.39, 2013-03-05\n CAM, NYSE, -1.04, 2013-02-01\n FCX, NYSE, 5.41, 2013-01-23\n RNR, NYSE, 70.27, 2013-02-06\n AVX, NYSE, -20.00, 2013-01-25\n RWT, NYSE, 85.19, 2013-02-22\n AXE, NYSE, 0.76, 2013-01-30\n CLB, NYSE, 3.54, 2013-01-31\n MD, NYSE, 1.54, 2013-02-01\n THG, NYSE, 6.25, 2013-02-07\n BAP, NYSE, 3.72, 2013-02-06\n DO, NYSE, 28.18, 2013-02-06\n RE, NYSE, 175.86, 2013-02-07\n DST, NYSE, 17.82, 2013-02-01\n EL, NYSE, 11.54, 2013-02-06\n ESC, NYSE, -34.88, 2013-03-01\n MIG, NYSE, -100.00, 2013-02-13\n WAT, NYSE, 0.63, 2013-01-23\n EME, NYSE, 11.48, 2013-02-27\n HIG, NYSE, 80.00, 
2013-02-05\n ITT, NYSE, 2.63, 2013-02-28\n SPN, NYSE, 4.26, 2013-02-27\n SWM, NYSE, -9.18, 2013-02-07\n SCCO, NYSE, 0.00, 2013-02-02\n RCI, NYSE, 20.55, 2013-02-15\n EIX, NYSE, 66.04, 2013-02-27\n IRM, NYSE, -20.00, 2013-03-01\n REV, NYSE, -19.18, 2013-02-06\n SPH, NYSE, -17.46, 2013-02-08\n CCJ, NYSE, 46.34, 2013-02-09\n PGI, NYSE, -6.67, 2013-02-14\n CRR, NYSE, 2.30, 2013-02-01\n BVN, NYSE, -26.67, 2013-03-01\n FCN, NYSE, 11.67, 2013-03-01\n RPT, NYSE, 8.00, 2013-02-13\n TUP, NYSE, 1.79, 2013-01-30\n ASB, NYSE, 0.00, 2013-01-18\n GWR, NYSE, -2.47, 2013-02-13\n TBI, NYSE, 35.71, 2013-02-07\n FFG, NYSE, 24.00, 2013-02-08\n USNA, NYSE, 4.96, 2013-02-06\n CSV, NYSE, 4.35, 2013-02-26\n LVB, NYSE, 12.77, 2013-03-07\n ALR, NYSE, 6.25, 2013-02-16\n OCN, NYSE, -7.84, 2013-03-01\n PAA, NYSE, 42.03, 2013-02-07\n DNR, NYSE, 24.14, 2013-02-22\n HMY, NYSE, 50.00, 2013-02-05\n TGI, NYSE, 5.80, 2013-01-31\n PAG, NYSE, 7.55, 2013-02-07\n GEL, NYSE, -2.86, 2013-02-15\n IM, NYSE, 23.73, 2013-02-14\n LIN, NYSE, -21.92, 2013-03-01\n NUS, NYSE, 2.11, 2013-02-07\n CNI, NYSE, -0.70, 2013-01-23\n LAD, NYSE, 10.45, 2013-02-21\n NSP, NYSE, 4.44, 2013-02-09\n DEL, NYSE, -29.63, 2013-02-28\n DGX, NYSE, -3.81, 2013-01-24\n KRC, NYSE, 3.23, 2013-01-31\n MTH, NYSE, 50.00, 2013-02-01\n NCR, NYSE, 4.35, 2013-02-08\n OFG, NYSE, -50.00, 2013-02-08\n IVZ, NYSE, -4.26, 2013-02-01\n DX, NYSE, 9.68, 2013-02-21\n FBC, NYSE, 38.27, 2013-02-09\n ALV, NYSE, 9.85, 2013-02-01\n ARE, NYSE, 0.87, 2013-02-08\n BBT, NYSE, 2.86, 2013-01-18\n CGG, NYSE, -59.32, 2013-03-02\n BXP, NYSE, 2.42, 2013-01-30\n MS, NYSE, 73.08, 2013-01-19\n SRT, NYSE, 200.00, 2013-02-28\n HLX, NYSE, 162.86, 2013-02-21\n FLS, NYSE, 0.35, 2013-02-22\n MT, NYSE, -880.00, 2013-02-07\n PXD, NYSE, -2.35, 2013-02-14\n SLG, NYSE, 0.87, 2013-01-31\n NAT, NYSE, 0.00, 2013-02-12\n CSU, NYSE, -22.22, 2013-03-07\n DRQ, NYSE, 2.70, 2013-03-01\n FDP, NYSE, -100.00, 2013-02-20\n NLY, NYSE, 35.29, 2013-02-07\n TLM, NYSE, -300.00, 2013-02-18\n TSM, NYSE, 0.00, 2013-01-18\n YUM, NYSE, 2.47, 2013-02-05\n AMG, NYSE, 4.94, 2013-01-30\n EPR, NYSE, -4.40, 2013-02-27\n FE, NYSE, 1.27, 2013-02-26\n LFL, NYSE, -80.00, 2013-05-01\n MTD, NYSE, 8.44, 2013-02-07\n SID, NYSE, 57.14, 2013-03-29\n IN, NYSE, -18.18, 2013-03-12\n AI, NYSE, 9.91, 2013-02-07\n URI, NYSE, 23.30, 2013-01-24\n INGR, NYSE, 4.26, 2013-02-08\n RAS, NYSE, 153.85, 2013-02-14\n UNS, NYSE, 12.50, 2013-02-27\n ASI, NYSE, -17.95, 2013-03-07\n ANH, NYSE, 7.14, 2013-02-08\n OFC, NYSE, 4.08, 2013-02-09\n GPX, NYSE, 6.67, 2013-02-27\n WAC, NYSE, 11.32, 2013-03-19\n RBA, NYSE, -12.50, 2013-02-27\n WDR, NYSE, 5.17, 2013-01-30\n LHO, NYSE, 4.44, 2013-02-21\n LNT, NYSE, -1.72, 2013-02-15\n LVLT, NYSE, 11.11, 2013-02-13\n MFA, NYSE, 0.00, 2013-03-07\n OME, NYSE, 33.33, 2013-03-06\n EQY, NYSE, 7.14, 2013-02-21\n FII, NYSE, 10.00, 2013-01-25\n FMX, NYSE, 39.60, 2013-02-28\n LLL, NYSE, 6.13, 2013-01-31\n VTR, NYSE, 2.06, 2013-02-16\n WCN, NYSE, -7.69, 2013-02-15\n AVB, NYSE, -0.71, 2013-01-31\n GIL, NYSE, 6.67, 2013-02-07\n HZO, NYSE, 10.00, 2013-01-30\n AWR, NYSE, 43.24, 2013-03-01\n CLS, NYSE, 46.67, 2013-01-23\n EPD, NYSE, 7.58, 2013-02-01\n RSG, NYSE, -13.95, 2013-02-08\n WM, NYSE, -5.00, 2013-02-15\n AKR, NYSE, 3.57, 2013-02-06\n CVG, NYSE, 4.17, 2013-02-08\n RRC, NYSE, 228.57, 2013-02-27\n SAP, NYSE, -2.38, 2013-01-24\n CCI, NYSE, 57.14, 2013-01-24\n PQ, NYSE, -20.00, 2013-03-01\n WFT, NYSE, -94.44, 2013-02-27\n CAA, NYSE, 14.29, 2013-02-01\n ENB, NYSE, -6.67, 2013-02-16\n GMK, NYSE, -8.33, 2013-02-28\n MMR, NYSE, 75.00, 2013-01-19\n 
PB, NYSE, 1.19, 2013-01-26\n VIV, NYSE, -7.25, 2013-02-26\n AXL, NYSE, -111.76, 2013-02-09\n BP, NYSE, 19.05, 2013-02-06\n ETM, NYSE, 13.04, 2013-02-09\n HT, NYSE, 10.00, 2013-02-21\n BYI, NYSE, 5.26, 2013-02-01\n CEB, NYSE, 4.84, 2013-02-07\n INFY, NYSE, 5.56, 2013-01-12\n JLL, NYSE, -0.38, 2013-01-30\n AZN, NYSE, 24.64, 2013-02-01\n SFG, NYSE, 7.23, 2013-01-30\n TREX, NYSE, 27.78, 2013-02-20\n GS, NYSE, 61.38, 2013-01-17\n SYX, NYSE, -144.44, 2013-03-06\n WCC, NYSE, -2.75, 2013-02-01\n JNPR, NYSE, 26.67, 2013-01-25\n RDN, NYSE, -146.43, 2013-02-12\n RAI, NYSE, 4.11, 2013-02-13\n SKX, NYSE, 172.73, 2013-02-14\n WTM, NYSE, 724.10, 2013-02-06\n NCI, NYSE, 29.17, 2013-02-15\n BLT, NYSE, -21.74, 2013-03-08\n BLK, NYSE, 5.88, 2013-01-18\n CIR, NYSE, 25.45, 2013-03-01\n PKG, NYSE, -1.61, 2013-01-23\n PKI, NYSE, 0.00, 2013-02-01\n UGP, NYSE, 38.10, 2013-02-21\n WWE, NYSE, 0.00, 2013-03-01\n SNN, NYSE, 2.86, 2013-02-08\n UPS, NYSE, -4.35, 2013-02-01\n XOXO, NYSE, 62.50, 2013-03-07\n SLF, NYSE, 36.36, 2013-02-14\n CDR, NYSE, 33.33, 2013-03-08\n RLH, NYSE, -21.43, 2013-03-01\n EW, NYSE, 16.88, 2013-02-05\n MET, NYSE, 5.93, 2013-02-13\n FBR, NYSE, -28.57, 2013-01-31\n VVC, NYSE, 23.81, 2013-02-15\n BAM, NYSE, 148.28, 2013-02-16\n NVS, NYSE, 0.00, 2013-01-24\n VGR, NYSE, -43.75, 2013-02-27\n BHLB, NYSE, 0.00, 2013-01-29\n CRL, NYSE, 6.67, 2013-02-14\n CYH, NYSE, 0.00, 2013-02-22\n MBT, NYSE, 65.71, 2013-03-20\n MTOR, NYSE, -375.00, 2013-01-31\n CNQ, NYSE, -29.55, 2013-03-08\n ERJ, NYSE, -25.27, 2013-03-13\n VZ, NYSE, -28.30, 2013-01-23\n EVC, NYSE, 12.50, 2013-02-28\n PBR, NYSE, 0.00, 2013-02-05\n XEL, NYSE, 3.57, 2013-02-01\n ALE, NYSE, 0.00, 2013-02-16\n HW, NYSE, -20.00, 2013-01-30\n POL, NYSE, 0.00, 2013-01-30\n UMC, NYSE, 0.00, 2013-02-07\n ASX, NYSE, 41.43, 2013-01-31\n COH, NYSE, -4.65, 2013-01-23\n CXW, NYSE, 7.32, 2013-02-14\n DVA, NYSE, 6.33, 2013-02-15\n EXC, NYSE, -1.54, 2013-02-08\n MCO, NYSE, 7.14, 2013-02-09\n BRFS, NYSE, 43.48, 2013-03-06\n TU, NYSE, -1.15, 2013-02-16\n WIT, NYSE, 0.00, 2013-01-18\n ERF, NYSE, 462.50, 2013-02-22\n GG, NYSE, -22.22, 2013-02-15\n HNT, NYSE, -2.70, 2013-01-31\n NXY, NYSE, -23.44, 2013-02-26\n NYCB, NYSE, -3.45, 2013-01-31\n SXT, NYSE, -8.33, 2013-02-08\n CPG, NYSE, -191.67, 2013-03-15\n AMX, NYSE, -40.00, 2013-02-13\n MPX, NYSE, -50.00, 2013-01-24\n OIS, NYSE, -5.82, 2013-02-20\n BH, NYSE, -35.35, 2013-01-26\n MMP, NYSE, 6.15, 2013-02-06\n PES, NYSE, 250.00, 2013-02-14\n ABB, NYSE, -18.75, 2013-02-15\n RDY, NYSE, -27.27, 2013-02-15\n KMR, NYSE, -19.23, 2013-02-22\n GEN, NYSE, -20.00, 2013-02-12\n ADS, NYSE, 2.38, 2013-02-01\n CVI, NYSE, 5.15, 2013-03-13\n FTI, NYSE, 0.00, 2013-02-13\n PRA, NYSE, 10.64, 2013-02-20\n STO, NYSE, 26.47, 2013-02-08\n BEL, NYSE, -266.67, 2013-02-21\n FIS, NYSE, -8.82, 2013-02-13\n COL, NYSE, 4.44, 2013-01-19\n KAI, NYSE, 7.32, 2013-02-27\n FRM, NYSE, 233.33, 2013-03-09\n ABC, NYSE, 0.00, 2013-01-25\n BG, NYSE, -76.15, 2013-02-08\n FRO, NYSE, 106.52, 2013-02-22\n ECA, NYSE, -3.12, 2013-02-15\n CS, NYSE, -54.76, 2013-02-08\n EEP, NYSE, -30.77, 2013-02-14\n CVX, NYSE, -1.65, 2013-02-02\n DB, NYSE, 280.49, 2013-02-01\n GXP, NYSE, 200.00, 2013-03-01\n JHX, NYSE, 371.43, 2013-02-28\n PFG, NYSE, 10.81, 2013-02-01\n PVR, NYSE, -227.78, 2013-02-21\n AAP, NYSE, 17.33, 2013-02-08\n KND, NYSE, 4.55, 2013-02-26\n WTW, NYSE, 9.09, 2013-02-14\n CNC, NYSE, 42.42, 2013-02-06\n PRU, NYSE, -2.87, 2013-02-07\n BCH, NYSE, 12.94, 2013-02-06\n NS, NYSE, -19.35, 2013-02-02\n ITUB, NYSE, -5.00, 2013-02-05\n SXL, NYSE, 20.88, 2013-02-21\n VALE, NYSE, 
-26.00, 2013-02-28\n TNP, NYSE, -128.57, 2013-04-20\n LCI, NYSE, 233.33, 2013-02-08\n AUO, NYSE, -122.73, 2013-02-07\n GTI, NYSE, 19.05, 2013-02-27\n HNR, NYSE, -127.27, 2013-05-04\n MWE, NYSE, -38.89, 2013-02-28\n NLS, NYSE, 4.55, 2013-03-05\n RGC, NYSE, 40.00, 2013-02-08\n SBS, NYSE, 48.25, 2013-03-22\n JAH, NYSE, 2.40, 2013-02-15\n NPO, NYSE, 110.71, 2013-02-08\n TRI, NYSE, 9.09, 2013-02-14\n CAE, NYSE, 12.50, 2013-02-14\n LF, NYSE, 971.43, 2013-02-07\n SNY, NYSE, 1.30, 2013-02-08\n WHG, NYSE, 15.91, 2013-02-08\n BANC, NYSE, -300.00, 2013-03-02\n GTN, NYSE, 4.35, 2013-02-21\n BAK, NYSE, -150.00, 2013-02-08\n COP, NYSE, 1.42, 2013-01-31\n CNP, NYSE, 40.00, 2013-02-28\n EEQ, NYSE, -18.18, 2013-02-15\n MRH, NYSE, 60.26, 2013-02-08\n NGS, NYSE, 26.09, 2013-03-15\n NRP, NYSE, 34.88, 2013-02-14\n PXP, NYSE, -22.64, 2013-02-22\n XEC, NYSE, 9.26, 2013-02-20\n IAG, NYSE, -11.11, 2013-02-21\n TS, NYSE, -16.44, 2013-02-22\n EGO, NYSE, 6.67, 2013-02-23\n JNS, NYSE, 35.71, 2013-01-25\n PFS, NYSE, 7.41, 2013-02-02\n ENH, NYSE, 21.68, 2013-02-08\n IHG, NYSE, 5.56, 2013-02-20\n CNX, NYSE, 95.45, 2013-02-01\n AMT, NYSE, -17.07, 2013-02-27\n ABG, NYSE, 10.77, 2013-02-20\n LII, NYSE, 0.00, 2013-02-06\n SRE, NYSE, 11.34, 2013-02-27\n AEE, NYSE, -36.36, 2013-02-21\n PLD, NYSE, 0.00, 2013-02-07\n SAH, NYSE, 4.00, 2013-02-21\n GPI, NYSE, -17.50, 2013-02-20\n FIX, NYSE, -11.11, 2013-03-01\n MMS, NYSE, 12.50, 2013-02-08\n SRI, NYSE, -28.57, 2013-03-02\n RTEC, NYSE, 6.25, 2013-02-05\n NOV, NYSE, 3.47, 2013-02-02\n DF, NYSE, 33.33, 2013-02-14\n SAM, NYSE, 1.63, 2013-02-21\n RL, NYSE, 8.60, 2013-02-07\n FLR, NYSE, 132.35, 2013-02-21\n ALL, NYSE, 942.86, 2013-02-07\n ATI, NYSE, 5.88, 2013-01-24\n EE, NYSE, -14.29, 2013-02-20\n AIT, NYSE, 0.00, 2013-02-01\n CHH, NYSE, 9.76, 2013-02-12\n FMS, NYSE, 105.77, 2013-02-27\n BCO, NYSE, -7.69, 2013-02-02\n CBB, NYSE, -125.00, 2013-02-28\n MWW, NYSE, 0.00, 2013-02-08\n PSA, NYSE, 5.68, 2013-02-22\n E, NYSE, 2.83, 2013-02-16\n JPM, NYSE, 15.83, 2013-01-17\n USB, NYSE, 1.35, 2013-01-17\n HON, NYSE, 0.92, 2013-01-26\n ITG, NYSE, 100.00, 2013-02-01\n ARB, NYSE, 6.25, 2013-02-26\n APL, NYSE, 0.00, 2013-02-19\n AVA, NYSE, -42.22, 2013-02-21\n AXS, NYSE, 64.96, 2013-02-05\n CHT, NYSE, 5.26, 2013-01-31\n MOH, NYSE, 145.45, 2013-02-08\n CVD, NYSE, 2.82, 2013-01-25\n AHT, NYSE, 2.63, 2013-02-28\n GPK, NYSE, 12.50, 2013-02-08\n CNO, NYSE, 8.70, 2013-02-12\n AUQ, NYSE, -28.57, 2013-03-26\n JRN, NYSE, 34.62, 2013-03-08\nGRP.U, NYSE, -14.92, 2013-03-06\n NFP, NYSE, 11.43, 2013-02-15\n CRI, NYSE, 2.30, 2013-02-28\n FMD, NYSE, -20.00, 2013-02-08\n FPO, NYSE, 10.34, 2013-02-22\n TRQ, NYSE, -350.00, 2013-03-26\n WLL, NYSE, 9.21, 2013-02-28\n AEL, NYSE, 14.63, 2013-02-21\n AHL, NYSE, 87.60, 2013-02-08\n AUY, NYSE, -3.70, 2013-02-21\n CMP, NYSE, 0.00, 2013-02-07\n KRO, NYSE, -400.00, 2013-03-13\n TPX, NYSE, 9.09, 2013-01-25\n UTI, NYSE, 75.00, 2013-02-01\n PJC, NYSE, 31.34, 2013-01-31\n TRW, NYSE, 14.81, 2013-02-16\n AIZ, NYSE, 122.58, 2013-02-07\n HTH, NYSE, 62.50, 2013-03-16\n ETP, NYSE, 0.00, 2013-02-21\n SMI, NYSE, 500.00, 2013-02-07\n LSE, NYSE, -6.25, 2013-02-16\n BBD, NYSE, -2.63, 2013-01-29\n NRG, NYSE, 124.14, 2013-02-28\n HOS, NYSE, 29.17, 2013-02-07\n ABR, NYSE, 160.00, 2013-02-16\n FHN, NYSE, 0.00, 2013-01-19\n AGO, NYSE, 32.39, 2013-02-28\n HSP, NYSE, 1.85, 2013-02-14\n HNI, NYSE, -6.98, 2013-02-06\n GHL, NYSE, -32.43, 2013-01-24\n XPO, NYSE, -14.00, 2013-02-28\n CVO, NYSE, 23.08, 2013-02-28\n CHE, NYSE, 16.92, 2013-02-19\n GNW, NYSE, 30.77, 2013-02-06\n CBG, NYSE, 12.24, 
2013-02-07\n SFL, NYSE, -26.67, 2013-02-26\n NEU, NYSE, -15.57, 2013-01-29\n GOL, NYSE, -109.09, 2013-03-26\n CAB, NYSE, 4.17, 2013-02-15\n LTM, NYSE, 1.82, 2013-02-22\n VVI, NYSE, 10.53, 2013-02-02\n WCG, NYSE, 0.00, 2013-02-14\n HEP, NYSE, -2.63, 2013-02-22\n DPZ, NYSE, 8.47, 2013-03-01\n BDC, NYSE, 9.86, 2013-02-08\n EGY, NYSE, -171.43, 2013-03-15\n LPL, NYSE, 2.63, 2013-02-22\n ENS, NYSE, 12.82, 2013-02-07\n BMR, NYSE, 5.88, 2013-02-06\n ACC, NYSE, 9.26, 2013-02-13\n KRG, NYSE, -9.09, 2013-02-08\n WLK, NYSE, 13.60, 2013-02-20\n EXR, NYSE, 4.65, 2013-02-22\n CNS, NYSE, 16.67, 2013-01-24\n IOC, NYSE, 264.29, 2013-02-28\n STON, NYSE, -233.33, 2013-03-16\n CPL, NYSE, 38.10, 2013-03-13\n TPGI, NYSE, -114.29, 2013-02-14\n SHO, NYSE, -3.33, 2013-02-20\n CUBE, NYSE, 5.00, 2013-02-22\n NRF, NYSE, 170.37, 2013-02-15\n BBW, NYSE, -68.29, 2013-02-15\n DLR, NYSE, 4.31, 2013-02-16\n NWE, NYSE, 2.63, 2013-02-15\n ORA, NYSE, 200.00, 2013-02-28\n NP, NYSE, 5.26, 2013-02-21\n SMA, NYSE, -21.05, 2013-02-22\n BBG, NYSE, 25.00, 2013-02-22\n BXC, NYSE, -163.16, 2013-02-14\n KNL, NYSE, 32.14, 2013-02-06\n LVS, NYSE, -8.47, 2013-01-31\n HLF, NYSE, 0.96, 2013-02-20\n MIC, NYSE, -20.41, 2013-02-21\n PHH, NYSE, -11.54, 2013-02-07\n CE, NYSE, 6.35, 2013-01-29\n EDR, NYSE, 0.00, 2013-02-20\n WTI, NYSE, 8.33, 2013-02-27\n ARC, NYSE, -100.00, 2013-03-01\n PBH, NYSE, 8.82, 2013-02-08\n HUN, NYSE, 0.00, 2013-02-13\n DLB, NYSE, 4.44, 2013-01-30\n DSX, NYSE, -33.33, 2013-03-15\n LAZ, NYSE, 84.85, 2013-02-08\n TGP, NYSE, 1.82, 2013-02-22\n TLP, NYSE, -43.48, 2013-03-13\n DRH, NYSE, 16.00, 2013-03-01\n HTGC, NYSE, 8.70, 2013-03-01\n KFN, NYSE, 5.26, 2013-02-06\n THS, NYSE, 0.00, 2013-02-22\n NSR, NYSE, -12.50, 2013-02-06\n WAL, NYSE, 0.00, 2013-01-25\n SLW, NYSE, 2.04, 2013-03-22\n MPW, NYSE, 0.00, 2013-02-08\nRDS.B, NYSE, 16.00, 2013-02-01\n GNK, NYSE, -24.71, 2013-02-21\n MFB, NYSE, 4.76, 2013-03-07\nRDS.A, NYSE, 9.95, 2013-02-01\n ITC, NYSE, 0.93, 2013-02-28\n FTK, NYSE, -158.82, 2013-03-14\n PIKE, NYSE, 168.00, 2013-02-06\n ALJ, NYSE, 0.00, 2013-03-07\n DRC, NYSE, -4.55, 2013-03-01\n STN, NYSE, 8.06, 2013-02-22\n SSW, NYSE, -6.90, 2013-03-06\n CF, NYSE, 3.41, 2013-02-20\n HPY, NYSE, 0.00, 2013-02-08\n ACCO, NYSE, 0.00, 2013-02-14\n ROC, NYSE, -6.25, 2013-02-20\n WPZ, NYSE, -28.57, 2013-02-20\n LCC, NYSE, 44.44, 2013-01-24\n GLP, NYSE, 58.82, 2013-03-15\n AMP, NYSE, 15.54, 2013-01-31\n DHT, NYSE, 108.33, 2013-01-30\n FNF, NYSE, 17.86, 2013-02-20\n NM, NYSE, 20.00, 2013-02-20\n CCO, NYSE, 25.00, 2013-02-20\n BWP, NYSE, 0.00, 2013-02-12\n ICE, NYSE, 5.14, 2013-02-07\n BKD, NYSE, -57.14, 2013-02-12\n AAV, NYSE, 350.00, 2013-03-28\n BAS, NYSE, -42.11, 2013-02-20\n CPA, NYSE, -9.87, 2013-02-07\n LYV, NYSE, -147.06, 2013-02-27\n WNR, NYSE, 5.84, 2013-03-01\n CMG, NYSE, 0.00, 2013-02-06\n RGP, NYSE, -180.00, 2013-02-21\n KOP, NYSE, 11.86, 2013-02-15\n UAL, NYSE, -7.41, 2013-01-25\n ETE, NYSE, -90.91, 2013-02-21\n RSO, NYSE, -17.65, 2013-03-05\n XCO, NYSE, 6.25, 2013-02-21\n PAC, NYSE, 41.18, 2013-02-28\n NYX, NYSE, 10.26, 2013-02-06\n TDG, NYSE, 51.65, 2013-02-05\n BMA, NYSE, 18.40, 2013-02-15\n THI, NYSE, -2.82, 2013-02-22\n BTE, NYSE, -40.48, 2013-03-08\n CNH, NYSE, 29.58, 2013-02-01\n GLA, NYSE, 67.44, 2013-02-14\n POR, NYSE, -9.52, 2013-02-23\n HIL, NYSE, -100.00, 2013-03-12\n HVB, NYSE, -20.00, 2013-02-01\n KS, NYSE, 0.00, 2013-02-14\n HK, NYSE, 0.00, 2013-03-01\n DCP, NYSE, 59.62, 2013-02-28\n DK, NYSE, 10.10, 2013-03-08\n CODI, NYSE, 14.81, 2013-03-07\n VG, NYSE, 25.00, 2013-02-14\n MA, NYSE, 1.46, 2013-02-01\n MWA, 
NYSE, -200.00, 2013-02-06\n KOG, NYSE, 14.29, 2013-03-01\n PWE, NYSE, -500.00, 2013-02-15\n PGTI, NYSE, 100.00, 2013-02-21\n AWH, NYSE, 16.23, 2013-02-14\n NSH, NYSE, -65.71, 2013-02-02\n WYN, NYSE, 5.00, 2013-02-07\n WNS, NYSE, 0.00, 2013-01-17\n AYR, NYSE, 36.84, 2013-02-22\n EVR, NYSE, 55.77, 2013-01-31\n HBI, NYSE, 7.00, 2013-02-06\n WU, NYSE, 20.00, 2013-02-13\n OC, NYSE, -31.25, 2013-02-21\n MR, NYSE, 2.08, 2013-02-26\n DAC, NYSE, -21.43, 2013-02-12\n AWI, NYSE, 3.03, 2013-02-20\n SUSS, NYSE, 444.44, 2013-02-28\n DEI, NYSE, 0.00, 2013-02-13\n OB, NYSE, -200.00, 2013-02-06\n SBH, NYSE, -5.88, 2013-02-08\n EBS, NYSE, -4.35, 2013-03-08\n KBR, NYSE, 122.22, 2013-02-21\n AER, NYSE, 30.95, 2013-02-21\n NOA, NYSE, -11.11, 2013-02-06\n SPR, NYSE, -2.27, 2013-02-13\n ANW, NYSE, 0.00, 2013-02-28\n DCT, NYSE, 10.00, 2013-02-08\n SE, NYSE, -3.03, 2013-02-06\n TOO, NYSE, 16.67, 2013-02-22\n TSL, NYSE, -39.77, 2013-02-27\n TWC, NYSE, 1.95, 2013-02-01\n MVO, NYSE, -5.06, 2013-03-15\n CO, NYSE, 40.00, 2013-02-27\n EXK, NYSE, -45.83, 2013-03-13\n EIG, NYSE, -25.00, 2013-02-28\n HF, NYSE, 21.62, 2013-03-07\n CEL, NYSE, 34.78, 2013-03-05\n FIG, NYSE, 53.85, 2013-02-28\n NGLS, NYSE, 0.00, 2013-02-15\n TCAP, NYSE, 3.64, 2013-03-07\n GFA, NYSE, -483.33, 2013-03-12\n BR, NYSE, -5.56, 2013-02-08\n SCR, NYSE, 85.71, 2013-03-08\n CNK, NYSE, -12.82, 2013-02-21\n DAL, NYSE, 0.00, 2013-01-23\n ORN, NYSE, 250.00, 2013-03-01\n ACM, NYSE, 9.09, 2013-02-06\n JMP, NYSE, 62.50, 2013-02-14\n SLH, NYSE, 1.69, 2013-02-08\n CLR, NYSE, 16.85, 2013-02-28\n BGS, NYSE, -17.95, 2013-02-15\n STAR, NYSE, 12.50, 2013-02-27\n YGE, NYSE, -74.07, 2013-03-05\n DFS, NYSE, -9.40, 2013-03-06\n TEL, NYSE, 1.56, 2013-01-24\n BX, NYSE, 25.53, 2013-02-01\n SEP, NYSE, 8.11, 2013-02-06\n BZ, NYSE, -30.00, 2013-02-27\n PPO, NYSE, -28.26, 2013-02-21\n PRO, NYSE, 25.00, 2013-02-13\n WBC, NYSE, 13.68, 2013-02-16\n DHX, NYSE, 7.14, 2013-01-31\n PMC, NYSE, 13.79, 2013-02-08\n HGG, NYSE, 0.00, 2013-02-01\n OWW, NYSE, -14.29, 2013-02-15\n VR, NYSE, 35.58, 2013-02-01\n CXO, NYSE, -5.88, 2013-02-21\n G, NYSE, 4.76, 2013-02-08\n EJ, NYSE, 160.00, 2013-03-13\n WX, NYSE, 32.00, 2013-03-08\n CMLP, NYSE, -50.00, 2013-02-06\n VMW, NYSE, -5.56, 2013-01-29\n CZZ, NYSE, 63.64, 2013-02-08\n CGA, NYSE, -3.23, 2013-02-09\n TDC, NYSE, 5.71, 2013-02-08\n FLY, NYSE, 137.65, 2013-03-08\n DUF, NYSE, 6.25, 2013-02-26\n MAIN, NYSE, 12.00, 2013-03-08\n REN, NYSE, -50.00, 2013-03-08\n TGH, NYSE, 9.57, 2013-02-13\n DFT, NYSE, -5.00, 2013-02-07\n RF, NYSE, 10.00, 2013-01-23\n PZN, NYSE, -22.22, 2013-02-13\n LL, NYSE, 19.05, 2013-02-21\n NMM, NYSE, 0.00, 2013-01-25\n OZM, NYSE, 5.48, 2013-02-08\n ES, NYSE, -5.08, 2013-02-20\n MSCI, NYSE, -1.89, 2013-02-08\n ARR, NYSE, -18.52, 2013-02-23\n KW, NYSE, 275.00, 2013-03-13\n GTS, NYSE, -10.17, 2013-02-07\n FOR, NYSE, 222.22, 2013-02-14\n LRN, NYSE, 4.35, 2013-02-06\n TNK, NYSE, -125.00, 2013-02-22\n N, NYSE, 21.43, 2013-02-01\n DAN, NYSE, 5.56, 2013-02-22\n BIP, NYSE, 12.07, 2013-02-09\n CPN, NYSE, -500.00, 2013-02-14\n SOL, NYSE, 2.70, 2013-03-15\n PM, NYSE, 1.64, 2013-02-08\n HI, NYSE, 7.89, 2013-02-05\n V, NYSE, 2.25, 2013-02-07\n IPI, NYSE, 0.00, 2013-02-14\n AWK, NYSE, -14.29, 2013-02-27\n HTS, NYSE, 37.84, 2013-02-13\n DPS, NYSE, -4.71, 2013-02-14\n CFX, NYSE, 7.69, 2013-02-07\n WES, NYSE, -27.91, 2013-02-28\n SB, NYSE, -10.00, 2013-02-21\n LO, NYSE, 3.95, 2013-02-14\n LPS, NYSE, 10.45, 2013-02-08\n FF, NYSE, -31.82, 2013-03-19\n NNA, NYSE, 150.00, 2013-02-13\n EPB, NYSE, 14.55, 2013-01-17\n JBT, NYSE, 3.23, 2013-03-07\n DL, 
NYSE, 33.33, 2013-02-27\n RAX, NYSE, -4.55, 2013-02-13\n HCI, NYSE, 67.61, 2013-03-06\n EC, NYSE, -20.47, 2013-02-16\n CLW, NYSE, 10.53, 2013-02-21\n MJN, NYSE, 5.88, 2013-02-01\n EPC, NYSE, 1.85, 2013-02-01\n BPI, NYSE, -3.33, 2013-03-13\n RST, NYSE, 55.56, 2013-03-01\n DGI, NYSE, 92.31, 2013-02-27\n SWI, NYSE, 10.34, 2013-02-05\n CYS, NYSE, -46.15, 2013-02-07\n IVR, NYSE, 20.31, 2013-02-06\n BUD, NYSE, -5.08, 2013-02-28\n PMT, NYSE, -2.35, 2013-02-08\n STWD, NYSE, 15.38, 2013-02-28\n CFN, NYSE, -16.98, 2013-02-09\n SPB, NYSE, 71.43, 2013-02-07\n ARI, NYSE, -10.34, 2013-02-28\n CLNY, NYSE, -13.89, 2013-03-07\n ART, NYSE, 300.00, 2013-02-15\n SEM, NYSE, 12.00, 2013-02-22\n BSBR, NYSE, 578.57, 2013-03-28\n DOLE, NYSE, -6100.00, 2013-03-13\n VSI, NYSE, 0.00, 2013-02-27\n TWO, NYSE, -15.15, 2013-02-07\n CVE, NYSE, -14.29, 2013-02-15\n H, NYSE, 81.82, 2013-02-14\n LEA, NYSE, 7.25, 2013-02-02\n CLD, NYSE, 8.00, 2013-02-14\n AOL, NYSE, 7.50, 2013-02-09\n CHSP, NYSE, 5.13, 2013-02-22\n PEB, NYSE, 0.00, 2013-02-22\n CIT, NYSE, 60.94, 2013-01-30\n KAR, NYSE, -4.55, 2013-02-21\n CIE, NYSE, -66.67, 2013-02-27\n TMH, NYSE, 8.33, 2013-02-06\n KRA, NYSE, -300.00, 2013-02-28\n SYA, NYSE, -29.41, 2013-02-05\n TRNO, NYSE, -162.50, 2013-02-16\n PDM, NYSE, -2.70, 2013-02-08\n GNRC, NYSE, 26.09, 2013-02-15\n ACW, NYSE, -2.17, 2013-03-07\n BALT, NYSE, -11.76, 2013-02-21\n ST, NYSE, 2.17, 2013-01-31\n SEMG, NYSE, 55.56, 2013-03-01\n CALX, NYSE, 20.00, 2013-02-06\n MXL, NYSE, -57.14, 2013-02-06\n STNG, NYSE, -60.00, 2013-02-26\n PRI, NYSE, -1.43, 2013-02-08\n SDRL, NYSE, -93.65, 2013-03-01\n CLDT, NYSE, 0.00, 2013-02-20\n EXL, NYSE, 0.00, 2013-02-28\n LYB, NYSE, -0.88, 2013-02-02\n PNG, NYSE, 7.14, 2013-02-07\n PLOW, NYSE, -25.00, 2013-03-12\n SIX, NYSE, 198.00, 2013-02-21\n NKA, NYSE, 1066.67, 2013-02-01\n RRTS, NYSE, 0.00, 2013-02-07\n JKS, NYSE, -332.48, 2013-04-11\n CODE, NYSE, -13.64, 2013-01-30\n FAF, NYSE, 44.64, 2013-02-22\n QEP, NYSE, 3.13, 2013-02-20\n OAS, NYSE, 6.52, 2013-02-26\n VPG, NYSE, 15.38, 2013-02-13\n HPP, NYSE, 9.52, 2013-03-07\n FN, NYSE, 9.09, 2013-02-05\n ECT, NYSE, 65.85, 2013-03-16\n QUAD, NYSE, -6.67, 2013-03-05\n KKR, NYSE, 54.84, 2013-02-08\n RLD, NYSE, 20.00, 2013-02-07\n AMRC, NYSE, 44.44, 2013-03-19\n GDOT, NYSE, 50.00, 2013-02-01\n AT, NYSE, -160.00, 2013-03-01\n ENV, NYSE, 0.00, 2013-02-15\n IL, NYSE, 200.00, 2013-02-22\n WSR, NYSE, -12.00, 2013-03-13\n SFUN, NYSE, 35.71, 2013-02-09\n COR, NYSE, 5.00, 2013-02-23\n VC, NYSE, 20.62, 2013-03-01\n CCSC, NYSE, -20.00, 2013-03-07\n CCG, NYSE, 0.00, 2013-02-27\n EFC, NYSE, -72.73, 2013-02-14\n TOWR, NYSE, 183.33, 2013-02-16\n CHMT, NYSE, -53.13, 2013-02-26\n HBM, NYSE, 200.00, 2013-02-21\n EXAM, NYSE, 55.56, 2013-02-28\n XUE, NYSE, 7.69, 2013-02-28\n CMRE, NYSE, 6.67, 2013-01-24\n NOAH, NYSE, 20.00, 2013-02-26\n IPHI, NYSE, -40.00, 2013-02-05\n BITA, NYSE, 33.33, 2013-03-08\n BAH, NYSE, 11.11, 2013-01-31\n GM, NYSE, -2.04, 2013-02-15\n TROX, NYSE, -60.00, 2013-02-21\n DANG, NYSE, 20.00, 2013-03-08\n YOKU, NYSE, 9.09, 2013-03-01\n FRC, NYSE, -16.44, 2013-01-17\n RFP, NYSE, 52.38, 2013-02-13\n ISS, NYSE, 15.38, 2013-03-09\n WD, NYSE, -14.29, 2013-03-07\n FLT, NYSE, 10.00, 2013-02-08\n GCAP, NYSE, -325.00, 2013-03-13\n FRF, NYSE, -25.93, 2013-03-29\n SWFT, NYSE, 46.15, 2013-01-24\n AG, NYSE, -10.34, 2013-02-27\n QRE, NYSE, -174.07, 2013-03-07\n AAT, NYSE, 11.76, 2013-02-20\n MCC, NYSE, 5.41, 2013-02-07\n NLSN, NYSE, 3.51, 2013-02-12\n AGRO, NYSE, -71.43, 2013-03-22\n BKU, NYSE, 27.08, 2013-01-30\n INXN, NYSE, -38.89, 2013-02-28\n NPTN, 
NYSE, 16.67, 2013-02-22\n INN, NYSE, 25.00, 2013-02-27\n KMI, NYSE, -5.88, 2013-01-17\n HCA, NYSE, 9.64, 2013-02-05\n MX, NYSE, 135.21, 2013-01-31\n HII, NYSE, 8.89, 2013-02-28\n QIHU, NYSE, 175.00, 2013-03-06\n APO, NYSE, 119.48, 2013-02-09\n GNC, NYSE, 8.70, 2013-02-15\n SDT, NYSE, 11.48, 2013-03-16\n UAN, NYSE, 16.67, 2013-02-28\n ARCO, NYSE, 5.00, 2013-03-09\n ELLI, NYSE, 36.36, 2013-02-15\n TMS, NYSE, -23.81, 2013-02-15\n SQNS, NYSE, -16.00, 2013-02-08\n STAG, NYSE, 17.24, 2013-02-21\n AL, NYSE, 8.33, 2013-03-01\n TLLP, NYSE, 10.42, 2013-02-12\n RENN, NYSE, 14.29, 2013-03-12\n NQ, NYSE, 800.00, 2013-03-07\n THR, NYSE, -14.29, 2013-02-08\n KOS, NYSE, 125.00, 2013-02-26\n RLJ, NYSE, 4.35, 2013-02-28\n NGL, NYSE, -7.41, 2013-02-16\n FENG, NYSE, 100.00, 2013-03-07\n LNKD, NYSE, 900.00, 2013-02-08\n NMFC, NYSE, 5.88, 2013-03-07\n ACTV, NYSE, 5.26, 2013-02-15\n TAOM, NYSE, 700.00, 2013-03-15\n RATE, NYSE, -60.00, 2013-02-13\n VHS, NYSE, -22.22, 2013-01-31\n MPC, NYSE, 8.13, 2013-01-31\n MITT, NYSE, -1.16, 2013-03-06\n OILT, NYSE, 0.00, 2013-03-07\n SXC, NYSE, 14.71, 2013-02-06\n AMTG, NYSE, -8.57, 2013-03-07\n AMID, NYSE, -2500.00, 2013-04-17\n WAIR, NYSE, -7.41, 2013-01-30\n PER, NYSE, -7.58, 2013-03-02\n PPP, NYSE, -44.44, 2013-02-22\n FNV, NYSE, -8.33, 2013-03-20\n FSM, NYSE, 16.67, 2013-03-21\n FBHS, NYSE, 4.55, 2013-02-01\n XLS, NYSE, 4.44, 2013-03-02\n XYL, NYSE, 2.17, 2013-02-08\n NDRO, NYSE, 4.76, 2013-03-19\n RNF, NYSE, -33.33, 2013-03-20\n VAC, NYSE, 25.53, 2013-02-22\n CHKR, NYSE, -7.25, 2013-03-16\n PACD, NYSE, 14.29, 2013-02-28\n INVN, NYSE, 0.00, 2013-01-24\n DLPH, NYSE, 3.45, 2013-02-06\n MN, NYSE, 0.00, 2013-02-14\n RRMS, NYSE, -25.00, 2013-03-01\n WPX, NYSE, -400.00, 2013-03-01\n LPI, NYSE, 0.00, 2013-03-13\n SN, NYSE, -80.00, 2013-03-07\n KORS, NYSE, 60.00, 2013-02-13\n BCEI, NYSE, -7.89, 2013-03-15\n BOXC, NYSE, 4.78, 2013-01-29\n PVG, NYSE, -25.00, 2013-03-06\n POST, NYSE, 30.43, 2013-02-08\n SLCA, NYSE, 32.26, 2013-02-27\n MTDR, NYSE, -116.67, 2013-03-14\n GWAY, NYSE, -200.00, 2013-02-13\n EPAM, NYSE, -10.81, 2013-02-28\n RNDY, NYSE, 5.56, 2013-03-01\n CPAC, NYSE, -13.33, 2013-02-21\n PRLB, NYSE, 7.69, 2013-02-14\n YELP, NYSE, -50.00, 2013-02-07\n NSM, NYSE, 7.58, 2013-03-08\n ALSN, NYSE, 257.14, 2013-02-20\n DWRE, NYSE, 350.00, 2013-02-15\n VNTV, NYSE, 16.13, 2013-02-21\n ET, NYSE, 34.78, 2013-02-22\n VIPS, NYSE, 1100.00, 2013-02-22\n VCRA, NYSE, -33.33, 2013-02-28\n RM, NYSE, -1.89, 2013-02-28\n BNNY, NYSE, 0.00, 2013-02-12\n MM, NYSE, 200.00, 2013-02-20\n RXN, NYSE, -15.00, 2013-02-12\n GLOG, NYSE, -20.00, 2013-02-28\n PBA, NYSE, 44.44, 2013-03-02\n RPAI, NYSE, 15.79, 2013-02-20\n OAK, NYSE, 63.33, 2013-02-15\n FET, NYSE, -3.45, 2013-02-15\n MRC, NYSE, 17.02, 2013-02-22\n PSX, NYSE, 21.18, 2013-01-31\n TUMI, NYSE, 0.00, 2013-03-21\n ACRE, NYSE, -38.10, 2013-04-02\n EVER, NYSE, 17.24, 2013-01-31\n PDH, NYSE, -13.79, 2013-02-07\n WMC, NYSE, 3.23, 2013-04-03\n WAGE, NYSE, 0.00, 2013-02-21\n HTA, NYSE, 0.00, 2013-02-21\n ALEX, NYSE, 42.86, 2013-02-20\n BKW, NYSE, 53.33, 2013-02-16\n EQM, NYSE, 51.22, 2013-01-25\n NOW, NYSE, 38.46, 2013-01-31\n EGL, NYSE, 18.46, 2013-03-13\n NGVC, NYSE, 25.00, 2013-02-01\n NTI, NYSE, -25.00, 2013-03-14\n AMRE, NYSE, 4.35, 2013-02-20\n GMED, NYSE, 15.79, 2013-02-28\n MANU, NYSE, -46.43, 2013-02-15\n HCLP, NYSE, -28.57, 2013-02-01\n ADT, NYSE, 4.76, 2013-01-31\n TRLA, NYSE, -20.00, 2013-02-13\n SRC, NYSE, 8.82, 2013-02-28\n NBHC, NYSE, -14.29, 2013-01-29\n BSMX, NYSE, -4.17, 2013-02-19\n HY, NYSE, 14.53, 2013-02-20\n SMLP, NYSE, 40.00, 
2013-03-14\n DYN, NYSE, -1714.29, 2013-03-15\n LXFR, NYSE, 43.75, 2013-03-12\n LOCK, NYSE, 16.67, 2013-02-21\n JMI, NYSE, 97.78, 2013-03-22\n BERY, NYSE, -40.00, 2013-02-01\n FLTX, NYSE, 0.00, 2013-02-21\n ANFI, NYSE, 30.77, 2013-02-26\n SSTK, NYSE, -100.00, 2013-02-22\n SDLP, NYSE, 90.91, 2013-03-01\n MPLX, NYSE, -25.00, 2013-01-31\n WWAV, NYSE, 5.88, 2013-02-14\n SXE, NYSE, -4121.43, 2013-03-29\n DKL, NYSE, -5.56, 2013-03-06\n RKUS, NYSE, -20.00, 2013-02-13\n WGP, NYSE, 57.14, 2013-02-28\n PBF, NYSE, -92.31, 2013-03-01\n SBY, NYSE, 0.00, 2013-03-01\n RIOM, NYSE, 77.78, 2013-03-29\n BFAM, NYSE, -1186.36, 2013-03-27\n ZTS, NYSE, -79.41, 2013-03-29\n DDC, NYSE, -39.13, 2013-04-04\n ABM, NYSE, 18.18, 2013-03-05\n ANN, NYSE, 0.00, 2013-03-09\n BBY, NYSE, 5.81, 2013-03-02\n BF.B, NYSE, 4.29, 2013-03-07\n BKE, NYSE, 2.40, 2013-03-15\n BNS, NYSE, -3.17, 2013-03-06\n BRC, NYSE, -22.45, 2013-02-22\n CATO, NYSE, -3.57, 2013-03-22\n COO, NYSE, 2.50, 2013-03-08\n CPB, NYSE, 6.06, 2013-02-16\n CFI, NYSE, 10.34, 2013-02-28\n DCI, NYSE, -10.53, 2013-02-26\n DDS, NYSE, -1.03, 2013-02-26\n DE, NYSE, 17.02, 2013-02-14\n DY, NYSE, 50.00, 2013-02-27\n EV, NYSE, -3.85, 2013-02-21\n ENZ, NYSE, -133.33, 2013-03-13\n ESL, NYSE, 13.11, 2013-03-01\nFCE.A, NYSE, 9.09, 2013-03-28\n M, NYSE, 3.54, 2013-02-27\n GCO, NYSE, 1.41, 2013-03-09\n GPS, NYSE, 2.82, 2013-03-01\n HD, NYSE, 4.69, 2013-02-27\n HEI, NYSE, -12.50, 2013-02-21\n HNZ, NYSE, 10.00, 2013-02-28\n HOV, NYSE, -66.67, 2013-03-07\n HRB, NYSE, -633.33, 2013-03-08\n HRL, NYSE, -2.04, 2013-02-22\n HPQ, NYSE, 15.49, 2013-02-22\n JCP, NYSE, -926.32, 2013-02-28\n KR, NYSE, 25.71, 2013-03-08\n KSS, NYSE, 1.84, 2013-03-01\n LB, NYSE, 1.15, 2013-02-28\n LOW, NYSE, 13.04, 2013-02-26\n LZB, NYSE, 16.67, 2013-02-20\n MDT, NYSE, 2.20, 2013-02-20\n MEI, NYSE, 350.00, 2013-03-01\n MPR, NYSE, 0.00, 2013-03-22\n NAV, NYSE, 14.11, 2013-03-08\n JWN, NYSE, 4.48, 2013-02-22\n ODC, NYSE, -35.42, 2013-03-12\n OXM, NYSE, -5.80, 2013-04-03\n PBY, NYSE, -225.00, 2013-04-16\n PLL, NYSE, 8.96, 2013-02-28\n PNY, NYSE, 1.72, 2013-03-07\n PVH, NYSE, 6.67, 2013-03-28\n THO, NYSE, 0.00, 2013-03-08\n TIF, NYSE, 2.19, 2013-03-23\n TJX, NYSE, 1.23, 2013-02-28\n TOL, NYSE, -81.82, 2013-02-21\n TTC, NYSE, 23.26, 2013-02-22\n VAL, NYSE, -9.09, 2013-02-13\n JW.A, NYSE, 13.41, 2013-03-08\n WMT, NYSE, 6.37, 2013-02-22\n WSM, NYSE, 4.69, 2013-03-20\n FL, NYSE, -11.11, 2013-03-09\n CHS, NYSE, 0.00, 2013-03-01\n REX, NYSE, -800.00, 2013-03-29\n BKS, NYSE, -136.00, 2013-03-01\n CAL, NYSE, 75.00, 2013-03-16\n SIG, NYSE, 1.44, 2013-03-29\n ZLC, NYSE, -1.92, 2013-02-22\n AEO, NYSE, 0.00, 2013-03-07\n FGP, NYSE, -10.00, 2013-03-08\n BMO, NYSE, 1.37, 2013-02-27\n RY, NYSE, 0.75, 2013-03-01\n GEF, NYSE, -13.21, 2013-02-28\n MOV, NYSE, 70.83, 2013-03-22\n SKS, NYSE, 13.33, 2013-02-27\n TD, NYSE, 1.55, 2013-03-01\n ANF, NYSE, 14.51, 2013-02-23\n CIEN, NYSE, 116.00, 2013-03-08\n KMG, NYSE, -17.65, 2013-03-09\n IRET, NYSE, -5.88, 2013-03-13\n CM, NYSE, 0.00, 2013-03-01\nHEI.A, NYSE, -18.60, 2013-02-21\n UBA, NYSE, 13.04, 2013-03-07\n KFY, NYSE, 6.90, 2013-03-07\n TGT, NYSE, 12.24, 2013-02-28\n KKD, NYSE, 0.00, 2013-03-15\n NDZ, NYSE, 0.00, 2013-03-06\n MVC, NYSE, -20.00, 2013-03-08\n CBK, NYSE, 52.17, 2013-03-14\n SJM, NYSE, 7.30, 2013-02-16\n BIG, NYSE, 5.03, 2013-03-07\n IDT, NYSE, -7.14, 2013-03-08\n JOY, NYSE, 14.91, 2013-02-28\n SSI, NYSE, -5.93, 2013-03-13\n GME, NYSE, 3.35, 2013-03-29\n DKS, NYSE, -3.74, 2013-03-12\n A, NYSE, -5.97, 2013-02-15\n MTN, NYSE, -3.51, 2013-03-07\n GES, NYSE, 10.47, 2013-03-21\n 
CRM, NYSE, 66.67, 2013-03-01\n NWY, NYSE, 25.00, 2013-03-22\n PAY, NYSE, 8.11, 2013-03-06\n DSW, NYSE, -4.17, 2013-03-20\n NX, NYSE, -183.33, 2013-03-08\n AGX, NYSE, 15.00, 2013-04-11\n CMD, NYSE, -5.26, 2013-03-08\n DG, NYSE, 7.78, 2013-03-26\n EXPR, NYSE, 1.35, 2013-03-14\n P, NYSE, 0.00, 2013-03-07\n GWRE, NYSE, 181.82, 2013-02-27\n BLOX, NYSE, -20.00, 2013-02-22\n TLYS, NYSE, 6.67, 2013-03-21\n PANW, NYSE, -250.00, 2013-03-01\n WDAY, NYSE, 24.00, 2013-03-08\n RH, NYSE, 4.92, 2013-04-19\n AIR, NYSE, 4.55, 2013-03-20\n ATU, NYSE, -5.41, 2013-03-21\n AZO, NYSE, 0.84, 2013-02-27\n AZZ, NYSE, 2.04, 2013-04-09\n CAG, NYSE, -3.51, 2013-04-04\n CLC, NYSE, 2.17, 2013-03-21\n CMC, NYSE, -80.00, 2013-03-29\n KMX, NYSE, 0.00, 2013-04-11\n FC, NYSE, -27.27, 2013-04-05\n FDO, NYSE, -0.82, 2013-04-11\n FDX, NYSE, -10.87, 2013-03-21\n FUL, NYSE, -3.92, 2013-03-28\n GIS, NYSE, 12.28, 2013-03-21\n KBH, NYSE, 30.43, 2013-03-22\n LEN, NYSE, 100.00, 2013-03-21\n LNN, NYSE, 16.28, 2013-03-28\n LUB, NYSE, -100.00, 2013-03-21\n MKC, NYSE, 1.79, 2013-04-03\n RT, NYSE, 0.00, 2013-04-11\n MSM, NYSE, 0.00, 2013-04-11\n NKE, NYSE, 8.96, 2013-03-22\n ORCL, NYSE, -1.56, 2013-03-21\n PIR, NYSE, 0.00, 2013-04-12\n PKE, NYSE, -21.43, 2013-05-10\n RPM, NYSE, 16.67, 2013-04-05\n SVU, NYSE, -200.00, 2013-04-25\n TXI, NYSE, 25.00, 2013-03-28\n UNF, NYSE, 18.75, 2013-03-28\n WGO, NYSE, 37.50, 2013-03-29\n WOR, NYSE, 6.12, 2013-03-22\n JBL, NYSE, -2.17, 2013-03-21\n GBX, NYSE, 21.62, 2013-04-05\n DRI, NYSE, 0.99, 2013-03-23\n FDS, NYSE, -21.24, 2013-03-20\n SCS, NYSE, 0.00, 2013-03-28\n SJR, NYSE, 5.56, 2013-04-13\n RHT, NYSE, 19.05, 2013-03-28\n OMN, NYSE, -75.00, 2013-04-04\n MON, NYSE, 7.06, 2013-04-04\n GPN, NYSE, -1.14, 2013-04-03\n AYI, NYSE, 0.00, 2013-04-04\n CCL, NYSE, 100.00, 2013-03-16\n CUK, NYSE, 33.33, 2013-03-16\n STZ, NYSE, 4.44, 2013-04-11\n ACN, NYSE, 3.09, 2013-03-29\n SNX, NYSE, 1.15, 2013-03-28\n TAL, NYSE, 50.00, 2013-04-24\n IHS, NYSE, 11.90, 2013-03-22\n EDU, NYSE, 63.64, 2013-04-25\n KED, NYSE, -99.22, 2013-05-02\n CORR, NYSE, -9.09, 2013-05-11\n DFS, NYSE, 18.75, 2013-04-24\n ZEP, NYSE, 54.55, 2013-04-10\n MG, NYSE, -58.82, 2013-04-09\n MOS, NYSE, 5.62, 2013-03-28\n ABT, NYSE, 0.00, 2013-04-18\n ABX, NYSE, 6.98, 2013-04-25\n AB, NYSE, 8.57, 2013-05-02\n ACO, NYSE, -10.64, 2013-04-27\n ADM, NYSE, -5.88, 2013-05-01\n AEM, NYSE, -35.29, 2013-04-26\n AEP, NYSE, 0.00, 2013-04-27\n AES, NYSE, -14.29, 2013-05-10\n AET, NYSE, 8.70, 2013-05-01\n AFL, NYSE, 4.32, 2013-04-25\n AGCO, NYSE, 35.23, 2013-05-01\n HES, NYSE, 24.20, 2013-04-25\n AIG, NYSE, 52.27, 2013-05-03\n AIN, NYSE, 0.00, 2013-05-02\n AJG, NYSE, 33.33, 2013-05-01\n ALU, NYSE, -81.82, 2013-04-27\n MATX, NYSE, 31.25, 2013-05-07\n ALK, NYSE, 15.09, 2013-04-26\n ALX, NYSE, -2.56, 2013-05-07\n BEAM, NYSE, 18.52, 2013-05-03\n AME, NYSE, 3.92, 2013-04-26\n TWX, NYSE, 9.33, 2013-05-02\n AVD, NYSE, 47.50, 2013-05-03\n AMN, NYSE, 33.33, 2013-05-03\n AN, NYSE, 7.94, 2013-04-19\n AON, NYSE, 0.00, 2013-04-27\n APA, NYSE, -9.01, 2013-05-10\n APC, NYSE, 17.39, 2013-05-07\n APD, NYSE, 0.00, 2013-04-24\n APH, NYSE, 1.16, 2013-04-19\n ARG, NYSE, 0.88, 2013-05-03\n AAN, NYSE, -5.63, 2013-04-26\n ARW, NYSE, 3.49, 2013-05-02\n ASGN, NYSE, 94.44, 2013-04-25\n ASH, NYSE, 14.10, 2013-04-25\n ASR, NYSE, -13.25, 2013-04-23\n GAS, NYSE, -2.96, 2013-05-01\n ATO, NYSE, 1.63, 2013-05-02\n ATW, NYSE, 2.40, 2013-05-02\n AU, NYSE, -26.67, 2013-05-14\n AVP, NYSE, 85.71, 2013-05-01\n AVT, NYSE, 3.45, 2013-04-26\n AVY, NYSE, 3.51, 2013-04-25\n AXP, NYSE, 3.60, 2013-04-18\n B, NYSE, 
-11.11, 2013-04-27\n BA, NYSE, 17.69, 2013-04-25\n BAC, NYSE, -13.04, 2013-04-17\n BAX, NYSE, 0.96, 2013-04-19\n BC, NYSE, 22.58, 2013-04-26\n OMX, NYSE, -52.17, 2013-05-08\n BCE, NYSE, 10.00, 2013-05-10\n BCR, NYSE, 0.00, 2013-04-24\n BDX, NYSE, 6.67, 2013-05-03\n BEN, NYSE, 8.47, 2013-05-01\n BGG, NYSE, -17.59, 2013-04-20\n BHE, NYSE, 10.00, 2013-04-26\n BHI, NYSE, 4.84, 2013-04-20\n BID, NYSE, -175.00, 2013-05-10\n BIO, NYSE, -38.18, 2013-05-08\n BK, NYSE, 9.62, 2013-04-18\n BKH, NYSE, 19.18, 2013-05-03\n WRB, NYSE, 0.00, 2013-04-24\n BLC, NYSE, 6.67, 2013-04-26\n BLL, NYSE, -9.38, 2013-04-26\n BLX, NYSE, -21.82, 2013-04-18\n BMI, NYSE, -58.33, 2013-04-17\n BMS, NYSE, -1.85, 2013-04-26\n BMY, NYSE, 0.00, 2013-04-26\n BOH, NYSE, -6.90, 2013-04-23\n BXS, NYSE, 4.76, 2013-04-23\n BPL, NYSE, 19.44, 2013-05-04\nBRK.A, NYSE, 197.70, 2013-05-04\n BRO, NYSE, 5.13, 2013-04-16\n BSX, NYSE, 0.00, 2013-04-26\n MTRN, NYSE, -2.94, 2013-04-26\n CAI, NYSE, -1.32, 2013-04-25\n CAT, NYSE, -2.24, 2013-04-23\n CB, NYSE, 12.44, 2013-04-23\n CBI, NYSE, 15.49, 2013-05-03\n CBM, NYSE, 85.00, 2013-05-04\n CBU, NYSE, -1.96, 2013-04-24\n CBT, NYSE, -7.25, 2013-05-01\n CCC, NYSE, 20.00, 2013-05-07\n CCE, NYSE, 2.63, 2013-04-26\n C, NYSE, 9.32, 2013-04-16\n CCK, NYSE, 4.17, 2013-04-18\n CDE, NYSE, -74.07, 2013-05-10\n CDI, NYSE, -40.91, 2013-05-03\n CAH, NYSE, 26.32, 2013-05-03\n CFR, NYSE, -4.21, 2013-04-25\n CHD, NYSE, 5.56, 2013-05-03\n CPK, NYSE, 14.93, 2013-05-03\n CI, NYSE, 20.28, 2013-05-03\n CIA, NYSE, 0.00, 2013-05-03\n CKH, NYSE, -156.12, 2013-04-30\n CL, NYSE, 0.00, 2013-04-26\n CLF, NYSE, 87.50, 2013-04-25\n CLH, NYSE, 25.81, 2013-05-02\n CLX, NYSE, -5.66, 2013-05-02\n CMA, NYSE, 4.48, 2013-04-17\n CMO, NYSE, 3.33, 2013-04-25\n CRK, NYSE, -11.36, 2013-04-30\n CMS, NYSE, 15.22, 2013-04-26\n CNA, NYSE, 21.13, 2013-05-01\n CNW, NYSE, -29.63, 2013-05-02\n CHG, NYSE, 19.00, 2013-05-10\n CNL, NYSE, -8.33, 2013-04-30\n COG, NYSE, -20.00, 2013-04-25\n COT, NYSE, -100.00, 2013-05-02\n CP, NYSE, 2.54, 2013-04-25\n CPF, NYSE, 105.00, 2013-04-27\n CQB, NYSE, 28.57, 2013-05-08\n CR, NYSE, -0.95, 2013-04-23\nCRD.B, NYSE, -29.17, 2013-05-09\n CRS, NYSE, -9.21, 2013-04-26\n CSC, NYSE, 32.29, 2013-05-16\n CSL, NYSE, 0.00, 2013-04-25\n CTB, NYSE, 31.82, 2013-05-10\n CTL, NYSE, 10.14, 2013-05-09\n CTS, NYSE, 16.67, 2013-04-24\n CUB, NYSE, 52.24, 2013-05-03\n CMI, NYSE, -22.58, 2013-05-01\n CUZ, NYSE, -8.33, 2013-05-09\n CVC, NYSE, -185.71, 2013-05-10\n CVH, NYSE, 26.58, 2013-05-02\n CW, NYSE, 28.21, 2013-05-02\n CWT, NYSE, -200.00, 2013-05-02\n CX, NYSE, -140.00, 2013-04-27\n CYN, NYSE, -2.17, 2013-04-19\n D, NYSE, -7.78, 2013-04-26\n DBD, NYSE, -125.00, 2013-05-01\n DCO, NYSE, -18.60, 2013-05-07\n DD, NYSE, 1.30, 2013-04-24\n CVA, NYSE, -61.54, 2013-04-18\n DHR, NYSE, -1.32, 2013-04-19\n DIS, NYSE, 2.60, 2013-05-08\n DLX, NYSE, 3.41, 2013-04-26\n DNB, NYSE, 2.26, 2013-05-03\n RRD, NYSE, 12.12, 2013-04-26\n DOV, NYSE, 1.85, 2013-04-18\n DOW, NYSE, 15.00, 2013-04-26\n DRE, NYSE, 0.00, 2013-04-25\n DHI, NYSE, 60.00, 2013-04-27\n UFS, NYSE, -35.37, 2013-04-26\n DTE, NYSE, 30.10, 2013-04-27\n DUK, NYSE, -1.92, 2013-05-04\n DVN, NYSE, 17.86, 2013-05-02\n DV, NYSE, 8.43, 2013-04-24\n EAT, NYSE, 4.35, 2013-04-24\n ECL, NYSE, 3.45, 2013-05-01\n ED, NYSE, 4.85, 2013-05-03\n EDE, NYSE, 11.11, 2013-04-26\n EFX, NYSE, 0.00, 2013-04-25\n EGN, NYSE, -7.32, 2013-04-30\n EGP, NYSE, -1.30, 2013-04-19\n ELP, NYSE, 0.00, 2013-05-17\n ELY, NYSE, 65.00, 2013-04-26\n EMC, NYSE, 3.23, 2013-04-25\n EMR, NYSE, -1.28, 2013-05-08\n EOG, NYSE, 
59.29, 2013-05-07\n EQT, NYSE, 26.92, 2013-04-26\n ESE, NYSE, -17.65, 2013-05-08\n ESV, NYSE, 5.43, 2013-04-30\n ETN, NYSE, 6.33, 2013-04-30\n ETR, NYSE, 0.00, 2013-04-26\n EXAR, NYSE, 16.67, 2013-05-01\n F, NYSE, 7.89, 2013-04-25\n CLGX, NYSE, 8.11, 2013-04-25\n FNB, NYSE, -4.76, 2013-04-24\n FCF, NYSE, 0.00, 2013-04-24\n FBP, NYSE, -122.22, 2013-05-04\n FICO, NYSE, -9.38, 2013-04-25\n FLO, NYSE, 6.98, 2013-05-17\n FMC, NYSE, 1.85, 2013-05-01\n FOE, NYSE, 66.67, 2013-04-25\n S, NYSE, 38.24, 2013-04-25\n NEE, NYSE, 10.89, 2013-05-01\n FRT, NYSE, 0.88, 2013-05-02\n FRX, NYSE, 47.06, 2013-04-24\n FSS, NYSE, 20.00, 2013-05-07\n FUN, NYSE, 24.32, 2013-05-09\n FUR, NYSE, 77.78, 2013-05-03\n GBL, NYSE, 17.86, 2013-05-08\n GVA, NYSE, -103.85, 2013-05-10\n BGC, NYSE, -319.23, 2013-05-01\n GD, NYSE, 8.00, 2013-04-25\n GE, NYSE, 11.43, 2013-04-20\n RHP, NYSE, 26.47, 2013-05-08\n AXLL, NYSE, -38.02, 2013-05-08\n GGG, NYSE, 15.07, 2013-04-25\n GHM, NYSE, 28.13, 2013-06-01\n GIB, NYSE, 14.58, 2013-05-01\n GLT, NYSE, 17.65, 2013-05-01\n GLW, NYSE, 15.38, 2013-04-25\n GSK, NYSE, 6.49, 2013-04-26\n GLF, NYSE, 175.00, 2013-04-30\n GNI, NYSE, -14.58, 2013-04-26\n GPC, NYSE, -6.06, 2013-04-20\n GRA, NYSE, 0.00, 2013-04-25\n GTY, NYSE, 0.00, 2013-05-03\n GWW, NYSE, 7.69, 2013-04-17\n HAE, NYSE, 4.35, 2013-05-02\n HAL, NYSE, 17.54, 2013-04-23\n HAR, NYSE, 25.40, 2013-05-03\n HVT, NYSE, 33.33, 2013-05-02\n HRC, NYSE, -2.00, 2013-04-25\n HCC, NYSE, 31.71, 2013-05-01\n HCN, NYSE, 1.11, 2013-05-08\n HCP, NYSE, 2.78, 2013-05-01\n HOG, NYSE, 2.06, 2013-04-26\n HE, NYSE, -12.82, 2013-05-09\n HL, NYSE, -66.67, 2013-05-11\n HMA, NYSE, 0.00, 2013-05-03\n HMC, NYSE, -28.57, 2013-04-27\n HMN, NYSE, 7.84, 2013-04-25\n HFC, NYSE, -7.91, 2013-05-08\n HOT, NYSE, 43.40, 2013-05-01\n HP, NYSE, 5.43, 2013-04-26\n HLS, NYSE, 14.29, 2013-04-26\n HRS, NYSE, 0.00, 2013-05-01\n HSC, NYSE, 50.00, 2013-05-10\n HSY, NYSE, 4.81, 2013-04-26\n HUBB, NYSE, -0.90, 2013-04-19\n HUM, NYSE, 51.12, 2013-05-02\n HXL, NYSE, 4.88, 2013-04-23\n IBM, NYSE, -1.96, 2013-04-19\n IDA, NYSE, 17.54, 2013-05-03\n IEX, NYSE, 4.23, 2013-04-23\n IFF, NYSE, 5.31, 2013-05-08\n DIN, NYSE, 12.87, 2013-05-03\n INT, NYSE, 14.06, 2013-05-01\n IP, NYSE, -12.16, 2013-05-03\n IPG, NYSE, -7.69, 2013-04-20\n IO, NYSE, -85.71, 2013-05-01\n IR, NYSE, 2.44, 2013-04-24\n IRF, NYSE, 27.50, 2013-04-30\n ITW, NYSE, 0.00, 2013-04-24\n JEC, NYSE, -2.44, 2013-04-30\n JNJ, NYSE, 2.13, 2013-04-17\n JNY, NYSE, 0.00, 2013-05-02\n K, NYSE, 0.00, 2013-05-03\n KAMN, NYSE, -2.94, 2013-04-30\n KDN, NYSE, 5.71, 2013-05-10\n KEX, NYSE, 2.15, 2013-04-25\n KEY, NYSE, 5.00, 2013-04-19\n KIM, NYSE, 3.13, 2013-05-01\n KMB, NYSE, 10.45, 2013-04-20\n KEM, NYSE, -133.33, 2013-05-10\n KMT, NYSE, -8.45, 2013-04-26\n KO, NYSE, 2.22, 2013-04-17\n KSU, NYSE, 2.30, 2013-04-20\n LDR, NYSE, -9.52, 2013-05-07\n LEG, NYSE, -13.16, 2013-04-26\n LLY, NYSE, 8.57, 2013-04-25\n LM, NYSE, -13.33, 2013-05-01\n LNC, NYSE, -7.27, 2013-05-02\n LPX, NYSE, 0.00, 2013-05-08\n LXU, NYSE, -110.53, 2013-05-07\n LTC, NYSE, -1.67, 2013-05-01\n L, NYSE, 1.19, 2013-04-30\n LUV, NYSE, 133.33, 2013-04-26\n LUX, NYSE, 7.14, 2013-05-02\n MKL, NYSE, 40.11, 2013-05-01\n MAN, NYSE, 40.00, 2013-04-20\n MTW, NYSE, -35.71, 2013-05-01\n SM, NYSE, 46.43, 2013-05-01\n MAS, NYSE, -7.14, 2013-04-30\n MTZ, NYSE, 12.50, 2013-05-03\n MCD, NYSE, -0.79, 2013-04-20\n MDC, NYSE, 73.08, 2013-05-03\n MDP, NYSE, 4.35, 2013-04-26\n MDR, NYSE, -40.00, 2013-05-09\n MDU, NYSE, 36.36, 2013-05-01\n MED, NYSE, 26.47, 2013-05-09\n CVS, NYSE, 5.06, 2013-05-02\n 
MFC, NYSE, 18.52, 2013-05-03\n MGA, NYSE, 13.57, 2013-05-11\n MGM, NYSE, 130.00, 2013-05-03\n MMC, NYSE, 4.29, 2013-05-03\n MMM, NYSE, -2.42, 2013-04-26\n MSA, NYSE, -20.31, 2013-04-25\n MNR, NYSE, -7.69, 2013-05-09\n MO, NYSE, 1.89, 2013-04-26\n MOD, NYSE, 5.88, 2013-05-31\nMOG.A, NYSE, -1.23, 2013-04-27\n MHK, NYSE, 3.57, 2013-05-03\n MSI, NYSE, -1.79, 2013-04-25\n MCY, NYSE, 46.81, 2013-04-30\n MRK, NYSE, 8.97, 2013-05-02\n MRO, NYSE, -28.17, 2013-05-08\n POWR, NYSE, 0.00, 2013-05-09\n MTG, NYSE, -60.00, 2013-05-01\n MTB, NYSE, 6.19, 2013-04-16\n MTX, NYSE, 0.00, 2013-04-26\n MUR, NYSE, 11.34, 2013-05-02\n MYE, NYSE, -11.11, 2013-04-25\n NBL, NYSE, 21.31, 2013-04-26\n NBR, NYSE, 13.79, 2013-04-24\n NE, NYSE, 3.51, 2013-04-18\n NEM, NYSE, -8.97, 2013-04-30\n NFG, NYSE, 7.37, 2013-05-03\n NHI, NYSE, 4.94, 2013-05-07\n NI, NYSE, -1.43, 2013-05-01\n NJR, NYSE, 3.16, 2013-05-03\n THC, NYSE, 17.86, 2013-05-01\n NNN, NYSE, 4.35, 2013-05-03\n NOC, NYSE, 12.14, 2013-04-25\n NR, NYSE, 5.88, 2013-04-26\n NSC, NYSE, 3.39, 2013-04-24\n NUE, NYSE, 4.00, 2013-04-19\n NVR, NYSE, -9.64, 2013-04-23\n NWL, NYSE, 9.38, 2013-05-04\n NWN, NYSE, -5.41, 2013-05-03\n NYT, NYSE, -20.00, 2013-04-26\n OCR, NYSE, 4.65, 2013-04-25\n OGE, NYSE, -32.35, 2013-05-03\n OHI, NYSE, 5.08, 2013-05-08\n OI, NYSE, 7.14, 2013-04-24\n OII, NYSE, 16.95, 2013-04-24\n OKE, NYSE, -6.90, 2013-05-01\n OLN, NYSE, 10.64, 2013-04-26\n BRS, NYSE, -1.94, 2013-05-23\n OMC, NYSE, 1.33, 2013-04-19\n OMI, NYSE, 4.76, 2013-04-24\n ORB, NYSE, 43.48, 2013-04-24\n ORI, NYSE, 600.00, 2013-04-26\n OSK, NYSE, 12.94, 2013-05-01\n OXY, NYSE, 7.64, 2013-04-26\n FCFS, NYSE, 0.00, 2013-04-18\n PBI, NYSE, 0.00, 2013-05-01\n PCG, NYSE, -10.00, 2013-05-03\n PCL, NYSE, 9.38, 2013-04-30\n PCP, NYSE, 1.81, 2013-05-10\n TPC, NYSE, 34.78, 2013-05-02\n PDS, NYSE, 14.29, 2013-04-26\n PEG, NYSE, 14.86, 2013-05-01\n PEI, NYSE, 4.76, 2013-04-23\n PEP, NYSE, 8.45, 2013-04-19\n PFE, NYSE, -1.82, 2013-05-01\n PG, NYSE, 3.13, 2013-04-25\n PGR, NYSE, -4.55, 2013-04-11\n PH, NYSE, 0.60, 2013-04-26\n PHM, NYSE, 31.25, 2013-04-26\n PKD, NYSE, 200.00, 2013-05-02\n PKY, NYSE, 15.38, 2013-05-07\n PNC, NYSE, 12.10, 2013-04-18\n PNM, NYSE, -10.00, 2013-05-07\n PNR, NYSE, 3.57, 2013-04-24\n PNW, NYSE, 175.00, 2013-05-04\n POM, NYSE, -4.00, 2013-05-04\n POT, NYSE, 3.28, 2013-04-26\n PPG, NYSE, 1.28, 2013-04-19\n PPL, NYSE, 0.00, 2013-05-03\n PRGO, NYSE, -1.39, 2013-05-08\n PL, NYSE, -4.30, 2013-05-07\n PSB, NYSE, 0.00, 2013-05-07\n WTR, NYSE, 7.41, 2013-05-02\n CSH, NYSE, 8.21, 2013-04-26\n PWR, NYSE, 24.14, 2013-05-03\n PX, NYSE, 0.00, 2013-04-25\n KWR, NYSE, 14.29, 2013-04-30\n R, NYSE, 1.28, 2013-04-24\n RBC, NYSE, -6.09, 2013-05-01\n RDC, NYSE, 5.77, 2013-05-02\n HTSI, NYSE, 11.67, 2013-05-03\n RES, NYSE, -33.33, 2013-04-25\n RGS, NYSE, -90.77, 2013-05-08\n RGR, NYSE, 15.38, 2013-04-30\n RHI, NYSE, -2.44, 2013-04-24\n RJF, NYSE, -9.33, 2013-04-25\n RLI, NYSE, -1.89, 2013-04-18\n ROG, NYSE, 0.00, 2013-05-01\n ROK, NYSE, 2.31, 2013-04-25\n ROL, NYSE, -5.88, 2013-04-25\n ROP, NYSE, 4.10, 2013-04-30\n RTI, NYSE, 20.00, 2013-05-01\n RTN, NYSE, 21.88, 2013-04-26\n RYL, NYSE, 43.33, 2013-04-25\n BSAC, NYSE, -21.74, 2013-04-26\n T, NYSE, 0.00, 2013-04-24\n SCG, NYSE, 7.77, 2013-04-26\n SCHW, NYSE, -6.25, 2013-04-16\n SCL, NYSE, -4.08, 2013-05-01\n SMG, NYSE, -19.60, 2013-05-07\n SEE, NYSE, -5.56, 2013-05-02\n SF, NYSE, 1.75, 2013-05-10\n SFE, NYSE, -46.15, 2013-04-26\n SHW, NYSE, 2.78, 2013-04-19\n SJI, NYSE, -8.43, 2013-05-04\n JOE, NYSE, -200.00, 2013-05-09\n SJW, NYSE, -12.50, 
2013-04-25\n SLB, NYSE, 2.02, 2013-04-20\n HSH, NYSE, 9.38, 2013-05-03\n AOS, NYSE, 24.68, 2013-04-24\n SMP, NYSE, 31.25, 2013-05-04\n SNA, NYSE, 4.48, 2013-04-19\n PII, NYSE, 5.94, 2013-04-24\n SNV, NYSE, 0.00, 2013-04-24\n SO, NYSE, -3.92, 2013-04-25\n SON, NYSE, -5.66, 2013-04-19\n SPA, NYSE, -46.15, 2013-05-08\n TRV, NYSE, 14.93, 2013-04-24\n SR, NYSE, -3.36, 2013-05-01\n NVE, NYSE, 12.50, 2013-05-04\n SCI, NYSE, 21.74, 2013-04-25\n SSP, NYSE, 58.33, 2013-05-07\n STT, NYSE, 3.23, 2013-04-20\n STI, NYSE, 3.28, 2013-04-20\n STJ, NYSE, 0.00, 2013-04-18\n STL, NYSE, 7.14, 2013-04-23\n STR, NYSE, -2.38, 2013-05-01\n STE, NYSE, 6.06, 2013-05-08\n SYK, NYSE, 1.98, 2013-04-25\n SUN, NYSE, -7.32, 2013-05-09\n SUP, NYSE, 5.88, 2013-05-04\n SWK, NYSE, 7.29, 2013-04-26\n SWN, NYSE, 7.69, 2013-05-03\n SWX, NYSE, 0.61, 2013-05-04\n SWY, NYSE, -2.78, 2013-04-26\n SYY, NYSE, 16.67, 2013-05-07\n TAC, NYSE, -33.33, 2013-04-24\n TNC, NYSE, -17.14, 2013-04-23\n TCB, NYSE, -15.79, 2013-04-20\n TCO, NYSE, 7.14, 2013-04-26\n TDS, NYSE, 350.00, 2013-05-04\n TDW, NYSE, 55.74, 2013-05-22\n TDY, NYSE, 10.31, 2013-04-25\n TE, NYSE, 11.76, 2013-05-01\n TER, NYSE, 200.00, 2013-04-25\n TEVA, NYSE, 1.82, 2013-05-03\n TEX, NYSE, -17.86, 2013-04-25\n TFX, NYSE, 1.98, 2013-05-01\n TEN, NYSE, 10.77, 2013-04-30\n TKR, NYSE, 0.00, 2013-04-25\n TMK, NYSE, 1.46, 2013-04-24\n TMO, NYSE, 6.20, 2013-04-25\n TOT, NYSE, -2.38, 2013-04-27\n TM, NYSE, 80.67, 2013-05-09\n TR, NYSE, -11.76, 2013-04-25\n TRN, NYSE, 13.75, 2013-05-01\n TRP, NYSE, -8.93, 2013-04-27\n TSO, NYSE, 2.82, 2013-05-02\n TSS, NYSE, -2.94, 2013-04-24\n TTI, NYSE, -40.00, 2013-05-09\n TXT, NYSE, -14.89, 2013-04-18\n TYL, NYSE, 26.09, 2013-04-25\n TSN, NYSE, -21.74, 2013-05-07\n UDR, NYSE, 3.03, 2013-05-01\n UFI, NYSE, -43.75, 2013-04-25\n UAM, NYSE, 17.65, 2013-04-30\n UHS, NYSE, 5.17, 2013-04-25\n UIL, NYSE, 3.06, 2013-05-03\n UIS, NYSE, -145.61, 2013-04-24\n UNH, NYSE, 0.00, 2013-04-19\n KMPR, NYSE, 35.85, 2013-05-03\n UNM, NYSE, 2.56, 2013-05-02\n UNP, NYSE, 3.57, 2013-04-19\n UNT, NYSE, 6.98, 2013-05-08\n URS, NYSE, -14.29, 2013-05-08\n USG, NYSE, -88.89, 2013-04-25\n MUX, NYSE, -300.00, 2013-05-10\n USM, NYSE, 214.29, 2013-05-04\n USPH, NYSE, -3.12, 2013-05-10\n UTL, NYSE, -9.20, 2013-04-24\n UTX, NYSE, -1.54, 2013-04-24\n VMI, NYSE, 15.60, 2013-04-19\n VAR, NYSE, 2.97, 2013-04-25\n CBS, NYSE, 7.35, 2013-05-02\n VLO, NYSE, 16.83, 2013-05-01\n VMC, NYSE, -24.32, 2013-05-03\n VLY, NYSE, -11.11, 2013-04-25\n VNO, NYSE, -38.38, 2013-05-07\n VSH, NYSE, 63.64, 2013-05-01\n WTS, NYSE, -14.04, 2013-05-01\n WBS, NYSE, -2.22, 2013-04-16\n WEC, NYSE, 7.04, 2013-05-01\n WFC, NYSE, 5.75, 2013-04-13\n WG, NYSE, -2400.00, 2013-05-09\n WGL, NYSE, 19.05, 2013-05-02\n WHR, NYSE, 1.03, 2013-04-25\n WMB, NYSE, -8.33, 2013-05-08\n WNC, NYSE, 0.00, 2013-05-01\n TEG, NYSE, 10.69, 2013-05-02\n WR, NYSE, 33.33, 2013-05-09\n WRE, NYSE, -4.35, 2013-04-26\n WRI, NYSE, 4.35, 2013-05-01\n WPP, NYSE, 33.33, 2013-04-30\n WSO, NYSE, 18.18, 2013-04-19\n WST, NYSE, 1.16, 2013-05-03\n WWW, NYSE, 50.00, 2013-04-17\n WY, NYSE, 18.18, 2013-04-27\n X, NYSE, -84.21, 2013-05-01\n XL, NYSE, 38.81, 2013-05-03\n XOM, NYSE, 4.43, 2013-04-26\n XRX, NYSE, 12.50, 2013-04-24\n Y, NYSE, 53.96, 2013-05-07\n HRG, NYSE, 60.00, 2013-05-10\n CRY, NYSE, 28.57, 2013-05-01\n CHK, NYSE, 30.43, 2013-05-02\n DDR, NYSE, 0.00, 2013-05-01\n ELS, NYSE, 0.71, 2013-04-23\n ALG, NYSE, 5.56, 2013-05-02\n ETH, NYSE, -22.22, 2013-04-24\n ATR, NYSE, -3.03, 2013-04-26\n GGP, NYSE, 4.17, 2013-04-30\n MSL, NYSE, 3.70, 2013-05-01\n 
RCL, NYSE, 84.21, 2013-04-26\n CWEI, NYSE, -61.22, 2013-04-25\n HR, NYSE, 0.00, 2013-05-02\n RGA, NYSE, 2.48, 2013-04-26\n RIG, NYSE, -7.92, 2013-05-09\n SKT, NYSE, 2.44, 2013-05-01\n TWI, NYSE, -16.28, 2013-04-25\n BDN, NYSE, 2.94, 2013-04-25\n KGC, NYSE, 25.00, 2013-05-08\n CPT, NYSE, 2.11, 2013-05-03\n SGY, NYSE, 18.84, 2013-05-07\n BFS, NYSE, -24.49, 2013-05-01\n BWA, NYSE, 6.56, 2013-04-26\n EQR, NYSE, -1.54, 2013-05-01\n CLP, NYSE, 3.03, 2013-04-26\n KOF, NYSE, -16.24, 2013-04-25\n OKS, NYSE, -27.59, 2013-05-01\n SQM, NYSE, -6.45, 2013-05-29\n BYD, NYSE, 114.29, 2013-04-25\n CBL, NYSE, 3.92, 2013-04-30\n DECK, NYSE, 133.33, 2013-04-26\n IT, NYSE, -2.50, 2013-05-03\n HST, NYSE, 21.74, 2013-05-04\n LXP, NYSE, 0.00, 2013-05-03\n REG, NYSE, 3.23, 2013-05-08\n TUC, NYSE, -24.00, 2013-05-03\n AF, NYSE, 7.69, 2013-04-18\n BFR, NYSE, -2.56, 2013-05-11\n HHS, NYSE, 10.00, 2013-04-26\n MHO, NYSE, 28.57, 2013-04-26\n NFX, NYSE, -2.17, 2013-04-24\n SPG, NYSE, 1.99, 2013-04-27\n SU, NYSE, -1.41, 2013-04-30\n SUI, NYSE, 2.20, 2013-04-26\n TV, NYSE, -22.50, 2013-04-26\n CGI, NYSE, -26.92, 2013-04-26\n CYT, NYSE, -12.79, 2013-04-19\n EMN, NYSE, 3.18, 2013-04-26\n GRT, NYSE, 14.29, 2013-04-25\n MAA, NYSE, 5.04, 2013-05-02\n PLT, NYSE, 4.62, 2013-05-08\n BZH, NYSE, 15.38, 2013-05-03\n ELX, NYSE, 114.29, 2013-05-03\n MLM, NYSE, -69.44, 2013-05-01\n AKS, NYSE, 41.67, 2013-04-24\n ALB, NYSE, -7.00, 2013-04-18\n VRX, NYSE, 1.56, 2013-05-03\n CBR, NYSE, 0.00, 2013-05-01\n MAC, NYSE, 8.86, 2013-05-02\n RKT, NYSE, 9.80, 2013-04-24\n RYN, NYSE, 27.42, 2013-04-26\n ADC, NYSE, -2.00, 2013-04-30\nBRK.B, NYSE, 52.31, 2013-05-04\n EXP, NYSE, 5.00, 2013-05-15\n GGB, NYSE, -66.67, 2013-05-08\n SSD, NYSE, -52.38, 2013-04-26\n ESS, NYSE, -0.53, 2013-05-02\n FR, NYSE, -7.69, 2013-04-26\n HIW, NYSE, -2.90, 2013-05-01\n IMAX, NYSE, 0.00, 2013-04-26\n AIV, NYSE, 2.13, 2013-05-03\n FCH, NYSE, 0.00, 2013-05-01\n ITGR, NYSE, 2.33, 2013-04-26\n NOK, NYSE, 33.33, 2013-04-19\n GEO, NYSE, -3.51, 2013-05-09\n CLI, NYSE, 0.00, 2013-04-26\n RS, NYSE, -5.22, 2013-04-26\n CPE, NYSE, 100.00, 2013-05-10\n KNX, NYSE, 0.00, 2013-04-25\n O, NYSE, 1.69, 2013-04-26\n COF, NYSE, 17.79, 2013-04-19\n IRS, NYSE, 10.34, 2013-05-18\n MCK, NYSE, -0.43, 2013-05-08\n SWC, NYSE, 200.00, 2013-04-30\n STM, NYSE, 23.53, 2013-04-23\n TEO, NYSE, 1.30, 2013-04-30\n TRK, NYSE, -400.00, 2013-05-02\n LMT, NYSE, 23.38, 2013-04-24\n APU, NYSE, -35.48, 2013-05-16\n AGU, NYSE, -12.15, 2013-05-10\n LH, NYSE, -1.69, 2013-04-20\n DDD, NYSE, -10.00, 2013-05-01\n AFG, NYSE, 10.84, 2013-05-09\n RMD, NYSE, 3.51, 2013-04-26\n WAB, NYSE, 3.60, 2013-04-25\n CIB, NYSE, 6.78, 2013-05-08\n CAM, NYSE, -5.41, 2013-04-26\n FCX, NYSE, 1.39, 2013-04-19\n RNR, NYSE, 34.25, 2013-05-02\n AVX, NYSE, 7.14, 2013-04-25\n RWT, NYSE, 46.81, 2013-05-03\n AXE, NYSE, -6.62, 2013-04-24\n CLB, NYSE, 6.09, 2013-04-18\n MD, NYSE, 0.92, 2013-05-03\n THG, NYSE, 30.69, 2013-04-30\n BAP, NYSE, -10.94, 2013-05-07\n DO, NYSE, 10.43, 2013-04-26\n RE, NYSE, 36.11, 2013-04-23\n DST, NYSE, -6.60, 2013-04-26\n EL, NYSE, 36.36, 2013-05-03\n ESC, NYSE, -57.14, 2013-05-03\n LXK, NYSE, -7.55, 2013-04-24\n MIG, NYSE, 7.69, 2013-05-01\n WAT, NYSE, -1.83, 2013-04-24\n EME, NYSE, 2.27, 2013-04-26\n HIG, NYSE, 10.84, 2013-04-30\n ITT, NYSE, 9.30, 2013-05-03\n SPN, NYSE, 0.00, 2013-04-26\n SWM, NYSE, 8.60, 2013-05-09\n SCCO, NYSE, -4.84, 2013-04-27\n RCI, NYSE, -1.27, 2013-04-23\n EIX, NYSE, 20.31, 2013-05-01\n IRM, NYSE, 0.00, 2013-05-02\n SPH, NYSE, -4.82, 2013-05-10\n CCJ, NYSE, 0.00, 2013-05-02\n PGI, NYSE, 0.00, 
2013-04-19\n CRR, NYSE, -14.61, 2013-04-26\n BVN, NYSE, -40.30, 2013-04-30\n FCN, NYSE, 13.46, 2013-05-10\n RPT, NYSE, 6.90, 2013-04-24\n TUP, NYSE, 4.42, 2013-04-25\n ASB, NYSE, 8.00, 2013-04-19\n GWR, NYSE, -10.11, 2013-05-02\n TBI, NYSE, -50.00, 2013-04-25\n FFG, NYSE, 12.66, 2013-05-03\n USNA, NYSE, 14.29, 2013-04-24\n CSV, NYSE, -3.03, 2013-05-08\n LVB, NYSE, 10.53, 2013-05-09\n ALR, NYSE, 6.25, 2013-05-10\n OCN, NYSE, 0.00, 2013-05-03\n PAA, NYSE, 37.50, 2013-05-07\n DNR, NYSE, 13.79, 2013-05-03\n HMY, NYSE, -119.23, 2013-05-04\n TGI, NYSE, 5.66, 2013-05-02\n PAG, NYSE, 1.61, 2013-04-30\n GEL, NYSE, -17.65, 2013-05-03\n IM, NYSE, 0.00, 2013-04-26\n NUS, NYSE, 13.92, 2013-05-03\n CNI, NYSE, -1.67, 2013-04-23\n LAD, NYSE, 16.67, 2013-04-25\n NSP, NYSE, 0.00, 2013-04-30\n DGX, NYSE, -14.42, 2013-04-18\n KRC, NYSE, 0.00, 2013-05-01\n MTH, NYSE, 32.00, 2013-04-25\n NCR, NYSE, 35.00, 2013-05-01\n OFG, NYSE, 2.78, 2013-04-26\n IVZ, NYSE, 10.64, 2013-05-01\n DX, NYSE, 9.68, 2013-05-02\n FBC, NYSE, -65.98, 2013-04-24\n ALV, NYSE, 1.57, 2013-04-27\n ARE, NYSE, 0.00, 2013-04-30\n BBT, NYSE, 2.99, 2013-04-19\n CGG, NYSE, 6.25, 2013-05-04\n BXP, NYSE, -0.83, 2013-05-01\n CBD, NYSE, -23.73, 2013-05-01\n MS, NYSE, 7.02, 2013-04-19\n SRT, NYSE, -314.29, 2013-05-10\n HLX, NYSE, 38.89, 2013-04-22\n FLS, NYSE, 3.61, 2013-04-25\n MT, NYSE, -400.00, 2013-05-11\n PXD, NYSE, 5.15, 2013-05-02\n SLG, NYSE, 0.83, 2013-04-24\n NAT, NYSE, -16.22, 2013-05-14\n CSU, NYSE, -36.36, 2013-05-07\n DRQ, NYSE, 22.50, 2013-05-04\n FDP, NYSE, -24.47, 2013-05-01\n NLY, NYSE, 30.56, 2013-05-02\n TLM, NYSE, -250.00, 2013-05-02\n TSM, NYSE, 13.04, 2013-04-19\n YUM, NYSE, 12.90, 2013-04-24\n AMG, NYSE, 12.38, 2013-05-01\n EPR, NYSE, -1.05, 2013-05-01\n FE, NYSE, 10.14, 2013-05-08\n LFL, NYSE, 80.00, 2013-05-15\n MTD, NYSE, 2.79, 2013-05-03\n SID, NYSE, -66.67, 2013-05-16\n IN, NYSE, -271.43, 2013-05-04\n CBZ, NYSE, 25.64, 2013-05-03\n URI, NYSE, 11.54, 2013-04-17\n INGR, NYSE, 6.82, 2013-05-03\n RAS, NYSE, 181.82, 2013-05-03\n UNS, NYSE, 35.00, 2013-04-30\n ASI, NYSE, 18.92, 2013-05-09\n ANH, NYSE, 15.38, 2013-04-30\n OFC, NYSE, 17.07, 2013-04-27\n GPX, NYSE, 0.00, 2013-05-03\n WAC, NYSE, 1427.27, 2013-05-10\n RBA, NYSE, -13.33, 2013-05-01\n WDR, NYSE, 1.61, 2013-04-24\n LHO, NYSE, 8.00, 2013-04-18\n LNT, NYSE, 18.03, 2013-05-04\n LVLT, NYSE, 7.14, 2013-04-26\n MFA, NYSE, -4.76, 2013-05-02\n OME, NYSE, 50.00, 2013-05-08\n EQY, NYSE, 6.90, 2013-05-02\n FII, NYSE, -2.38, 2013-04-26\n FMX, NYSE, -37.89, 2013-04-25\n LLL, NYSE, 3.63, 2013-04-26\n VTR, NYSE, 4.04, 2013-04-27\n WCN, NYSE, 20.00, 2013-05-02\n AVB, NYSE, 0.74, 2013-05-01\n GIL, NYSE, 5.36, 2013-05-03\n HZO, NYSE, -92.86, 2013-04-26\n AWR, NYSE, 38.00, 2013-05-11\n CLS, NYSE, 10.00, 2013-04-24\n EPD, NYSE, 16.67, 2013-05-01\n RSG, NYSE, 15.00, 2013-04-26\n WM, NYSE, -2.44, 2013-04-25\n AKR, NYSE, 3.33, 2013-04-24\n CVG, NYSE, 17.39, 2013-05-01\n RRC, NYSE, -38.89, 2013-04-26\n SAP, NYSE, 41.51, 2013-04-20\n CCI, NYSE, 0.00, 2013-04-25\n PQ, NYSE, 100.00, 2013-05-08\n WFT, NYSE, 0.00, 2013-05-03\n CAA, NYSE, 0.00, 2013-05-03\n ENB, NYSE, 13.21, 2013-05-09\n GMK, NYSE, 60.00, 2013-04-25\n MMR, NYSE, 0.00, 2013-05-07\n PB, NYSE, 2.38, 2013-04-25\n VIV, NYSE, -20.00, 2013-05-08\n AXL, NYSE, 53.33, 2013-05-04\n BP, NYSE, 33.33, 2013-05-01\n ETM, NYSE, 0.00, 2013-05-09\n HT, NYSE, 0.00, 2013-05-01\n BYI, NYSE, 10.71, 2013-04-25\n CEB, NYSE, 1.64, 2013-05-02\n INFY, NYSE, 5.41, 2013-04-13\n JLL, NYSE, 56.52, 2013-05-01\n AZN, NYSE, 5.22, 2013-04-26\n SFG, NYSE, 33.75, 
2013-04-24\n TREX, NYSE, 14.68, 2013-05-04\n GS, NYSE, 11.43, 2013-04-17\n SYX, NYSE, -157.14, 2013-05-01\n WCC, NYSE, -4.27, 2013-04-19\n JNPR, NYSE, 33.33, 2013-04-24\n RDN, NYSE, 28.57, 2013-05-02\n RAI, NYSE, 4.35, 2013-04-24\n SKX, NYSE, -27.78, 2013-05-16\n WTM, NYSE, 178.02, 2013-04-30\n NCI, NYSE, 12.50, 2013-04-26\n BLT, NYSE, -17.39, 2013-05-08\n QTM, NYSE, -33.33, 2013-05-09\n BLK, NYSE, 1.67, 2013-04-17\n CIR, NYSE, 4.00, 2013-05-03\n MSO, NYSE, 12.50, 2013-05-01\n PKG, NYSE, 10.71, 2013-04-23\n PKI, NYSE, -25.00, 2013-04-26\n WWE, NYSE, -37.50, 2013-05-03\n SNN, NYSE, -2.11, 2013-05-03\n UPS, NYSE, 2.97, 2013-04-26\n XOXO, NYSE, 16.67, 2013-05-10\n SLF, NYSE, 7.25, 2013-05-09\n CDR, NYSE, 9.09, 2013-05-10\n EW, NYSE, -5.26, 2013-04-24\n MET, NYSE, 13.85, 2013-05-01\n FBR, NYSE, -89.47, 2013-04-24\n VVC, NYSE, -7.58, 2013-05-02\n BAM, NYSE, 70.00, 2013-05-10\n NVS, NYSE, 4.00, 2013-04-25\n BHLB, NYSE, -1.82, 2013-04-30\n CRL, NYSE, -2.82, 2013-05-02\n CYH, NYSE, 3.57, 2013-04-30\n MBT, NYSE, -13.04, 2013-06-08\n MTOR, NYSE, 500.00, 2013-05-01\n CNQ, NYSE, -44.19, 2013-05-03\n ERJ, NYSE, -62.79, 2013-04-30\n VZ, NYSE, 3.03, 2013-04-19\n EVC, NYSE, 0.00, 2013-05-03\n PBR, NYSE, 0.00, 2013-04-27\n XEL, NYSE, 11.63, 2013-05-03\n ALE, NYSE, 10.67, 2013-05-09\n HW, NYSE, -30.00, 2013-05-01\n POL, NYSE, 14.81, 2013-05-02\n COH, NYSE, 3.70, 2013-04-24\n CXW, NYSE, 6.38, 2013-05-09\n DVA, NYSE, 3.37, 2013-05-08\n EXC, NYSE, 4.41, 2013-05-02\n MCO, NYSE, 11.49, 2013-05-04\n BRFS, NYSE, 23.53, 2013-04-30\n TU, NYSE, 3.77, 2013-05-10\n WIT, NYSE, 0.00, 2013-04-20\n ERF, NYSE, 100.00, 2013-05-11\n GG, NYSE, -35.00, 2013-05-03\n HNT, NYSE, 34.15, 2013-04-30\n NYCB, NYSE, 3.85, 2013-04-25\n SXT, NYSE, 3.33, 2013-04-19\n CPG, NYSE, -20.00, 2013-05-10\n AMX, NYSE, 16.67, 2013-04-20\n MPX, NYSE, 0.00, 2013-04-25\n OIS, NYSE, -2.70, 2013-04-25\n MMP, NYSE, 4.08, 2013-05-03\n PES, NYSE, 33.33, 2013-05-01\n ABB, NYSE, -12.12, 2013-04-25\n KMR, NYSE, -3.28, 2013-05-02\n GEN, NYSE, -41.18, 2013-05-07\n ADS, NYSE, -2.88, 2013-04-19\n CVI, NYSE, 25.00, 2013-05-03\n FTI, NYSE, -6.52, 2013-04-24\n PRA, NYSE, 27.63, 2013-05-07\n STO, NYSE, -16.46, 2013-05-03\n BEL, NYSE, 41.67, 2013-05-02\n FIS, NYSE, 1.64, 2013-05-01\n COL, NYSE, 0.86, 2013-04-20\n KAI, NYSE, 20.51, 2013-04-30\n ABC, NYSE, -2.25, 2013-04-26\n BG, NYSE, 18.56, 2013-04-26\n FRO, NYSE, 27.08, 2013-05-31\n ECA, NYSE, 150.00, 2013-04-24\n CIG, NYSE, 108.33, 2013-05-17\n EEP, NYSE, 16.67, 2013-05-01\n CVX, NYSE, 3.25, 2013-04-27\n GXP, NYSE, 41.67, 2013-05-10\n JHX, NYSE, -2.78, 2013-05-24\n PFG, NYSE, 5.33, 2013-04-26\n PVR, NYSE, 14.29, 2013-04-26\n AAP, NYSE, 2.48, 2013-05-24\n KND, NYSE, 36.11, 2013-05-02\n WTW, NYSE, 38.10, 2013-05-03\n CNC, NYSE, 5.00, 2013-04-24\n BCH, NYSE, 3.70, 2013-05-09\n NS, NYSE, -86.67, 2013-04-25\n ITUB, NYSE, -4.88, 2013-04-26\n SXL, NYSE, 26.74, 2013-05-09\n VALE, NYSE, 50.00, 2013-04-25\n TNP, NYSE, 150.00, 2013-05-25\n LCI, NYSE, 40.00, 2013-05-09\n GTI, NYSE, 50.00, 2013-04-26\n HNR, NYSE, -26.67, 2013-06-06\n MWE, NYSE, -90.00, 2013-05-09\n NLS, NYSE, 50.00, 2013-05-07\n RGC, NYSE, -7.14, 2013-05-01\n JAH, NYSE, 30.43, 2013-04-25\n NPO, NYSE, -23.29, 2013-05-03\n TRI, NYSE, 22.58, 2013-05-01\n CAE, NYSE, 10.53, 2013-05-17\n LF, NYSE, 28.57, 2013-05-02\n SNY, NYSE, -10.11, 2013-05-03\n BANC, NYSE, 400.00, 2013-05-09\n COP, NYSE, 0.00, 2013-04-26\n CNP, NYSE, -8.11, 2013-05-03\n EEQ, NYSE, -321.43, 2013-05-02\n MRH, NYSE, 32.58, 2013-04-25\n NGS, NYSE, 23.08, 2013-05-10\n NRP, NYSE, 4.88, 2013-05-07\n PXP, 
NYSE, 17.98, 2013-05-03\n XEC, NYSE, -0.93, 2013-05-08\n IAG, NYSE, 7.14, 2013-05-08\n EGO, NYSE, 0.00, 2013-05-03\n JNS, NYSE, -6.25, 2013-04-24\n PFS, NYSE, 14.81, 2013-04-27\n ENH, NYSE, 74.79, 2013-05-02\n CNX, NYSE, -5.00, 2013-04-26\n AMT, NYSE, -10.42, 2013-05-02\n ABG, NYSE, 13.43, 2013-04-25\n LII, NYSE, 22.22, 2013-04-23\n SRE, NYSE, -4.90, 2013-05-03\n AEE, NYSE, -21.43, 2013-05-03\n PLD, NYSE, 0.00, 2013-04-25\n SAH, NYSE, -2.38, 2013-04-24\n GPI, NYSE, 11.54, 2013-05-03\n FIX, NYSE, 800.00, 2013-05-02\n MMS, NYSE, 1.41, 2013-05-10\n SRI, NYSE, 50.00, 2013-05-10\n RTEC, NYSE, 50.00, 2013-05-03\n NOV, NYSE, -5.84, 2013-04-27\n DF, NYSE, 11.54, 2013-05-10\n SAM, NYSE, -17.74, 2013-05-02\n RL, NYSE, 8.46, 2013-05-24\n FLR, NYSE, 6.25, 2013-05-03\n ALL, NYSE, 2.27, 2013-05-02\n ATI, NYSE, 0.00, 2013-04-25\n EE, NYSE, 72.73, 2013-05-02\n AIT, NYSE, 0.00, 2013-05-03\n CHH, NYSE, -3.70, 2013-04-30\n FMS, NYSE, -17.78, 2013-05-01\n BCO, NYSE, 16.67, 2013-04-26\n CBB, NYSE, 133.33, 2013-05-10\n MWW, NYSE, 14.29, 2013-05-03\n PSA, NYSE, -3.09, 2013-05-10\n E, NYSE, 0.00, 2013-04-25\n JPM, NYSE, 15.22, 2013-04-13\n USB, NYSE, 0.00, 2013-04-17\n HON, NYSE, 6.14, 2013-04-20\n ITG, NYSE, 50.00, 2013-05-03\n ARB, NYSE, -15.49, 2013-05-08\n APL, NYSE, -28.95, 2013-04-30\n AVA, NYSE, 0.00, 2013-05-02\n AXS, NYSE, 85.71, 2013-04-26\n MOH, NYSE, 146.15, 2013-04-26\n CVD, NYSE, 4.17, 2013-05-02\n AHT, NYSE, 2.94, 2013-05-09\n GPK, NYSE, 25.00, 2013-04-26\n CNO, NYSE, 0.00, 2013-04-25\n AUQ, NYSE, -60.00, 2013-05-10\n NFP, NYSE, -5.45, 2013-05-04\n CRI, NYSE, 12.86, 2013-05-10\n FMD, NYSE, 27.27, 2013-04-30\n FPO, NYSE, 3.45, 2013-04-26\n TRQ, NYSE, -25.00, 2013-05-14\n WLL, NYSE, 2.17, 2013-04-25\n AEL, NYSE, 11.36, 2013-05-02\n AHL, NYSE, 0.95, 2013-04-25\n AUY, NYSE, -23.81, 2013-05-01\n CMP, NYSE, 24.32, 2013-04-30\n KRO, NYSE, -800.00, 2013-05-09\n TPX, NYSE, 3.33, 2013-05-03\n UTI, NYSE, -300.00, 2013-05-01\n PJC, NYSE, 9.09, 2013-04-18\n TRW, NYSE, 3.42, 2013-05-01\n AIZ, NYSE, -14.56, 2013-04-25\n HTH, NYSE, 11.43, 2013-05-07\n ETP, NYSE, 33.33, 2013-05-09\n LSE, NYSE, 0.00, 2013-05-09\n BBD, NYSE, 0.00, 2013-04-23\n NRG, NYSE, -37.04, 2013-05-08\n HOS, NYSE, 96.67, 2013-05-02\n ABR, NYSE, 84.62, 2013-05-04\n FHN, NYSE, 0.00, 2013-04-20\n AGO, NYSE, 86.11, 2013-05-10\n HSP, NYSE, 18.18, 2013-05-02\n HNI, NYSE, 250.00, 2013-04-18\n GHL, NYSE, -34.78, 2013-04-18\n XPO, NYSE, -16.44, 2013-05-08\n CVO, NYSE, -200.00, 2013-05-09\n CHE, NYSE, 9.92, 2013-04-19\n GNW, NYSE, 11.11, 2013-05-01\n CBG, NYSE, -5.88, 2013-04-26\n SFL, NYSE, -43.33, 2013-05-31\n NEU, NYSE, 3.28, 2013-04-25\n GOL, NYSE, -1200.00, 2013-05-14\n CAB, NYSE, 18.64, 2013-04-26\n LTM, NYSE, 3.08, 2013-04-26\n VVI, NYSE, 68.00, 2013-04-27\n WCG, NYSE, -8.70, 2013-05-04\n HEP, NYSE, -36.36, 2013-05-01\n DPZ, NYSE, 5.36, 2013-05-01\n BDC, NYSE, 6.33, 2013-05-03\n ENS, NYSE, 2.56, 2013-05-29\n BMR, NYSE, 7.89, 2013-05-02\n ACC, NYSE, -1.54, 2013-04-24\n KRG, NYSE, 27.27, 2013-05-03\n WLK, NYSE, 42.64, 2013-05-07\n EXR, NYSE, 4.55, 2013-04-30\n CNS, NYSE, 7.32, 2013-04-18\n IOC, NYSE, 161.54, 2013-05-14\n STON, NYSE, -150.00, 2013-05-08\n TTM, NYSE, 60.56, 2013-05-30\n CPL, NYSE, 7.69, 2013-05-11\n TPGI, NYSE, -460.00, 2013-05-07\n SHO, NYSE, 0.00, 2013-05-07\n CUBE, NYSE, 0.00, 2013-05-03\n NRF, NYSE, -51.35, 2013-05-04\n DLR, NYSE, -1.69, 2013-04-27\n MTL, NYSE, 100.00, 2013-06-19\n NWE, NYSE, 8.60, 2013-04-26\n ORA, NYSE, 550.00, 2013-05-08\n NP, NYSE, 7.25, 2013-05-09\n SMA, NYSE, -73.33, 2013-05-03\n BBG, NYSE, -2600.00, 
2013-05-03\n BXC, NYSE, 35.29, 2013-05-02\n KNL, NYSE, 8.33, 2013-04-19\n LVS, NYSE, 7.58, 2013-05-02\n HLF, NYSE, 18.69, 2013-04-30\n MIC, NYSE, -89.09, 2013-04-30\n PHH, NYSE, -81.13, 2013-05-02\n CE, NYSE, 44.30, 2013-04-19\n EDR, NYSE, 0.00, 2013-04-30\n WTI, NYSE, 34.62, 2013-05-08\n ARC, NYSE, 0.00, 2013-05-08\n PBH, NYSE, 5.88, 2013-05-17\n HUN, NYSE, 18.75, 2013-05-01\n WEX, NYSE, 3.16, 2013-05-02\n DLB, NYSE, 14.29, 2013-04-26\n DSX, NYSE, 66.67, 2013-05-23\n LAZ, NYSE, -17.65, 2013-04-27\n TGP, NYSE, 14.29, 2013-05-10\n TLP, NYSE, 7.69, 2013-05-08\n DRH, NYSE, 55.56, 2013-05-11\n HTGC, NYSE, 8.00, 2013-05-03\n KFN, NYSE, 27.78, 2013-05-02\n THS, NYSE, 5.71, 2013-05-10\n NSR, NYSE, -8.86, 2013-05-03\n WAL, NYSE, 14.29, 2013-04-19\n SLW, NYSE, -9.76, 2013-05-11\n MPW, NYSE, -3.85, 2013-04-27\n GNK, NYSE, -2.75, 2013-05-02\n MFB, NYSE, 28.57, 2013-05-09\nRDS.A, NYSE, 21.74, 2013-05-03\n ITC, NYSE, -3.45, 2013-04-24\n FTK, NYSE, -11.76, 2013-05-10\n PIKE, NYSE, -20.00, 2013-05-07\n ALJ, NYSE, 63.27, 2013-05-09\n DRC, NYSE, 2.38, 2013-04-26\n STN, NYSE, 0.00, 2013-05-10\n SSW, NYSE, -8.70, 2013-04-30\n CF, NYSE, 0.50, 2013-05-09\n HPY, NYSE, 12.50, 2013-05-01\n ROC, NYSE, 1.49, 2013-05-01\n WPZ, NYSE, -57.58, 2013-05-01\n LCC, NYSE, 29.17, 2013-04-24\n GLP, NYSE, -7.27, 2013-05-10\n AMP, NYSE, 1.27, 2013-04-23\n DHT, NYSE, 58.33, 2013-04-30\n FNF, NYSE, 5.00, 2013-05-02\n NM, NYSE, 52.38, 2013-05-22\n CCO, NYSE, -57.14, 2013-05-03\n BWP, NYSE, 5.00, 2013-04-30\n ICE, NYSE, 2.53, 2013-05-02\n BKD, NYSE, 50.00, 2013-05-02\n BAS, NYSE, 12.00, 2013-04-25\n CPA, NYSE, 21.21, 2013-05-14\n LYV, NYSE, 8.33, 2013-05-08\n WNR, NYSE, -6.93, 2013-05-03\n CMG, NYSE, 9.81, 2013-04-19\n RGP, NYSE, -50.00, 2013-05-09\n KOP, NYSE, -16.92, 2013-05-04\n TX, NYSE, 40.43, 2013-05-01\n UAL, NYSE, 10.09, 2013-04-26\n ETE, NYSE, -27.03, 2013-05-09\n RSO, NYSE, -45.00, 2013-05-08\n XCO, NYSE, 62.50, 2013-05-01\n PAC, NYSE, 30.00, 2013-04-26\n NYX, NYSE, 1.79, 2013-05-01\n TDG, NYSE, 0.61, 2013-05-08\n BMA, NYSE, 11.68, 2013-05-09\n THI, NYSE, 1.67, 2013-05-09\n BTE, NYSE, -112.00, 2013-05-10\n CNH, NYSE, 41.49, 2013-05-01\n GLA, NYSE, -82.35, 2013-05-02\n POR, NYSE, 0.00, 2013-05-02\n HIL, NYSE, 50.00, 2013-05-03\n HVB, NYSE, 12.50, 2013-04-24\n KS, NYSE, -9.30, 2013-05-08\n HK, NYSE, -28.57, 2013-05-03\n DCP, NYSE, 3.28, 2013-05-07\n DK, NYSE, 7.56, 2013-05-09\n CODI, NYSE, 0.00, 2013-05-08\n MA, NYSE, 0.65, 2013-05-02\n MWA, NYSE, 150.00, 2013-05-01\n KOG, NYSE, -21.43, 2013-05-03\n PWE, NYSE, -150.00, 2013-05-03\n PGTI, NYSE, 100.00, 2013-05-02\n AWH, NYSE, 8.45, 2013-04-25\n NSH, NYSE, -29.73, 2013-04-25\n WYN, NYSE, 7.58, 2013-04-25\n WNS, NYSE, 15.38, 2013-04-18\n PGH, NYSE, 0.00, 2013-05-02\n AYR, NYSE, 34.48, 2013-05-03\n EVR, NYSE, -24.49, 2013-04-25\n HBI, NYSE, 2.00, 2013-04-24\n WU, NYSE, 12.12, 2013-05-01\n OC, NYSE, 45.00, 2013-04-25\n DAC, NYSE, 44.44, 2013-04-30\n AWI, NYSE, -43.59, 2013-04-30\n SUSS, NYSE, 0.00, 2013-05-09\n DEI, NYSE, 5.71, 2013-05-08\n OB, NYSE, 79.31, 2013-04-30\n SBH, NYSE, -7.69, 2013-05-03\n EBS, NYSE, -144.44, 2013-05-03\n KBR, NYSE, 25.53, 2013-04-26\n AER, NYSE, 23.40, 2013-05-08\n NOA, NYSE, -442.86, 2013-06-11\n SPR, NYSE, 29.79, 2013-05-03\n ANW, NYSE, -7.14, 2013-05-16\n DCT, NYSE, 10.00, 2013-05-03\n SE, NYSE, 6.25, 2013-05-04\n TOO, NYSE, -17.86, 2013-05-10\n TSL, NYSE, -27.78, 2013-05-30\n TWC, NYSE, 2.92, 2013-04-26\n MVO, NYSE, -13.92, 2013-05-09\n CO, NYSE, 150.00, 2013-06-19\n EXK, NYSE, -18.75, 2013-05-07\n EIG, NYSE, 22.22, 2013-05-09\n HF, NYSE, -50.00, 
2013-05-02\n FIG, NYSE, 33.33, 2013-05-03\n NGLS, NYSE, -20.00, 2013-05-04\n TCAP, NYSE, -1.75, 2013-05-09\n GFA, NYSE, -211.11, 2013-05-14\n BR, NYSE, 18.18, 2013-05-08\n SCR, NYSE, 12.50, 2013-05-10\n CNK, NYSE, 12.00, 2013-05-08\n DAL, NYSE, 42.86, 2013-04-24\n ORN, NYSE, 42.86, 2013-05-03\n ACM, NYSE, 3.92, 2013-05-08\n SLH, NYSE, 5.00, 2013-05-08\n CLR, NYSE, 2.63, 2013-05-09\n BGS, NYSE, -5.13, 2013-04-19\n STAR, NYSE, 26.42, 2013-05-01\n YGE, NYSE, -40.00, 2013-05-31\n DFS, NYSE, 18.75, 2013-04-24\n TEL, NYSE, 7.04, 2013-04-25\n BX, NYSE, 1.85, 2013-04-19\n SEP, NYSE, 4.65, 2013-05-04\n BZ, NYSE, -77.78, 2013-05-03\n PPO, NYSE, -41.18, 2013-05-09\n PRO, NYSE, 100.00, 2013-05-03\n WBC, NYSE, 7.34, 2013-04-26\n DHX, NYSE, 0.00, 2013-04-24\n PMC, NYSE, 23.53, 2013-05-02\n HGG, NYSE, 3.33, 2013-05-21\n OWW, NYSE, -33.33, 2013-05-10\n VR, NYSE, 35.97, 2013-04-26\n CXO, NYSE, -27.50, 2013-05-02\n G, NYSE, 5.00, 2013-05-02\n EJ, NYSE, 89.47, 2013-05-16\n WX, NYSE, 11.11, 2013-05-14\n CMLP, NYSE, -92.86, 2013-05-08\n VMW, NYSE, 10.87, 2013-04-24\n CZZ, NYSE, -40.00, 2013-06-06\n CGA, NYSE, 6.67, 2013-05-14\n TDC, NYSE, -26.92, 2013-05-03\n FLY, NYSE, 61.73, 2013-05-03\n MAIN, NYSE, 2.04, 2013-05-10\n REN, NYSE, 100.00, 2013-05-07\n TGH, NYSE, -12.90, 2013-05-08\n DFT, NYSE, -5.00, 2013-05-08\n RF, NYSE, 15.00, 2013-04-24\n PZN, NYSE, 0.00, 2013-04-25\n LL, NYSE, 29.55, 2013-04-25\n NMM, NYSE, 0.00, 2013-04-26\n OZM, NYSE, 81.25, 2013-05-03\n ES, NYSE, 12.31, 2013-05-02\n MSCI, NYSE, 5.56, 2013-05-02\n ARR, NYSE, -21.74, 2013-05-03\n KW, NYSE, 62.50, 2013-05-08\n GTS, NYSE, 52.78, 2013-05-02\n FOR, NYSE, 450.00, 2013-05-09\n LRN, NYSE, 34.78, 2013-05-04\n TNK, NYSE, -100.00, 2013-05-10\n N, NYSE, -21.43, 2013-04-26\n DAN, NYSE, -33.33, 2013-04-26\n BIP, NYSE, 0.00, 2013-05-03\n CPN, NYSE, -6.67, 2013-05-03\n SOL, NYSE, -15.38, 2013-05-17\n PM, NYSE, -4.44, 2013-04-19\n V, NYSE, 6.08, 2013-05-02\n IPI, NYSE, 5.26, 2013-05-02\n AWK, NYSE, -5.88, 2013-05-08\n HTS, NYSE, -7.46, 2013-04-23\n DPS, NYSE, 12.77, 2013-04-25\n CFX, NYSE, 8.33, 2013-04-26\n WES, NYSE, -22.50, 2013-05-02\n SB, NYSE, 0.00, 2013-05-16\n LO, NYSE, 4.76, 2013-04-25\n LPS, NYSE, 0.00, 2013-04-25\n FF, NYSE, -6.90, 2013-05-08\n NNA, NYSE, 200.00, 2013-05-03\n EPB, NYSE, 7.41, 2013-04-18\n JBT, NYSE, -17.65, 2013-05-08\n DL, NYSE, -33.33, 2013-05-22\n RAX, NYSE, -5.00, 2013-05-09\n GSL, NYSE, -50.00, 2013-05-10\n HCI, NYSE, 66.06, 2013-05-03\n EC, NYSE, -18.58, 2013-05-04\n CLW, NYSE, -98.08, 2013-04-25\n MJN, NYSE, -1.16, 2013-04-26\n EPC, NYSE, 39.53, 2013-05-02\n BPI, NYSE, 0.00, 2013-05-07\n RST, NYSE, 25.00, 2013-05-09\n DGI, NYSE, 22.22, 2013-05-08\n SWI, NYSE, 6.25, 2013-05-01\n CYS, NYSE, -45.16, 2013-04-18\n IVR, NYSE, 1.59, 2013-05-02\n BUD, NYSE, 50.65, 2013-05-01\n SLD, NYSE, -66.67, 2013-05-15\n PMT, NYSE, 11.11, 2013-04-24\n STWD, NYSE, -20.93, 2013-05-09\n CFN, NYSE, 11.32, 2013-05-10\n SPB, NYSE, 7.32, 2013-05-01\n ARI, NYSE, 33.33, 2013-05-02\n CLNY, NYSE, -26.47, 2013-05-07\n ART, NYSE, -800.00, 2013-05-07\n SEM, NYSE, -11.11, 2013-05-03\n BSBR, NYSE, -71.43, 2013-04-26\n DOLE, NYSE, -50.00, 2013-05-03\n VSI, NYSE, 2.86, 2013-05-08\n TWO, NYSE, -9.38, 2013-05-08\n CVE, NYSE, -6.38, 2013-04-25\n H, NYSE, 12.50, 2013-05-02\n LEA, NYSE, 19.27, 2013-04-26\n SVN, NYSE, -81.82, 2013-05-14\n CLD, NYSE, -59.26, 2013-05-01\n AOL, NYSE, 6.25, 2013-05-09\n CHSP, NYSE, 25.00, 2013-05-08\n PEB, NYSE, 5.88, 2013-04-26\n CIT, NYSE, -8.99, 2013-04-24\n KAR, NYSE, -3.03, 2013-05-02\n CIE, NYSE, -15.38, 2013-05-01\n TMH, NYSE, 
0.00, 2013-05-01\n KRA, NYSE, -75.00, 2013-05-02\n SYA, NYSE, 8.82, 2013-04-25\n TRNO, NYSE, -11.11, 2013-05-09\n PDM, NYSE, 0.00, 2013-05-03\n GNRC, NYSE, 23.47, 2013-05-03\n ACW, NYSE, -9.68, 2013-04-24\n BALT, NYSE, -9.52, 2013-05-02\n ST, NYSE, 4.35, 2013-04-24\n SEMG, NYSE, -15.00, 2013-05-09\n CALX, NYSE, 50.00, 2013-04-26\n MXL, NYSE, 33.33, 2013-05-01\n STNG, NYSE, 60.00, 2013-04-30\n PRI, NYSE, -4.35, 2013-05-08\n SDRL, NYSE, 16.95, 2013-05-29\n CLDT, NYSE, 7.50, 2013-05-08\n EXL, NYSE, 5.00, 2013-05-02\n LYB, NYSE, 9.09, 2013-04-27\n PNG, NYSE, 4.35, 2013-05-07\n PLOW, NYSE, 13.33, 2013-05-07\n SIX, NYSE, 19.61, 2013-04-23\n NKA, NYSE, -140.00, 2013-05-10\n RRTS, NYSE, 3.57, 2013-05-02\n JKS, NYSE, 66.27, 2013-06-08\n CODE, NYSE, 7.69, 2013-05-01\n FAF, NYSE, -31.71, 2013-04-26\n QEP, NYSE, -6.67, 2013-05-01\n OAS, NYSE, 31.37, 2013-05-08\n HPP, NYSE, 18.18, 2013-05-07\n FN, NYSE, 3.70, 2013-04-30\n ECT, NYSE, 7.32, 2013-05-11\n QUAD, NYSE, -88.10, 2013-05-08\n KKR, NYSE, 4.76, 2013-04-26\n RLD, NYSE, 70.00, 2013-06-07\n AMRC, NYSE, -200.00, 2013-05-10\n GDOT, NYSE, 9.37, 2013-05-01\n AT, NYSE, 40.00, 2013-05-09\n ENV, NYSE, 0.00, 2013-05-17\n COR, NYSE, 0.00, 2013-04-25\n VC, NYSE, 75.65, 2013-05-10\n CCG, NYSE, 5.88, 2013-05-01\n EFC, NYSE, -32.00, 2013-05-07\n TOWR, NYSE, 255.56, 2013-05-03\n CHMT, NYSE, -21.05, 2013-05-03\n HBM, NYSE, 200.00, 2013-05-02\n EXAM, NYSE, 0.00, 2013-05-09\n XUE, NYSE, -25.00, 2013-05-17\n CMRE, NYSE, 26.09, 2013-04-25\n NOAH, NYSE, 112.50, 2013-05-07\n IPHI, NYSE, 18.18, 2013-05-02\n BITA, NYSE, 0.00, 2013-05-10\n BAH, NYSE, 11.43, 2013-05-23\n GM, NYSE, 19.64, 2013-05-03\n XNY, NYSE, 28.57, 2013-05-20\n TROX, NYSE, -181.25, 2013-05-09\n TRGP, NYSE, 52.38, 2013-05-04\n DANG, NYSE, 21.05, 2013-05-17\n YOKU, NYSE, 0.00, 2013-05-16\n FRC, NYSE, 0.00, 2013-04-16\n RFP, NYSE, 64.29, 2013-05-01\n ISS, NYSE, 50.00, 2013-05-18\n WD, NYSE, -45.65, 2013-05-09\n FLT, NYSE, 10.39, 2013-05-03\n GCAP, NYSE, -15.38, 2013-05-08\n FRF, NYSE, -27.27, 2013-05-14\n SWFT, NYSE, 23.53, 2013-04-23\n AG, NYSE, -8.00, 2013-05-16\n QRE, NYSE, 0.00, 2013-05-09\n AAT, NYSE, 8.57, 2013-05-01\n MCC, NYSE, -2.70, 2013-05-03\n NLSN, NYSE, 9.09, 2013-04-26\n AGRO, NYSE, -100.00, 2013-05-17\n BKU, NYSE, 4.44, 2013-04-25\n INXN, NYSE, -7.14, 2013-05-09\n NPTN, NYSE, 10.00, 2013-05-10\n INN, NYSE, 5.88, 2013-05-07\n KMI, NYSE, -12.50, 2013-04-18\n HCA, NYSE, -4.82, 2013-05-03\n MX, NYSE, 13.04, 2013-05-01\n HII, NYSE, 0.00, 2013-05-09\n QIHU, NYSE, 100.00, 2013-05-20\n APO, NYSE, 56.20, 2013-05-07\n GNC, NYSE, 1.39, 2013-04-27\n SDT, NYSE, 16.07, 2013-05-11\n UAN, NYSE, 4.26, 2013-05-02\n ARCO, NYSE, -142.86, 2013-05-01\n ELLI, NYSE, -16.67, 2013-05-01\n TMS, NYSE, -12.00, 2013-04-26\n SQNS, NYSE, 0.00, 2013-04-26\n STAG, NYSE, 3.13, 2013-05-07\n AL, NYSE, 5.13, 2013-05-10\n TLLP, NYSE, -14.89, 2013-05-07\n RENN, NYSE, 85.71, 2013-05-14\n NQ, NYSE, -16.67, 2013-05-16\n KOS, NYSE, -37.50, 2013-05-10\n RLJ, NYSE, 10.81, 2013-05-09\n NGL, NYSE, -62.86, 2013-06-15\n FENG, NYSE, 60.00, 2013-05-15\n LNKD, NYSE, 340.00, 2013-05-03\n NMFC, NYSE, -2.86, 2013-05-07\n ACTV, NYSE, 32.14, 2013-05-03\n FIO, NYSE, 20.00, 2013-04-25\n TAOM, NYSE, -25.00, 2013-05-24\n RATE, NYSE, 10.00, 2013-05-01\n VHS, NYSE, 8.33, 2013-05-01\n MPC, NYSE, 0.00, 2013-05-01\n MITT, NYSE, -9.64, 2013-05-07\n OILT, NYSE, 17.07, 2013-05-09\n SXC, NYSE, -40.00, 2013-04-26\n AMTG, NYSE, 14.06, 2013-05-07\n AMID, NYSE, -200.00, 2013-05-14\n WAIR, NYSE, 22.22, 2013-04-30\n PER, NYSE, -7.58, 2013-05-11\n PPP, NYSE, 260.00, 
2013-05-09\n FSM, NYSE, -28.57, 2013-05-08\n FBHS, NYSE, 41.18, 2013-05-03\n XLS, NYSE, 73.91, 2013-05-04\n XYL, NYSE, -3.57, 2013-05-01\n GNE, NYSE, -550.00, 2013-05-08\n NDRO, NYSE, -8.11, 2013-05-04\n RNF, NYSE, -29.63, 2013-05-10\n VAC, NYSE, 10.20, 2013-04-26\n CHKR, NYSE, -2.90, 2013-05-10\n PACD, NYSE, 250.00, 2013-05-07\n INVN, NYSE, -13.33, 2013-05-03\n DLPH, NYSE, 11.46, 2013-05-02\n MN, NYSE, 0.00, 2013-05-02\n RRMS, NYSE, 51.28, 2013-05-10\n WPX, NYSE, -4.17, 2013-05-03\n LPI, NYSE, -15.38, 2013-05-10\n SN, NYSE, -82.61, 2013-05-08\n KORS, NYSE, 35.14, 2013-05-30\n BCEI, NYSE, -20.93, 2013-05-10\n BOXC, NYSE, 2.56, 2013-04-23\n PVG, NYSE, -25.00, 2013-05-11\n POST, NYSE, -29.63, 2013-05-14\n SLCA, NYSE, -2.78, 2013-05-01\n MTDR, NYSE, 0.00, 2013-05-09\n GWAY, NYSE, -120.00, 2013-05-07\n EPAM, NYSE, -14.71, 2013-05-09\n RNDY, NYSE, -9.52, 2013-05-10\n PRLB, NYSE, 0.00, 2013-04-26\n YELP, NYSE, -40.00, 2013-05-02\n NSM, NYSE, 23.19, 2013-05-08\n ALSN, NYSE, 95.24, 2013-04-30\n DWRE, NYSE, -22.73, 2013-05-08\n VNTV, NYSE, 3.70, 2013-05-07\n ET, NYSE, 0.00, 2013-05-10\n VCRA, NYSE, -160.00, 2013-05-03\n RM, NYSE, -1.82, 2013-05-03\n BNNY, NYSE, 3.57, 2013-06-11\n MM, NYSE, 25.00, 2013-05-09\n RXN, NYSE, 0.00, 2013-05-22\n GLOG, NYSE, -16.67, 2013-05-16\n RPAI, NYSE, 9.52, 2013-05-07\n OAK, NYSE, 39.86, 2013-05-08\n FET, NYSE, 3.03, 2013-04-26\n MRC, NYSE, 4.65, 2013-05-03\n PSX, NYSE, 17.74, 2013-05-02\n TUMI, NYSE, 6.67, 2013-05-09\n ACRE, NYSE, -5.88, 2013-05-16\n EVER, NYSE, 13.79, 2013-04-25\n PDH, NYSE, -13.24, 2013-04-25\n ROYT, NYSE, 10.00, 2013-05-11\n WMC, NYSE, -2.15, 2013-05-16\n WAGE, NYSE, 35.71, 2013-05-10\n HTA, NYSE, 6.67, 2013-05-08\n ALEX, NYSE, -28.57, 2013-05-10\n BKW, NYSE, 0.00, 2013-04-27\n CNCO, NYSE, -88.24, 2013-05-31\n EQM, NYSE, 41.30, 2013-04-26\n NOW, NYSE, 0.00, 2013-04-25\n EGL, NYSE, -11.24, 2013-05-14\n NGVC, NYSE, 7.69, 2013-05-10\n NTI, NYSE, 3.51, 2013-05-14\n AMRE, NYSE, 4.00, 2013-05-08\n GMED, NYSE, 5.00, 2013-05-03\n MANU, NYSE, -25.00, 2013-05-03\n HCLP, NYSE, -23.08, 2013-05-15\n ADT, NYSE, -4.65, 2013-05-02\n TRLA, NYSE, -75.00, 2013-05-01\n SRC, NYSE, 19.44, 2013-05-09\n NBHC, NYSE, -50.00, 2013-04-30\n BSMX, NYSE, 30.43, 2013-04-27\n HY, NYSE, 67.05, 2013-05-02\n SMLP, NYSE, -10.71, 2013-05-14\n DYN, NYSE, -254.55, 2013-05-03\n LXFR, NYSE, 0.00, 2013-05-08\n LOCK, NYSE, 25.00, 2013-05-02\n JMI, NYSE, 224.44, 2013-05-08\n BERY, NYSE, 16.67, 2013-05-03\n FLTX, NYSE, 8.33, 2013-05-09\n ANFI, NYSE, 0.00, 2013-06-11\n SSTK, NYSE, 23.08, 2013-05-09\n RLGY, NYSE, -13.33, 2013-05-02\n SDLP, NYSE, 88.64, 2013-05-29\n MPLX, NYSE, -7.14, 2013-05-01\n WWAV, NYSE, 6.67, 2013-05-10\n SXE, NYSE, -44.44, 2013-05-09\n DKL, NYSE, 31.58, 2013-05-08\n SCM, NYSE, -8.82, 2013-05-10\n RKUS, NYSE, -100.00, 2013-05-07\n ALDW, NYSE, -1.32, 2013-05-08\n WGP, NYSE, 0.00, 2013-05-02\n ABBV, NYSE, 3.03, 2013-04-27\n PBF, NYSE, -54.72, 2013-05-03\n SBY, NYSE, -433.33, 2013-05-14\n RIOM, NYSE, 0.00, 2013-05-15\n USAC, NYSE, -30.00, 2013-05-10\n CVRR, NYSE, -2.56, 2013-05-03\n SXCP, NYSE, -9.76, 2013-04-26\n BFAM, NYSE, 81.82, 2013-05-10\n TPH, NYSE, 200.00, 2013-05-15\n ZTS, NYSE, 5.88, 2013-05-01\n BCC, NYSE, 146.15, 2013-04-23\n AGI, NYSE, 0.00, 2013-04-26\n APAM, NYSE, -11.32, 2013-05-02\n SSNI, NYSE, -1211.77, 2013-05-02\n MODN, NYSE, 0.00, 2013-05-08\n AVIV, NYSE, 150.00, 2013-05-08\n OAKS, NYSE, 509.09, 2013-05-04\n MRIN, NYSE, -7.50, 2013-05-09\n PF, NYSE, 17.24, 2013-05-16\n TMHC, NYSE, -66.67, 2013-05-16\n ARPI, NYSE, -600.00, 2013-06-25\n CSTM, NYSE, 
-105.08, 2013-06-18\n DDC, NYSE, -80.00, 2013-06-06\n ABM, NYSE, 9.09, 2013-06-04\n ANN, NYSE, 4.76, 2013-06-07\n BBY, NYSE, 28.00, 2013-05-22\n BF.B, NYSE, -2.17, 2013-06-06\n BKE, NYSE, -4.88, 2013-05-24\n NCS, NYSE, -21.74, 2013-06-05\n BNS, NYSE, -0.83, 2013-05-29\n BRC, NYSE, -6.78, 2013-05-17\n CATO, NYSE, 1.94, 2013-05-24\n COO, NYSE, 9.49, 2013-06-07\n CPB, NYSE, 10.71, 2013-05-21\n CFI, NYSE, 10.81, 2013-06-13\n DCI, NYSE, -4.17, 2013-05-18\n DDS, NYSE, 15.38, 2013-05-15\n DE, NYSE, 0.73, 2013-05-16\n DY, NYSE, 0.00, 2013-05-22\n EV, NYSE, 0.00, 2013-05-23\n ESL, NYSE, -11.81, 2013-05-31\n M, NYSE, 3.77, 2013-05-16\n GCO, NYSE, 11.90, 2013-06-01\n GPS, NYSE, 2.90, 2013-05-24\n HD, NYSE, 7.79, 2013-05-22\n HEI, NYSE, 10.00, 2013-05-23\n HOV, NYSE, 120.00, 2013-06-06\n HRB, NYSE, -1.93, 2013-06-13\n HRL, NYSE, 0.00, 2013-05-24\n HPQ, NYSE, 7.41, 2013-05-23\n JCP, NYSE, -12.93, 2013-05-17\n KR, NYSE, 4.55, 2013-06-21\n KSS, NYSE, 15.79, 2013-05-17\n LB, NYSE, 4.35, 2013-05-23\n LOW, NYSE, -3.92, 2013-05-23\n LZB, NYSE, 7.14, 2013-06-19\n MDT, NYSE, 6.80, 2013-05-22\n MEI, NYSE, 60.00, 2013-06-21\n MPR, NYSE, -33.33, 2013-06-07\n NAV, NYSE, -302.75, 2013-06-11\n JWN, NYSE, -3.95, 2013-05-17\n OXM, NYSE, 5.13, 2013-06-12\n PBY, NYSE, -85.71, 2013-06-11\n PLL, NYSE, 1.37, 2013-05-31\n PNY, NYSE, 0.00, 2013-06-08\n PVH, NYSE, 39.42, 2013-06-13\n THO, NYSE, -7.87, 2013-06-07\n TIF, NYSE, 32.08, 2013-05-29\n TJX, NYSE, 0.00, 2013-05-22\n TOL, NYSE, 0.00, 2013-05-23\n TTC, NYSE, 10.92, 2013-05-24\n VAL, NYSE, 2.25, 2013-05-15\n JW.A, NYSE, -16.47, 2013-06-19\n TGT, NYSE, 23.53, 2013-05-23\n WMT, NYSE, -0.87, 2013-05-17\n WSM, NYSE, 11.11, 2013-05-24\n FL, NYSE, 3.41, 2013-05-25\n CHS, NYSE, -11.11, 2013-05-30\n BKS, NYSE, 52.22, 2013-06-26\n CAL, NYSE, 45.45, 2013-05-30\n SIG, NYSE, 0.89, 2013-05-24\n ZLC, NYSE, 1200.00, 2013-05-23\n AEO, NYSE, 5.88, 2013-05-23\n FGP, NYSE, 15.69, 2013-06-07\n BMO, NYSE, -4.73, 2013-05-30\n RY, NYSE, -2.34, 2013-05-31\n GEF, NYSE, 1.45, 2013-06-06\n SKS, NYSE, 0.00, 2013-05-22\n TD, NYSE, 1.09, 2013-05-24\n ANF, NYSE, -80.00, 2013-05-25\n CIEN, NYSE, 20.00, 2013-06-07\n KMG, NYSE, 8.70, 2013-06-11\n IRET, NYSE, 11.76, 2013-07-02\n CM, NYSE, 0.00, 2013-05-31\n UBA, NYSE, 12.00, 2013-06-08\n KFY, NYSE, 3.23, 2013-06-18\n KKD, NYSE, 25.00, 2013-05-31\n MVC, NYSE, -37.50, 2013-06-11\n CBK, NYSE, 150.00, 2013-06-08\n SJM, NYSE, 12.17, 2013-06-07\n BIG, NYSE, 0.00, 2013-05-31\n JOY, NYSE, 11.61, 2013-05-31\n SSI, NYSE, -122.22, 2013-05-18\n GME, NYSE, 15.00, 2013-05-24\n DKS, NYSE, 0.00, 2013-05-22\n A, NYSE, 14.93, 2013-05-15\n MTN, NYSE, -3.62, 2013-06-07\n GES, NYSE, 75.00, 2013-05-31\n CRM, NYSE, -600.00, 2013-05-24\n NWY, NYSE, 128.57, 2013-05-24\n PAY, NYSE, -7.69, 2013-06-06\n DSW, NYSE, 11.11, 2013-05-30\n NX, NYSE, -300.00, 2013-06-08\n DG, NYSE, -1.39, 2013-06-05\n EXPR, NYSE, 5.56, 2013-05-31\n P, NYSE, 0.00, 2013-05-23\n GWRE, NYSE, 44.44, 2013-05-29\n BLOX, NYSE, 100.00, 2013-05-24\n TLYS, NYSE, 14.29, 2013-05-30\n PANW, NYSE, -900.00, 2013-05-31\n WDAY, NYSE, 13.04, 2013-05-23\n RH, NYSE, 50.00, 2013-06-14\n RALY, NYSE, 14.78, 2013-06-07\n AIR, NYSE, 13.64, 2013-07-26\n ATU, NYSE, -1.59, 2013-06-20\n AZO, NYSE, 0.69, 2013-05-22\n AZZ, NYSE, -8.20, 2013-06-29\n CAG, NYSE, 1.69, 2013-06-28\n CLC, NYSE, -1.49, 2013-06-20\n CMC, NYSE, -15.79, 2013-06-28\n FC, NYSE, 18.18, 2013-07-10\n FDO, NYSE, 1.94, 2013-07-11\n FDX, NYSE, 8.67, 2013-06-20\n FUL, NYSE, -5.63, 2013-06-27\n GIS, NYSE, -1.85, 2013-06-27\n KBH, NYSE, 20.00, 2013-06-28\n LEN, NYSE, 30.30, 
2013-06-26\n LNN, NYSE, 12.92, 2013-06-27\n MKC, NYSE, 0.00, 2013-06-28\n RT, NYSE, -36.84, 2013-07-25\n MCS, NYSE, -6.25, 2013-07-26\n MSM, NYSE, 9.37, 2013-07-11\n NKE, NYSE, 2.70, 2013-06-28\n ORCL, NYSE, 0.00, 2013-06-21\n PIR, NYSE, 0.00, 2013-06-21\n PKE, NYSE, -13.79, 2013-06-27\n RAD, NYSE, 0.00, 2013-06-21\n RPM, NYSE, 7.46, 2013-07-23\n SVU, NYSE, 250.00, 2013-07-19\n TISI, NYSE, 0.00, 2013-08-07\n TXI, NYSE, 116.00, 2013-07-11\n UNF, NYSE, 2.88, 2013-06-27\n WGO, NYSE, 0.00, 2013-06-28\n WOR, NYSE, -7.46, 2013-06-28\n JBL, NYSE, 4.35, 2013-06-20\n GBX, NYSE, -5.66, 2013-07-03\n DRI, NYSE, -1.94, 2013-06-22\n FDS, NYSE, -1.71, 2013-06-19\n KMX, NYSE, 12.28, 2013-06-22\n SCS, NYSE, 0.00, 2013-06-20\n SJR, NYSE, 16.28, 2013-06-29\n RHT, NYSE, 9.09, 2013-06-20\n OMN, NYSE, 14.29, 2013-06-28\n MON, NYSE, 3.75, 2013-06-27\n GPN, NYSE, -3.92, 2013-07-26\n AYI, NYSE, 7.78, 2013-07-03\n CCL, NYSE, 50.00, 2013-06-26\n CUK, NYSE, 50.00, 2013-06-26\n STZ, NYSE, -7.32, 2013-07-03\n ACN, NYSE, 0.00, 2013-06-28\n SNX, NYSE, 0.00, 2013-06-26\n TAL, NYSE, 66.67, 2013-07-23\n IHS, NYSE, 1.45, 2013-06-21\n EDU, NYSE, 20.00, 2013-07-24\n ZEP, NYSE, -31.71, 2013-07-03\n MG, NYSE, -5.88, 2013-08-08\n MOS, NYSE, -0.88, 2013-07-16\n ABT, NYSE, 4.55, 2013-07-18\n ABX, NYSE, 17.86, 2013-08-02\n AB, NYSE, 7.89, 2013-08-01\n TAP, NYSE, 8.63, 2013-08-07\n ACO, NYSE, 1.79, 2013-07-27\n ADM, NYSE, 9.52, 2013-08-07\n AEM, NYSE, -85.71, 2013-07-25\n AEP, NYSE, -5.19, 2013-07-26\n AES, NYSE, 23.08, 2013-08-09\n AET, NYSE, 9.35, 2013-07-31\n AFL, NYSE, 6.58, 2013-07-31\n AGCO, NYSE, 18.78, 2013-08-01\n AGN, NYSE, 1.01, 2013-07-26\n HES, NYSE, 7.09, 2013-08-01\n AIG, NYSE, 31.76, 2013-08-02\n AIN, NYSE, -23.08, 2013-08-01\n AJG, NYSE, 5.80, 2013-07-31\n ALU, NYSE, 33.33, 2013-07-31\n MATX, NYSE, 6.82, 2013-08-08\n ALK, NYSE, -0.68, 2013-07-26\n BEAM, NYSE, 6.67, 2013-08-09\n AME, NYSE, 0.00, 2013-08-08\n TWX, NYSE, 10.67, 2013-08-08\n AVD, NYSE, -17.14, 2013-08-06\n AMN, NYSE, 20.00, 2013-08-02\n AN, NYSE, -1.35, 2013-07-19\n AON, NYSE, 0.91, 2013-07-27\n APA, NYSE, -0.50, 2013-08-02\n APC, NYSE, 16.67, 2013-07-30\n APD, NYSE, 0.00, 2013-07-24\n APH, NYSE, 1.06, 2013-07-19\n ARG, NYSE, -0.87, 2013-07-26\n AAN, NYSE, 0.00, 2013-07-25\n ARW, NYSE, 8.74, 2013-07-25\n ASGN, NYSE, 14.29, 2013-07-25\n ASH, NYSE, -8.29, 2013-07-26\n ASR, NYSE, 21.90, 2013-07-23\n GAS, NYSE, 51.85, 2013-08-01\n ATO, NYSE, 13.51, 2013-08-07\n ATW, NYSE, 0.74, 2013-08-01\n AVP, NYSE, 11.54, 2013-08-02\n AVT, NYSE, 3.16, 2013-08-08\n AVY, NYSE, 2.90, 2013-07-24\n AXP, NYSE, 4.96, 2013-07-18\n B, NYSE, 0.00, 2013-07-27\n BA, NYSE, 5.70, 2013-07-25\n BAC, NYSE, 28.00, 2013-07-18\n BAX, NYSE, 2.65, 2013-07-19\n BC, NYSE, 13.89, 2013-07-26\n OMX, NYSE, -33.33, 2013-08-07\n BCE, NYSE, -2.67, 2013-08-09\n BCR, NYSE, 2.90, 2013-07-24\n BDX, NYSE, 7.48, 2013-08-02\n BEN, NYSE, 1.18, 2013-07-30\n BGG, NYSE, 15.79, 2013-08-16\n BHE, NYSE, 10.71, 2013-07-26\n BHI, NYSE, -6.15, 2013-07-20\n BID, NYSE, -9.56, 2013-08-07\n BIO, NYSE, 7.14, 2013-08-07\n BK, NYSE, 6.90, 2013-07-18\n BKH, NYSE, -2.38, 2013-08-06\n WRB, NYSE, -2.99, 2013-07-23\n BLC, NYSE, 9.09, 2013-07-31\n BLL, NYSE, 1.19, 2013-07-26\n BLX, NYSE, 5.56, 2013-07-19\n BMI, NYSE, -20.00, 2013-07-19\n BMS, NYSE, 1.67, 2013-07-26\n BMY, NYSE, 0.00, 2013-07-26\n BOH, NYSE, 2.41, 2013-07-23\n BXS, NYSE, 10.00, 2013-07-23\n BPL, NYSE, -8.86, 2013-08-03\nBRK.A, NYSE, 176.30, 2013-08-03\n BRO, NYSE, 2.86, 2013-07-16\n BSX, NYSE, 12.50, 2013-07-26\n BT, NYSE, 6.17, 2013-07-26\n MTRN, NYSE, 7.50, 
2013-07-27\n CAI, NYSE, -8.54, 2013-07-31\n CAT, NYSE, -15.20, 2013-07-25\n CB, NYSE, 19.27, 2013-07-24\n CBI, NYSE, 0.00, 2013-07-31\n CBM, NYSE, -64.29, 2013-08-02\n CBU, NYSE, 4.00, 2013-07-24\n CBT, NYSE, -4.35, 2013-08-01\n CCC, NYSE, 14.29, 2013-08-07\n CCE, NYSE, 2.67, 2013-07-26\n C, NYSE, 5.93, 2013-07-16\n CCK, NYSE, 3.23, 2013-07-18\n CCU, NYSE, 25.00, 2013-08-08\n CDE, NYSE, -1100.00, 2013-08-09\n CDI, NYSE, 6.25, 2013-08-02\n CAH, NYSE, 2.60, 2013-08-02\n CFR, NYSE, 0.00, 2013-07-25\n CHD, NYSE, 1.67, 2013-08-03\n CKP, NYSE, -15.38, 2013-08-07\n CPK, NYSE, -7.02, 2013-08-10\n CI, NYSE, 11.95, 2013-08-02\n CKH, NYSE, 51.67, 2013-07-31\n CL, NYSE, 0.00, 2013-07-26\n CLF, NYSE, 85.25, 2013-07-26\n CLH, NYSE, -25.00, 2013-08-08\n CLX, NYSE, 2.99, 2013-08-02\n CMA, NYSE, 8.57, 2013-07-17\n CMO, NYSE, -15.63, 2013-07-25\n CRK, NYSE, -6.67, 2013-07-30\n CMS, NYSE, -14.71, 2013-07-26\n CNA, NYSE, 17.19, 2013-07-31\n CNW, NYSE, 13.56, 2013-08-01\n CNL, NYSE, -6.06, 2013-08-01\n COG, NYSE, 35.48, 2013-07-25\n COT, NYSE, -4.76, 2013-08-02\n CP, NYSE, -4.14, 2013-07-25\n CPF, NYSE, 25.93, 2013-07-26\n CQB, NYSE, 43.48, 2013-08-09\n CR, NYSE, 0.00, 2013-07-23\nCRD.B, NYSE, 42.86, 2013-08-06\n CRS, NYSE, 11.59, 2013-07-31\n CSC, NYSE, 42.19, 2013-08-07\n CSL, NYSE, -14.93, 2013-07-24\n CTB, NYSE, -38.20, 2013-08-09\n CTL, NYSE, 2.99, 2013-08-08\n CTS, NYSE, 33.33, 2013-07-23\n CUB, NYSE, 9.52, 2013-08-02\n CMI, NYSE, 11.11, 2013-07-31\n CUZ, NYSE, 9.09, 2013-07-30\n CVC, NYSE, 80.00, 2013-08-03\n CW, NYSE, 6.06, 2013-08-01\n CWT, NYSE, 0.00, 2013-08-01\n CX, NYSE, 0.00, 2013-07-26\n CYN, NYSE, 8.33, 2013-07-19\n D, NYSE, -4.62, 2013-08-07\n DBD, NYSE, 0.00, 2013-08-15\n DCO, NYSE, 30.77, 2013-08-06\n DD, NYSE, 0.79, 2013-07-24\n CVA, NYSE, 150.00, 2013-07-18\n DHR, NYSE, 2.35, 2013-07-19\n DIS, NYSE, 0.00, 2013-08-07\n DLX, NYSE, 10.34, 2013-07-26\n DNB, NYSE, 2.00, 2013-08-08\n RRD, NYSE, 4.65, 2013-07-30\n DOV, NYSE, 5.43, 2013-07-19\n DOW, NYSE, 1.59, 2013-07-26\n DRE, NYSE, 0.00, 2013-08-01\n DHI, NYSE, 23.53, 2013-07-26\n UFS, NYSE, -25.00, 2013-07-26\n DTE, NYSE, -21.52, 2013-07-27\n DUK, NYSE, -6.45, 2013-08-08\n DVN, NYSE, 28.72, 2013-08-08\n DV, NYSE, 31.71, 2013-08-09\n EAT, NYSE, 4.05, 2013-08-03\n ECL, NYSE, 2.38, 2013-07-31\n ED, NYSE, -5.26, 2013-08-02\n EDE, NYSE, 8.00, 2013-07-26\n EFX, NYSE, 2.22, 2013-07-25\n EGN, NYSE, 8.20, 2013-08-01\n EGP, NYSE, 2.56, 2013-07-19\n ELP, NYSE, 17.65, 2013-08-16\n ELY, NYSE, 20.00, 2013-07-26\n EMC, NYSE, 2.94, 2013-07-25\n EMR, NYSE, -2.02, 2013-08-07\n EOG, NYSE, 19.32, 2013-08-07\n EQT, NYSE, 3.64, 2013-07-26\n ESE, NYSE, -41.07, 2013-08-09\n ESV, NYSE, 3.33, 2013-07-30\n ETN, NYSE, -1.80, 2013-08-03\n ETR, NYSE, 3.06, 2013-07-31\n EXAR, NYSE, 14.29, 2013-07-25\n F, NYSE, 21.62, 2013-07-25\n CLGX, NYSE, 13.64, 2013-07-25\n FNB, NYSE, 0.00, 2013-07-24\n FCF, NYSE, -50.00, 2013-07-25\n FBP, NYSE, -11.11, 2013-07-25\n FICO, NYSE, 6.35, 2013-07-31\n FLO, NYSE, 4.35, 2013-08-14\n FMC, NYSE, 0.00, 2013-07-30\n FOE, NYSE, 27.27, 2013-08-01\n S, NYSE, 6.06, 2013-07-31\n NEE, NYSE, 13.18, 2013-07-31\n FRT, NYSE, 0.88, 2013-08-01\n FRX, NYSE, 300.00, 2013-07-24\n FSS, NYSE, 64.29, 2013-08-10\n FUN, NYSE, 2.41, 2013-08-09\n FUR, NYSE, -48.15, 2013-08-02\n GBL, NYSE, 17.20, 2013-08-07\n GVA, NYSE, -78.13, 2013-08-02\n BGC, NYSE, 23.21, 2013-08-01\n GD, NYSE, 11.73, 2013-07-25\n GE, NYSE, 0.00, 2013-07-20\n RHP, NYSE, -26.85, 2013-08-07\n AXLL, NYSE, 2.59, 2013-08-01\n GGG, NYSE, 9.52, 2013-07-25\n GHM, NYSE, 52.00, 2013-07-26\n GIB, NYSE, 10.71, 
2013-08-01\n GLT, NYSE, 20.00, 2013-07-31\n GLW, NYSE, 3.23, 2013-07-31\n GSK, NYSE, -5.88, 2013-07-25\n GLF, NYSE, 25.71, 2013-07-23\n GPC, NYSE, 14.88, 2013-07-19\n GRA, NYSE, 2.75, 2013-07-26\n GTY, NYSE, 36.00, 2013-08-08\n GWW, NYSE, 2.71, 2013-07-18\n HAE, NYSE, 0.00, 2013-07-30\n HAL, NYSE, 1.39, 2013-07-23\n HAR, NYSE, 4.60, 2013-08-07\n HVT, NYSE, 31.25, 2013-08-01\n HRC, NYSE, 0.00, 2013-07-25\n HCC, NYSE, 21.69, 2013-07-31\n HCN, NYSE, 1.09, 2013-08-07\n HCP, NYSE, -2.70, 2013-07-31\n HOG, NYSE, 3.42, 2013-07-26\n HE, NYSE, 7.89, 2013-08-09\n HMA, NYSE, -46.15, 2013-08-10\n HMN, NYSE, 30.00, 2013-07-25\n HFC, NYSE, 0.00, 2013-08-08\n HOT, NYSE, 8.22, 2013-07-26\n HP, NYSE, 6.67, 2013-07-27\n HLS, NYSE, 18.60, 2013-07-26\n HRS, NYSE, 23.68, 2013-07-31\n HSC, NYSE, -11.76, 2013-08-09\n HSY, NYSE, 1.41, 2013-07-26\n HUBB, NYSE, 5.38, 2013-07-19\n HUM, NYSE, 6.91, 2013-08-01\n HXL, NYSE, 2.13, 2013-07-23\n IBM, NYSE, 3.44, 2013-07-18\n IDA, NYSE, 33.82, 2013-08-02\n IEX, NYSE, 2.70, 2013-07-23\n IFF, NYSE, -3.39, 2013-08-07\n DIN, NYSE, 12.09, 2013-07-31\n INT, NYSE, 11.76, 2013-08-01\n IP, NYSE, -5.45, 2013-07-26\n IPG, NYSE, -14.29, 2013-07-20\n IO, NYSE, -100.00, 2013-08-08\n IR, NYSE, 5.56, 2013-07-20\n IRF, NYSE, 81.82, 2013-08-20\n ITW, NYSE, -0.92, 2013-07-24\n JEC, NYSE, -1.19, 2013-07-30\n JNJ, NYSE, 5.71, 2013-07-17\n JNY, NYSE, 116.67, 2013-08-01\n K, NYSE, 3.09, 2013-08-02\n KAMN, NYSE, 13.56, 2013-07-30\n KDN, NYSE, 10.53, 2013-07-26\n KEX, NYSE, 0.94, 2013-07-25\n KEY, NYSE, 5.00, 2013-07-19\n KIM, NYSE, 6.06, 2013-07-30\n KMB, NYSE, 1.44, 2013-07-23\n KEM, NYSE, -95.00, 2013-07-26\n KMT, NYSE, 4.11, 2013-07-26\n KO, NYSE, 0.00, 2013-07-17\n KSU, NYSE, 1.05, 2013-07-20\n LDR, NYSE, -19.64, 2013-08-06\n LEG, NYSE, 0.00, 2013-07-26\n LLY, NYSE, 13.73, 2013-07-25\n LM, NYSE, -1.45, 2013-07-26\n LNC, NYSE, 10.43, 2013-08-01\n LPX, NYSE, 32.26, 2013-08-07\n LXU, NYSE, 29.17, 2013-08-09\n LTC, NYSE, -3.39, 2013-08-09\n L, NYSE, -5.48, 2013-07-30\n LUV, NYSE, -2.56, 2013-07-26\n LUX, NYSE, -1.67, 2013-07-26\n MKL, NYSE, 7.46, 2013-08-08\n MAN, NYSE, 17.98, 2013-07-20\n MTW, NYSE, 25.00, 2013-07-30\n SM, NYSE, 0.00, 2013-07-31\n MAS, NYSE, 21.05, 2013-07-30\n MTZ, NYSE, 2.33, 2013-08-02\n MCD, NYSE, -1.43, 2013-07-23\n MDC, NYSE, 38.18, 2013-07-31\n MDP, NYSE, 5.63, 2013-07-26\n MDR, NYSE, -1966.67, 2013-08-06\n MDU, NYSE, -3.85, 2013-08-01\n MED, NYSE, 2.00, 2013-08-07\n CVS, NYSE, 1.04, 2013-08-07\n MFC, NYSE, -3.12, 2013-08-09\n MGA, NYSE, 11.25, 2013-08-10\n MGM, NYSE, 300.00, 2013-08-07\n MMC, NYSE, 2.94, 2013-08-08\n MMM, NYSE, 0.59, 2013-07-26\n MSA, NYSE, 0.00, 2013-07-25\n MNR, NYSE, -27.78, 2013-08-07\n MO, NYSE, -1.59, 2013-07-24\n MOD, NYSE, 145.45, 2013-08-02\nMOG.A, NYSE, 8.43, 2013-07-27\n MHK, NYSE, 10.84, 2013-08-02\n MSI, NYSE, 11.96, 2013-07-25\n MCY, NYSE, 3.28, 2013-07-30\n MRK, NYSE, 2.44, 2013-07-31\n MRO, NYSE, -5.63, 2013-08-07\n POWR, NYSE, 20.00, 2013-08-08\n MTG, NYSE, 118.75, 2013-07-24\n MTB, NYSE, 26.19, 2013-07-18\n MTX, NYSE, 8.62, 2013-07-26\n MUR, NYSE, 12.90, 2013-08-01\n MYE, NYSE, 19.05, 2013-07-19\n NBL, NYSE, -5.48, 2013-07-26\n NBR, NYSE, -11.11, 2013-07-24\n NE, NYSE, 12.50, 2013-07-18\n NEM, NYSE, -124.39, 2013-07-27\n NFG, NYSE, 6.15, 2013-08-09\n NHI, NYSE, -1.14, 2013-08-07\n NI, NYSE, -4.17, 2013-08-01\n NJR, NYSE, 15.00, 2013-08-08\n THC, NYSE, -4.35, 2013-08-07\n NNN, NYSE, 0.00, 2013-08-02\n NOC, NYSE, 20.59, 2013-07-25\n NR, NYSE, -5.26, 2013-07-26\n NSC, NYSE, -2.67, 2013-07-24\n NUE, NYSE, -10.00, 2013-07-19\n NVR, NYSE, 
-18.34, 2013-07-23\n NWL, NYSE, 2.04, 2013-07-27\n NWN, NYSE, -11.11, 2013-08-08\n NYT, NYSE, 16.67, 2013-08-02\n OCR, NYSE, 4.65, 2013-07-25\n OGE, NYSE, -2.13, 2013-08-09\n OHI, NYSE, 1.64, 2013-08-01\n OI, NYSE, 2.53, 2013-07-25\n OII, NYSE, 8.33, 2013-07-25\n OKE, NYSE, -225.93, 2013-07-31\n OLN, NYSE, 3.85, 2013-07-26\n BRS, NYSE, 1.01, 2013-08-06\n OMC, NYSE, 0.00, 2013-07-19\n OMI, NYSE, 0.00, 2013-07-30\n ORB, NYSE, 17.39, 2013-07-19\n ORI, NYSE, 1750.00, 2013-07-26\n OSK, NYSE, 53.21, 2013-07-31\n OXY, NYSE, -1.86, 2013-07-31\n FCFS, NYSE, 1.79, 2013-07-18\n PBI, NYSE, 15.56, 2013-07-31\n PCG, NYSE, 9.72, 2013-08-01\n PCL, NYSE, 21.74, 2013-07-30\n PCP, NYSE, -0.69, 2013-07-26\n TPC, NYSE, -11.11, 2013-08-10\n PEG, NYSE, 4.35, 2013-07-31\n PEI, NYSE, 7.69, 2013-07-24\n PEP, NYSE, 10.08, 2013-07-25\n PFE, NYSE, 3.70, 2013-07-31\n PG, NYSE, 2.60, 2013-08-02\n PGR, NYSE, -2.44, 2013-07-12\n PH, NYSE, -8.72, 2013-08-07\n PHM, NYSE, -10.34, 2013-07-26\n PKD, NYSE, 0.00, 2013-08-07\n PKY, NYSE, 0.00, 2013-08-06\n PNC, NYSE, 21.34, 2013-07-18\n PNM, NYSE, 15.15, 2013-08-03\n PNR, NYSE, 2.22, 2013-07-24\n PNW, NYSE, 3.51, 2013-08-03\n POM, NYSE, -8.33, 2013-08-08\n POT, NYSE, -10.98, 2013-07-26\n PPG, NYSE, 4.70, 2013-07-19\n PPL, NYSE, 0.00, 2013-08-02' if __name__ == "__main__": main()
avg_line_length: 5185.88
max_line_length: 128925
alphanum_fraction: 0.544826
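The content field above embeds the whole script; the 128925-character maximum line length comes from the `earnings` literal sitting on a single source line. As a hedged, minimal sketch only — not part of the record — the script's parsing step looks roughly like this under current pandas, with `io.StringIO` standing in for the script's `pandas.compat.StringIO` (no longer available in current pandas), `skipinitialspace` replacing the script's later space-stripping, and two sample rows excerpted from the embedded `earnings` literal:

import io

import pandas as pd

# Two sample rows excerpted from the embedded `earnings` literal; the real
# string holds a full year of NYSE earnings-surprise rows.
earnings = "CFN, NYSE, -21.82, 2013-02-09\n NDZ, NYSE, 30.77, 2013-01-29"

# Same column names the embedded script passes to read_csv.
df = pd.read_csv(
    io.StringIO(earnings),
    sep=",",
    header=None,
    names=["symbol", "exchange", "eps_pct_diff_surp", "asof_date"],
    skipinitialspace=True,  # each field in the literal is padded with a space
)
df = df.sort_values(by=["asof_date"])
print(df.head())
print(len(df))

# The embedded script then writes df to
# ../../data/events/nyse_earnings_surprises_2013.csv and prints the unique
# symbols as a quoted, comma-joined string with all spaces removed.
symbols = ",".join('"{0}"'.format(s) for s in df["symbol"].unique())
print(symbols)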
import pandas as pd from pandas.compat import StringIO import numpy numpy.set_printoptions(threshold=numpy.nan) def main(): df = pd.read_csv(StringIO(earnings), sep=",", header=None, names=['symbol', 'exchange', 'eps_pct_diff_surp', 'asof_date']) df = df.sort_values(by=['asof_date']) print(df.head()) print(len(df)) df.to_csv('../../data/events/nyse_earnings_surprises_2013.csv', index=False) myString = ', '.join('"{0}"'.format(s) for s in df.symbol.unique()) myString = myString.replace(" ", "") print(myString) earnings = 'CFN, NYSE, -21.82, 2013-02-09\n NDZ, NYSE, 30.77, 2013-01-29\n AZZ, NYSE, -1.64, 2013-01-10\n CLC, NYSE, 2.86, 2013-01-17\n CMC, NYSE, 64.71, 2013-01-08\n FC, NYSE, 15.38, 2013-01-04\n FDO, NYSE, -6.76, 2013-01-04\n FUL, NYSE, 14.29, 2013-01-17\n LEN, NYSE, 30.23, 2013-01-16\n LNN, NYSE, 53.33, 2013-01-09\n MKC, NYSE, -3.48, 2013-01-25\n RT, NYSE, 0.00, 2013-01-10\n MSM, NYSE, 1.00, 2013-01-11\n RPM, NYSE, -4.76, 2013-01-09\n SVU, NYSE, -50.00, 2013-01-11\n TISI, NYSE, 10.00, 2013-01-08\n TXI, NYSE, -5.88, 2013-01-10\n UNF, NYSE, 15.79, 2013-01-04\n WOR, NYSE, 12.20, 2013-01-04\n GBX, NYSE, 12.90, 2013-01-10\n SJR, NYSE, 11.11, 2013-01-10\n OMN, NYSE, -50.00, 2013-01-23\n MON, NYSE, 67.57, 2013-01-09\n GPN, NYSE, 6.90, 2013-01-09\n AYI, NYSE, -13.75, 2013-01-09\n STZ, NYSE, 14.55, 2013-01-10\n SNX, NYSE, 11.54, 2013-01-11\n TAL, NYSE, 600.00, 2013-01-23\n IHS, NYSE, 12.35, 2013-01-09\n EDU, NYSE, -150.00, 2013-01-30\n SAR, NYSE, 28.57, 2013-01-15\n ZEP, NYSE, 11.11, 2013-01-08\n MG, NYSE, 0.00, 2013-01-09\n MOS, NYSE, 7.14, 2013-01-04\n ABT, NYSE, 1.33, 2013-01-24\n ABX, NYSE, 1.83, 2013-02-15\n AB, NYSE, 21.21, 2013-02-13\n TAP, NYSE, 7.81, 2013-02-15\n ACO, NYSE, -15.91, 2013-01-26\n ADM, NYSE, -26.83, 2013-02-05\n AEM, NYSE, -13.33, 2013-02-14\n AEP, NYSE, 11.11, 2013-02-16\n AES, NYSE, 6.67, 2013-02-28\n AET, NYSE, -2.08, 2013-02-01\n AFL, NYSE, 0.00, 2013-02-06\n AGCO, NYSE, 1.02, 2013-02-06\n HES, NYSE, -2.44, 2013-01-31\n AIG, NYSE, 322.22, 2013-02-22\n AIN, NYSE, -9.68, 2013-02-07\n AJG, NYSE, 2.63, 2013-01-30\n ALU, NYSE, 0.00, 2013-02-08\n MATX, NYSE, 24.14, 2013-02-08\n ALK, NYSE, -4.11, 2013-01-25\n ALX, NYSE, -11.52, 2013-02-27\n BEAM, NYSE, 0.00, 2013-02-02\n AME, NYSE, 2.08, 2013-01-25\n TWX, NYSE, 6.36, 2013-02-07\n AVD, NYSE, 11.43, 2013-03-01\n AMN, NYSE, 36.36, 2013-02-22\n AN, NYSE, 3.08, 2013-02-01\n AON, NYSE, 1.60, 2013-02-02\n AP, NYSE, 77.78, 2013-02-05\n APA, NYSE, -1.30, 2013-02-15\n APC, NYSE, 30.00, 2013-02-05\n APD, NYSE, 0.78, 2013-01-24\n APH, NYSE, 4.44, 2013-01-18\n ARG, NYSE, -3.70, 2013-01-25\n AAN, NYSE, -4.00, 2013-02-08\n ARW, NYSE, 13.89, 2013-02-08\n ASGN, NYSE, -25.00, 2013-02-15\n ASH, NYSE, -17.65, 2013-01-30\n ASR, NYSE, 56.88, 2013-02-26\n GAS, NYSE, -9.90, 2013-02-07\n ATO, NYSE, -5.13, 2013-02-07\n ATW, NYSE, 17.02, 2013-01-31\n AU, NYSE, -67.44, 2013-02-21\n AVP, NYSE, 37.04, 2013-02-13\n AVT, NYSE, 21.69, 2013-01-25\n AVY, NYSE, 10.20, 2013-01-31\n AXP, NYSE, 0.00, 2013-01-18\n B, NYSE, 7.84, 2013-02-23\n BA, NYSE, 7.56, 2013-01-31\n BAC, NYSE, 50.00, 2013-01-18\n BAX, NYSE, 0.00, 2013-01-25\n BC, NYSE, 122.22, 2013-01-25\n OMX, NYSE, 6.67, 2013-02-21\n BCE, NYSE, -2.99, 2013-02-08\n BCR, NYSE, 1.80, 2013-02-01\n BCS, NYSE, 40.74, 2013-02-13\n BDX, NYSE, 9.76, 2013-02-06\n BEN, NYSE, 1.68, 2013-02-02\n BGG, NYSE, 250.00, 2013-01-25\n BHE, NYSE, 10.00, 2013-02-05\n BHI, NYSE, 1.64, 2013-01-24\n BID, NYSE, 0.92, 2013-03-01\n BIO, NYSE, 15.67, 2013-02-27\n BK, NYSE, 0.00, 2013-01-16\n BKH, NYSE, 9.68, 2013-02-01\n WRB, NYSE, 
28.00, 2013-01-29\n BLC, NYSE, 5.71, 2013-02-09\n BLL, NYSE, -3.03, 2013-02-01\n BLX, NYSE, 20.75, 2013-02-08\n BMI, NYSE, -11.36, 2013-02-07\n BMS, NYSE, 4.00, 2013-02-01\n BMY, NYSE, 9.30, 2013-01-25\n BOH, NYSE, 1.12, 2013-01-31\n BXS, NYSE, -25.00, 2013-01-24\n BPL, NYSE, 18.52, 2013-02-09\nBRK.A, NYSE, 175.73, 2013-03-02\n BRO, NYSE, 7.41, 2013-02-02\n BSX, NYSE, 63.64, 2013-01-30\n BT, NYSE, -89.22, 2013-02-02\n MTRN, NYSE, 17.14, 2013-03-01\n CACI, NYSE, 3.66, 2013-01-31\n CAT, NYSE, -13.10, 2013-01-29\n CB, NYSE, 10.00, 2013-01-30\n CBI, NYSE, 9.64, 2013-02-28\n CBM, NYSE, 100.00, 2013-02-07\n CBU, NYSE, -3.70, 2013-01-23\n CBT, NYSE, -28.57, 2013-01-31\n CCC, NYSE, 35.71, 2013-02-22\n CCE, NYSE, 4.65, 2013-02-08\n C, NYSE, -20.69, 2013-01-18\n CCK, NYSE, -7.27, 2013-01-31\n CCU, NYSE, -12.21, 2013-02-01\n CDE, NYSE, -15.15, 2013-02-22\n CDI, NYSE, 8.70, 2013-02-27\n CAH, NYSE, 9.41, 2013-02-06\n CFR, NYSE, 5.38, 2013-01-31\n CHD, NYSE, 0.00, 2013-02-06\n CKP, NYSE, -50.00, 2013-03-06\n CPK, NYSE, 18.60, 2013-03-08\n CI, NYSE, 6.08, 2013-02-08\n CIA, NYSE, -100.00, 2013-03-12\n CKH, NYSE, -93.55, 2013-02-28\n CL, NYSE, 0.71, 2013-02-01\n CLF, NYSE, -25.45, 2013-02-13\n CLH, NYSE, -25.00, 2013-02-21\n CLX, NYSE, 11.11, 2013-02-05\n CMA, NYSE, 7.81, 2013-01-17\n CMO, NYSE, -6.06, 2013-01-31\n CRK, NYSE, -77.42, 2013-02-12\n CMS, NYSE, 4.17, 2013-02-22\n CNA, NYSE, -150.00, 2013-02-12\n CNW, NYSE, -10.34, 2013-02-07\n CHG, NYSE, -4.12, 2013-02-27\n CNL, NYSE, 12.50, 2013-02-20\n COG, NYSE, 14.29, 2013-02-22\n COT, NYSE, -66.67, 2013-02-16\n CP, NYSE, -0.78, 2013-01-30\n CPF, NYSE, 11.54, 2013-02-01\n CQB, NYSE, -17.65, 2013-03-12\n CR, NYSE, -5.15, 2013-01-29\nCRD.B, NYSE, 52.38, 2013-02-14\n CRS, NYSE, 1.64, 2013-02-01\n CSC, NYSE, 22.22, 2013-02-06\n CSL, NYSE, 6.49, 2013-02-09\n CTB, NYSE, 35.29, 2013-02-26\n CTL, NYSE, -1.47, 2013-02-14\n CTS, NYSE, -21.74, 2013-01-29\n CUB, NYSE, -32.86, 2013-02-12\n CMI, NYSE, 14.94, 2013-02-07\n CUZ, NYSE, 40.00, 2013-02-14\n CVC, NYSE, -400.00, 2013-03-01\n CVH, NYSE, 35.82, 2013-02-07\n CW, NYSE, 4.40, 2013-02-21\n CWT, NYSE, 33.33, 2013-02-28\n CX, NYSE, -258.33, 2013-02-08\n CYN, NYSE, -13.00, 2013-01-25\n D, NYSE, 1.47, 2013-02-01\n DBD, NYSE, -8.16, 2013-02-13\n DCO, NYSE, -23.81, 2013-03-05\n DD, NYSE, 22.22, 2013-01-23\n CVA, NYSE, -13.04, 2013-02-07\n DHR, NYSE, 0.00, 2013-01-30\n DIS, NYSE, 2.60, 2013-02-06\n DLX, NYSE, 11.76, 2013-01-25\n DNB, NYSE, -1.24, 2013-02-12\n RRD, NYSE, 16.22, 2013-02-27\n DOV, NYSE, 1.87, 2013-01-25\n DOW, NYSE, -2.94, 2013-02-01\n DRE, NYSE, 0.00, 2013-01-31\n DHI, NYSE, 42.86, 2013-01-30\n UFS, NYSE, -7.09, 2013-02-02\n DTE, NYSE, 0.00, 2013-02-21\n DUK, NYSE, 7.69, 2013-02-14\n DVN, NYSE, 2.63, 2013-02-21\n DV, NYSE, 55.36, 2013-02-07\n EAT, NYSE, 0.00, 2013-01-23\n ECL, NYSE, 0.00, 2013-02-27\n ED, NYSE, -6.85, 2013-02-01\n EDE, NYSE, 27.78, 2013-02-15\n EFX, NYSE, 4.00, 2013-02-07\n EGN, NYSE, -15.58, 2013-01-24\n EGP, NYSE, 0.00, 2013-02-13\n ELY, NYSE, 2.00, 2013-01-31\n EMC, NYSE, 6.98, 2013-01-30\n EMR, NYSE, 0.00, 2013-02-06\n EOG, NYSE, 19.26, 2013-02-14\n EQT, NYSE, 14.29, 2013-01-25\n ESE, NYSE, -44.44, 2013-02-08\n ESV, NYSE, 7.87, 2013-02-21\n ETN, NYSE, -10.87, 2013-02-06\n ETR, NYSE, 21.99, 2013-02-09\n EXAR, NYSE, -14.29, 2013-01-24\n F, NYSE, 19.23, 2013-01-30\n OPY, NYSE, 115.79, 2013-02-02\n CLGX, NYSE, -3.12, 2013-02-22\n FNB, NYSE, 4.55, 2013-01-24\n FCF, NYSE, -18.18, 2013-01-31\n FBP, NYSE, -30.00, 2013-02-06\n FICO, NYSE, 6.94, 2013-01-31\n FLO, NYSE, 12.00, 2013-02-08\n FMC, 
NYSE, 0.00, 2013-02-07\n FOE, NYSE, -250.00, 2013-03-06\n S, NYSE, 4.35, 2013-02-08\n NEE, NYSE, 9.57, 2013-01-30\n FRT, NYSE, 0.91, 2013-02-13\n FRX, NYSE, -61.54, 2013-01-16\n FUN, NYSE, -433.33, 2013-02-20\n FUR, NYSE, -48.15, 2013-03-08\n GBL, NYSE, -28.72, 2013-02-06\n GVA, NYSE, -29.03, 2013-03-01\n BGC, NYSE, -3.45, 2013-02-26\n GD, NYSE, -26.84, 2013-01-24\n GE, NYSE, 2.33, 2013-01-19\n RHP, NYSE, -50.00, 2013-02-13\n AXLL, NYSE, 95.08, 2013-02-13\n GGG, NYSE, 13.33, 2013-01-29\n GHM, NYSE, -22.22, 2013-02-02\n GIB, NYSE, -4.35, 2013-01-31\n GLT, NYSE, -25.71, 2013-02-08\n GLW, NYSE, 3.03, 2013-01-30\n GSK, NYSE, 8.33, 2013-02-07\n GLF, NYSE, -160.71, 2013-02-26\n GNI, NYSE, -14.44, 2013-01-30\n GPC, NYSE, 0.00, 2013-02-20\n GRA, NYSE, 4.72, 2013-02-07\n GTY, NYSE, -10.34, 2013-03-01\n GWW, NYSE, -7.28, 2013-01-25\n HAE, NYSE, 4.17, 2013-01-31\n HAL, NYSE, 3.28, 2013-01-26\n HAR, NYSE, -32.95, 2013-02-01\n HVT, NYSE, 30.43, 2013-02-26\n HRC, NYSE, 6.82, 2013-01-24\n HCC, NYSE, 43.75, 2013-02-13\n HCN, NYSE, 1.19, 2013-02-26\n HCP, NYSE, 1.41, 2013-02-13\n HOG, NYSE, 0.00, 2013-01-30\n HE, NYSE, 21.88, 2013-02-16\n HL, NYSE, -25.00, 2013-02-26\n HMA, NYSE, -5.00, 2013-02-15\n HMC, NYSE, -29.58, 2013-02-01\n HMN, NYSE, 91.43, 2013-02-06\n HFC, NYSE, -8.97, 2013-02-27\n HOT, NYSE, 7.69, 2013-02-08\n HP, NYSE, 8.53, 2013-02-01\n HLS, NYSE, 40.63, 2013-02-19\n HRS, NYSE, 4.17, 2013-01-30\n HSC, NYSE, -3.23, 2013-02-15\n HSY, NYSE, -1.33, 2013-02-01\n HUBB, NYSE, 0.00, 2013-01-25\n HUM, NYSE, 11.21, 2013-02-05\n HXL, NYSE, -5.26, 2013-01-24\n IBM, NYSE, 2.67, 2013-01-23\n IDA, NYSE, 10.00, 2013-02-22\n IEX, NYSE, 2.99, 2013-02-05\n IFF, NYSE, -1.19, 2013-02-08\n DIN, NYSE, 1.22, 2013-02-28\n INT, NYSE, 0.00, 2013-02-22\n IP, NYSE, 6.15, 2013-01-30\n IPG, NYSE, 3.70, 2013-02-23\n IO, NYSE, 30.77, 2013-02-14\n IR, NYSE, 8.57, 2013-02-02\n IRF, NYSE, 6.38, 2013-01-29\n ITW, NYSE, -1.11, 2013-01-30\n IVC, NYSE, -56.00, 2013-02-09\n JEC, NYSE, 0.00, 2013-01-24\n JNJ, NYSE, 1.71, 2013-01-23\n JNY, NYSE, 75.00, 2013-02-14\n K, NYSE, 3.08, 2013-02-06\n KAMN, NYSE, 0.00, 2013-02-26\n KDN, NYSE, 0.00, 2013-02-22\n KEX, NYSE, 9.30, 2013-01-31\n KEY, NYSE, -4.55, 2013-01-25\n KIM, NYSE, 6.45, 2013-02-06\n KMB, NYSE, 0.74, 2013-01-26\n KEM, NYSE, 53.33, 2013-02-01\n KMT, NYSE, -21.88, 2013-01-25\n KO, NYSE, 2.27, 2013-02-13\n KSU, NYSE, 10.98, 2013-01-23\n LDL, NYSE, -10.53, 2013-02-27\n LDR, NYSE, 10.42, 2013-02-12\n LEE, NYSE, 25.00, 2013-01-23\n LEG, NYSE, 10.34, 2013-02-05\n LLY, NYSE, 8.97, 2013-01-30\n LM, NYSE, 29.63, 2013-02-02\n LNC, NYSE, 3.77, 2013-02-07\n LPX, NYSE, -10.00, 2013-02-09\n LXU, NYSE, 145.00, 2013-03-01\n LTC, NYSE, -1.72, 2013-02-22\n L, NYSE, -37.93, 2013-02-12\n LUK, NYSE, 210.17, 2013-02-26\n LUV, NYSE, 28.57, 2013-01-25\n LUX, NYSE, 4.35, 2013-03-01\n MKL, NYSE, 314.07, 2013-02-05\n MAN, NYSE, 18.18, 2013-01-31\n MTW, NYSE, 12.50, 2013-02-01\n SM, NYSE, 95.65, 2013-02-21\n MAS, NYSE, 500.00, 2013-02-12\n MTZ, NYSE, 2.22, 2013-03-01\n MCD, NYSE, 3.76, 2013-01-24\n MDC, NYSE, 40.48, 2013-02-01\n MDP, NYSE, 1.14, 2013-01-25\n MDR, NYSE, 13.04, 2013-03-01\n MDU, NYSE, 2.56, 2013-02-05\n MED, NYSE, 12.00, 2013-03-08\n CVS, NYSE, 2.73, 2013-02-07\n MFC, NYSE, -12.50, 2013-02-08\n MGA, NYSE, 36.84, 2013-03-02\n MGM, NYSE, 0.00, 2013-02-21\n MLR, NYSE, -11.76, 2013-03-07\n MLI, NYSE, 14.29, 2013-02-06\n MMC, NYSE, 0.00, 2013-02-13\n MMM, NYSE, 0.00, 2013-01-25\n MSA, NYSE, 3.64, 2013-02-14\n MNR, NYSE, 38.46, 2013-02-08\n MO, NYSE, 1.85, 2013-02-01\n MOD, NYSE, -75.00, 
2013-02-02\nMOG.A, NYSE, -8.54, 2013-01-26\n MHK, NYSE, 7.45, 2013-02-22\n MSI, NYSE, 7.61, 2013-01-24\n MCY, NYSE, -168.00, 2013-02-05\n MRK, NYSE, 2.47, 2013-02-02\n MRO, NYSE, -19.12, 2013-02-07\n POWR, NYSE, 18.18, 2013-03-08\n MTG, NYSE, -37.87, 2013-03-01\n MTB, NYSE, 2.76, 2013-01-17\n MTX, NYSE, 6.38, 2013-02-01\n MUR, NYSE, 59.23, 2013-01-31\n MYE, NYSE, -7.14, 2013-02-14\n NBL, NYSE, 54.21, 2013-02-08\n NBR, NYSE, 3.45, 2013-02-20\n NE, NYSE, -19.35, 2013-01-24\n NEM, NYSE, 13.27, 2013-02-22\n NFG, NYSE, 6.58, 2013-02-08\n NHI, NYSE, 1.20, 2013-02-15\n NI, NYSE, 0.00, 2013-02-20\n NJR, NYSE, -17.48, 2013-02-08\n THC, NYSE, -24.64, 2013-02-27\n NNN, NYSE, 4.55, 2013-02-08\n NOC, NYSE, 18.39, 2013-01-31\n NPK, NYSE, -11.23, 2013-02-16\n NR, NYSE, 0.00, 2013-02-15\n NSC, NYSE, 9.24, 2013-01-23\n NUE, NYSE, 55.17, 2013-01-30\n NVR, NYSE, 8.22, 2013-01-25\n NWL, NYSE, 2.38, 2013-02-02\n NWN, NYSE, -4.55, 2013-03-02\n NYT, NYSE, 3.23, 2013-02-08\n OCR, NYSE, 1.18, 2013-02-20\n OGE, NYSE, 14.71, 2013-02-28\n OHI, NYSE, 3.57, 2013-02-12\n OI, NYSE, 8.11, 2013-01-31\n OII, NYSE, 2.78, 2013-02-14\n OKE, NYSE, 17.78, 2013-02-26\n OLN, NYSE, 2.94, 2013-01-29\n BRS, NYSE, 32.95, 2013-02-05\n OLP, NYSE, 0.00, 2013-03-15\n OMC, NYSE, 3.67, 2013-02-13\n OMI, NYSE, -12.77, 2013-02-12\n ORB, NYSE, 31.82, 2013-02-15\n ORI, NYSE, -28.57, 2013-01-25\n OSK, NYSE, 93.55, 2013-01-26\n OXY, NYSE, 10.24, 2013-02-01\n PHX, NYSE, -18.75, 2013-02-08\n FCFS, NYSE, 2.20, 2013-01-24\n PBI, NYSE, 7.69, 2013-02-01\n PCG, NYSE, 3.51, 2013-02-22\n PCL, NYSE, 68.97, 2013-01-29\n PCP, NYSE, -3.23, 2013-01-25\n TPC, NYSE, 0.00, 2013-02-22\n PDS, NYSE, 250.00, 2013-02-15\n PEG, NYSE, 5.13, 2013-02-22\n PEI, NYSE, 0.00, 2013-02-26\n PEP, NYSE, 3.81, 2013-02-15\n PFE, NYSE, 6.82, 2013-01-30\n PG, NYSE, 9.91, 2013-01-26\n PGR, NYSE, 0.00, 2013-01-19\n PH, NYSE, 6.25, 2013-01-19\n PHG, NYSE, -4.17, 2013-01-30\n PHM, NYSE, 9.68, 2013-02-01\n PKD, NYSE, -150.00, 2013-02-22\n PKY, NYSE, 17.39, 2013-02-12\n PNC, NYSE, 24.82, 2013-01-18\n PNM, NYSE, 18.18, 2013-03-02\n PNR, NYSE, 6.82, 2013-01-30\n PNW, NYSE, 41.18, 2013-02-23\n POM, NYSE, -5.00, 2013-03-02\n POT, NYSE, -11.86, 2013-02-01\n PPG, NYSE, -0.65, 2013-01-15\n PPL, NYSE, 6.52, 2013-02-15\n PRGO, NYSE, 3.82, 2013-02-02\n PL, NYSE, 11.36, 2013-02-07\n PSB, NYSE, 5.04, 2013-02-20\n CSH, NYSE, 12.61, 2013-01-25\n PWR, NYSE, 36.11, 2013-02-22\n PX, NYSE, 0.00, 2013-01-24\n KWR, NYSE, 26.32, 2013-03-07\n R, NYSE, 6.36, 2013-02-01\n RBC, NYSE, 2.70, 2013-02-05\n RDC, NYSE, 28.57, 2013-03-01\n HTSI, NYSE, -20.69, 2013-02-01\n RES, NYSE, 8.33, 2013-01-24\n RGS, NYSE, -76.92, 2013-02-01\n RGR, NYSE, 36.99, 2013-02-28\n RHI, NYSE, 2.44, 2013-01-30\n RJF, NYSE, 0.00, 2013-01-24\n RLI, NYSE, 102.27, 2013-01-24\n ROG, NYSE, -8.62, 2013-02-20\n ROK, NYSE, -2.38, 2013-01-31\n ROL, NYSE, -5.88, 2013-01-24\n ROP, NYSE, 1.37, 2013-01-29\n RTI, NYSE, 25.00, 2013-02-07\n RTN, NYSE, 23.08, 2013-01-25\n RYL, NYSE, 12.00, 2013-01-30\n BSAC, NYSE, -1.96, 2013-02-05\n T, NYSE, -6.38, 2013-01-25\n SCG, NYSE, 0.00, 2013-02-22\n SCHW, NYSE, 0.00, 2013-01-17\n SCL, NYSE, -5.56, 2013-02-20\n SMG, NYSE, 0.88, 2013-02-07\n SEE, NYSE, 17.24, 2013-02-20\n SF, NYSE, 5.17, 2013-02-26\n SFE, NYSE, -121.74, 2013-03-08\n SHW, NYSE, -0.87, 2013-02-01\n STC, NYSE, 29.27, 2013-02-15\n SJI, NYSE, -6.67, 2013-03-01\n JOE, NYSE, -1000.00, 2013-03-01\n SJW, NYSE, 72.22, 2013-02-20\n SLB, NYSE, 0.00, 2013-01-19\n HSH, NYSE, 29.17, 2013-02-01\n AOS, NYSE, 12.35, 2013-01-25\n SNA, NYSE, 4.38, 2013-02-08\n PII, NYSE, 
0.81, 2013-01-30\n SNV, NYSE, 0.00, 2013-01-23\n SO, NYSE, 12.82, 2013-01-31\n SON, NYSE, 3.70, 2013-02-14\n SPA, NYSE, 30.00, 2013-02-06\n TRV, NYSE, 500.00, 2013-01-23\n SR, NYSE, 14.68, 2013-02-06\n NVE, NYSE, 0.00, 2013-02-23\n SCI, NYSE, 10.00, 2013-02-13\n SSP, NYSE, -3.85, 2013-02-27\n STT, NYSE, 11.00, 2013-01-19\n STI, NYSE, 6.56, 2013-01-19\n STJ, NYSE, 2.22, 2013-01-24\n STL, NYSE, 14.29, 2013-01-24\n STR, NYSE, 8.57, 2013-02-21\n STE, NYSE, 3.57, 2013-02-07\n SYK, NYSE, 0.88, 2013-01-24\n SUN, NYSE, -4.88, 2013-03-30\n SUP, NYSE, -61.54, 2013-03-02\n SWK, NYSE, 3.01, 2013-01-25\n SWN, NYSE, 2.33, 2013-02-21\n SWS, NYSE, 0.00, 2013-02-07\n SWX, NYSE, -2.44, 2013-02-27\n SWY, NYSE, 23.68, 2013-02-22\n SXI, NYSE, 1.10, 2013-02-02\n SYY, NYSE, 19.51, 2013-02-05\n TNC, NYSE, 6.90, 2013-02-20\n TCB, NYSE, -16.67, 2013-01-31\n TCO, NYSE, 5.15, 2013-02-14\n TDS, NYSE, -725.00, 2013-02-27\n TDW, NYSE, 38.64, 2013-02-02\n TDY, NYSE, 8.33, 2013-01-25\n TE, NYSE, 0.00, 2013-02-06\n TER, NYSE, 600.00, 2013-01-24\n TEVA, NYSE, -0.75, 2013-02-08\n TEX, NYSE, -51.28, 2013-02-20\n TFX, NYSE, 1.79, 2013-02-22\n TEN, NYSE, -2.94, 2013-02-01\n TKR, NYSE, 25.00, 2013-01-25\n TMK, NYSE, 1.53, 2013-02-05\n TMO, NYSE, 6.25, 2013-02-01\n TOT, NYSE, -1.12, 2013-02-14\n TM, NYSE, -44.72, 2013-02-06\n TR, NYSE, 37.50, 2013-02-14\n TRN, NYSE, 7.14, 2013-02-21\n TRP, NYSE, -15.09, 2013-02-13\n TRR, NYSE, 566.67, 2013-02-07\n TSO, NYSE, -2.90, 2013-02-07\n TSS, NYSE, -3.03, 2013-01-23\n TTI, NYSE, -21.05, 2013-03-01\n TXT, NYSE, -1.75, 2013-01-24\n TYL, NYSE, 10.71, 2013-02-07\n TSN, NYSE, 23.08, 2013-02-02\n UDR, NYSE, 2.94, 2013-02-06\n UFI, NYSE, -42.86, 2013-01-23\n UGI, NYSE, -15.89, 2013-02-01\n UAM, NYSE, 45.45, 2013-02-20\n UHS, NYSE, 9.89, 2013-03-01\n UHT, NYSE, 268.42, 2013-02-28\n UIL, NYSE, -9.68, 2013-02-22\n UNH, NYSE, 0.00, 2013-01-18\n KMPR, NYSE, -250.00, 2013-02-08\n UNM, NYSE, 5.13, 2013-02-06\n UNP, NYSE, 1.39, 2013-01-25\n UNT, NYSE, 2.06, 2013-02-20\n URS, NYSE, -1.04, 2013-02-26\n USG, NYSE, -67.86, 2013-02-07\n MUX, NYSE, -600.00, 2013-03-09\n USM, NYSE, -1100.00, 2013-02-27\n USPH, NYSE, 3.03, 2013-03-08\n UTL, NYSE, 3.13, 2013-01-31\n UTX, NYSE, 26.47, 2013-01-24\n VMI, NYSE, 8.48, 2013-02-13\n VAR, NYSE, 3.49, 2013-01-24\n VFC, NYSE, 1.32, 2013-02-16\n CBS, NYSE, -8.57, 2013-02-15\n VLO, NYSE, 57.98, 2013-01-30\n VMC, NYSE, -81.82, 2013-02-15\n VLY, NYSE, 0.00, 2013-01-31\n VNO, NYSE, 6.09, 2013-02-27\n VSH, NYSE, 37.50, 2013-02-06\n WTS, NYSE, 5.17, 2013-02-20\n WBS, NYSE, 6.12, 2013-01-19\n WEC, NYSE, 4.88, 2013-01-31\n WFC, NYSE, 3.41, 2013-01-14\n WG, NYSE, 57.14, 2013-03-07\n WGL, NYSE, 9.62, 2013-02-07\n WHR, NYSE, 3.15, 2013-02-01\n WMB, NYSE, -3.85, 2013-02-21\n WMK, NYSE, 20.29, 2013-03-06\n WNC, NYSE, 3.23, 2013-02-06\n TEG, NYSE, -5.32, 2013-03-01\n WR, NYSE, 80.00, 2013-03-01\n WRE, NYSE, 2.17, 2013-02-14\n WRI, NYSE, 4.44, 2013-02-15\n WPP, NYSE, -175.00, 2013-02-12\n WSO, NYSE, -12.77, 2013-02-15\n WST, NYSE, 8.93, 2013-02-22\n WWW, NYSE, 200.00, 2013-02-20\n WY, NYSE, 36.84, 2013-01-26\n X, NYSE, 45.33, 2013-01-30\n XL, NYSE, 138.24, 2013-02-08\n XOM, NYSE, 10.00, 2013-02-02\n XRX, NYSE, 7.14, 2013-01-25\n Y, NYSE, 54.64, 2013-02-22\n HRG, NYSE, -50.00, 2013-02-09\n CRY, NYSE, 33.33, 2013-02-15\n CHK, NYSE, 85.71, 2013-02-22\n DDR, NYSE, 0.00, 2013-02-13\n ELS, NYSE, 0.00, 2013-01-29\n ALG, NYSE, 37.93, 2013-03-07\n ETH, NYSE, 5.41, 2013-01-23\n ATR, NYSE, 0.00, 2013-02-08\n GGP, NYSE, 6.90, 2013-02-05\n MSL, NYSE, -10.00, 2013-01-30\n RCL, NYSE, 66.67, 2013-02-05\n 
CWEI, NYSE, -34.04, 2013-02-22\n HR, NYSE, 0.00, 2013-02-21\n RGA, NYSE, 35.56, 2013-02-01\n RIG, NYSE, 12.35, 2013-03-02\n SKT, NYSE, 2.22, 2013-02-13\n TWI, NYSE, -80.85, 2013-02-26\n BDN, NYSE, 17.86, 2013-02-07\n KGC, NYSE, -4.55, 2013-02-14\n YPF, NYSE, 26.67, 2013-03-13\n CPT, NYSE, 1.04, 2013-02-01\n SGY, NYSE, 67.27, 2013-02-26\n BFS, NYSE, -11.48, 2013-03-08\n BWA, NYSE, 3.57, 2013-02-15\n EQR, NYSE, 0.00, 2013-02-06\n CLP, NYSE, -81.25, 2013-02-08\n KOF, NYSE, -7.78, 2013-02-28\n OKS, NYSE, 3.13, 2013-02-26\n SQM, NYSE, -15.63, 2013-03-06\n BYD, NYSE, -138.46, 2013-03-05\n CBL, NYSE, 8.77, 2013-02-06\n DECK, NYSE, 7.36, 2013-03-01\n IT, NYSE, 6.78, 2013-02-08\n GFI, NYSE, -36.36, 2013-02-15\n HST, NYSE, 8.11, 2013-02-22\n LXP, NYSE, 0.00, 2013-02-22\n OMG, NYSE, -533.33, 2013-02-20\n REG, NYSE, 8.62, 2013-01-31\n TUC, NYSE, -5.56, 2013-03-08\n AF, NYSE, 7.14, 2013-01-24\n BFR, NYSE, 13.33, 2013-02-09\n HHS, NYSE, 26.32, 2013-02-01\n MHO, NYSE, -3.45, 2013-02-01\n NFX, NYSE, -36.36, 2013-02-20\n SPG, NYSE, 13.93, 2013-02-05\n SU, NYSE, -14.20, 2013-02-06\n SUI, NYSE, -2.44, 2013-02-22\n TV, NYSE, 5.13, 2013-02-26\n CGI, NYSE, 0.00, 2013-01-24\n CYT, NYSE, 77.42, 2013-02-01\n EMN, NYSE, 0.00, 2013-02-01\n GRT, NYSE, 0.00, 2013-02-15\n MAA, NYSE, -1.74, 2013-02-07\n PLT, NYSE, 0.00, 2013-01-30\n BZH, NYSE, 24.27, 2013-01-29\n ELX, NYSE, 0.00, 2013-02-01\n AGM, NYSE, -5.41, 2013-03-19\n MLM, NYSE, -13.21, 2013-02-13\n AKS, NYSE, 14.29, 2013-01-30\n ALB, NYSE, 18.18, 2013-01-23\n VRX, NYSE, -4.00, 2013-03-01\n CBR, NYSE, 140.00, 2013-02-22\n MAC, NYSE, 3.45, 2013-02-07\n RKT, NYSE, 5.47, 2013-01-23\n RYN, NYSE, 3.51, 2013-01-25\n ADC, NYSE, 1.96, 2013-02-28\nBRK.B, NYSE, 0.88, 2013-03-02\n EXP, NYSE, 0.00, 2013-02-07\n GGB, NYSE, -66.67, 2013-02-22\n SSD, NYSE, -100.00, 2013-02-08\n ESS, NYSE, 4.02, 2013-02-01\n FR, NYSE, 0.00, 2013-02-21\n HIW, NYSE, 0.00, 2013-02-13\n IMAX, NYSE, 58.33, 2013-02-22\n AIV, NYSE, 4.00, 2013-02-08\n FCH, NYSE, 50.00, 2013-02-20\n ITGR, NYSE, 6.00, 2013-02-26\n GEO, NYSE, 7.32, 2013-02-22\n CLI, NYSE, 4.76, 2013-02-08\n DAR, NYSE, -20.00, 2013-02-28\n RS, NYSE, 9.28, 2013-02-22\n CPE, NYSE, -66.67, 2013-03-15\n KNX, NYSE, 4.76, 2013-01-31\n O, NYSE, 3.70, 2013-02-15\n PKX, NYSE, -15.35, 2013-03-02\n COF, NYSE, -12.35, 2013-01-18\n CYD, NYSE, -23.14, 2013-02-28\n IRS, NYSE, 57.50, 2013-02-20\n MCK, NYSE, -13.50, 2013-02-01\n SWC, NYSE, 116.67, 2013-02-28\n STM, NYSE, -22.22, 2013-01-31\n TEO, NYSE, 28.36, 2013-03-01\n TRK, NYSE, 400.00, 2013-03-07\n GFF, NYSE, 300.00, 2013-01-31\n LMT, NYSE, -0.56, 2013-01-25\n APU, NYSE, -13.89, 2013-02-01\n AGU, NYSE, 6.93, 2013-02-22\n LH, NYSE, -4.35, 2013-02-09\n DDD, NYSE, 0.00, 2013-02-26\n WEX, NYSE, 0.94, 2013-02-07\n AFG, NYSE, 3.08, 2013-02-12\n RMD, NYSE, 3.92, 2013-01-25\n WAB, NYSE, 2.29, 2013-02-20\n CIB, NYSE, 20.39, 2013-03-05\n CAM, NYSE, -1.04, 2013-02-01\n FCX, NYSE, 5.41, 2013-01-23\n RNR, NYSE, 70.27, 2013-02-06\n AVX, NYSE, -20.00, 2013-01-25\n RWT, NYSE, 85.19, 2013-02-22\n AXE, NYSE, 0.76, 2013-01-30\n CLB, NYSE, 3.54, 2013-01-31\n MD, NYSE, 1.54, 2013-02-01\n THG, NYSE, 6.25, 2013-02-07\n BAP, NYSE, 3.72, 2013-02-06\n DO, NYSE, 28.18, 2013-02-06\n RE, NYSE, 175.86, 2013-02-07\n DST, NYSE, 17.82, 2013-02-01\n EL, NYSE, 11.54, 2013-02-06\n ESC, NYSE, -34.88, 2013-03-01\n MIG, NYSE, -100.00, 2013-02-13\n WAT, NYSE, 0.63, 2013-01-23\n EME, NYSE, 11.48, 2013-02-27\n HIG, NYSE, 80.00, 2013-02-05\n ITT, NYSE, 2.63, 2013-02-28\n SPN, NYSE, 4.26, 2013-02-27\n SWM, NYSE, -9.18, 2013-02-07\n SCCO, NYSE, 
0.00, 2013-02-02\n RCI, NYSE, 20.55, 2013-02-15\n EIX, NYSE, 66.04, 2013-02-27\n IRM, NYSE, -20.00, 2013-03-01\n REV, NYSE, -19.18, 2013-02-06\n SPH, NYSE, -17.46, 2013-02-08\n CCJ, NYSE, 46.34, 2013-02-09\n PGI, NYSE, -6.67, 2013-02-14\n CRR, NYSE, 2.30, 2013-02-01\n BVN, NYSE, -26.67, 2013-03-01\n FCN, NYSE, 11.67, 2013-03-01\n RPT, NYSE, 8.00, 2013-02-13\n TUP, NYSE, 1.79, 2013-01-30\n ASB, NYSE, 0.00, 2013-01-18\n GWR, NYSE, -2.47, 2013-02-13\n TBI, NYSE, 35.71, 2013-02-07\n FFG, NYSE, 24.00, 2013-02-08\n USNA, NYSE, 4.96, 2013-02-06\n CSV, NYSE, 4.35, 2013-02-26\n LVB, NYSE, 12.77, 2013-03-07\n ALR, NYSE, 6.25, 2013-02-16\n OCN, NYSE, -7.84, 2013-03-01\n PAA, NYSE, 42.03, 2013-02-07\n DNR, NYSE, 24.14, 2013-02-22\n HMY, NYSE, 50.00, 2013-02-05\n TGI, NYSE, 5.80, 2013-01-31\n PAG, NYSE, 7.55, 2013-02-07\n GEL, NYSE, -2.86, 2013-02-15\n IM, NYSE, 23.73, 2013-02-14\n LIN, NYSE, -21.92, 2013-03-01\n NUS, NYSE, 2.11, 2013-02-07\n CNI, NYSE, -0.70, 2013-01-23\n LAD, NYSE, 10.45, 2013-02-21\n NSP, NYSE, 4.44, 2013-02-09\n DEL, NYSE, -29.63, 2013-02-28\n DGX, NYSE, -3.81, 2013-01-24\n KRC, NYSE, 3.23, 2013-01-31\n MTH, NYSE, 50.00, 2013-02-01\n NCR, NYSE, 4.35, 2013-02-08\n OFG, NYSE, -50.00, 2013-02-08\n IVZ, NYSE, -4.26, 2013-02-01\n DX, NYSE, 9.68, 2013-02-21\n FBC, NYSE, 38.27, 2013-02-09\n ALV, NYSE, 9.85, 2013-02-01\n ARE, NYSE, 0.87, 2013-02-08\n BBT, NYSE, 2.86, 2013-01-18\n CGG, NYSE, -59.32, 2013-03-02\n BXP, NYSE, 2.42, 2013-01-30\n MS, NYSE, 73.08, 2013-01-19\n SRT, NYSE, 200.00, 2013-02-28\n HLX, NYSE, 162.86, 2013-02-21\n FLS, NYSE, 0.35, 2013-02-22\n MT, NYSE, -880.00, 2013-02-07\n PXD, NYSE, -2.35, 2013-02-14\n SLG, NYSE, 0.87, 2013-01-31\n NAT, NYSE, 0.00, 2013-02-12\n CSU, NYSE, -22.22, 2013-03-07\n DRQ, NYSE, 2.70, 2013-03-01\n FDP, NYSE, -100.00, 2013-02-20\n NLY, NYSE, 35.29, 2013-02-07\n TLM, NYSE, -300.00, 2013-02-18\n TSM, NYSE, 0.00, 2013-01-18\n YUM, NYSE, 2.47, 2013-02-05\n AMG, NYSE, 4.94, 2013-01-30\n EPR, NYSE, -4.40, 2013-02-27\n FE, NYSE, 1.27, 2013-02-26\n LFL, NYSE, -80.00, 2013-05-01\n MTD, NYSE, 8.44, 2013-02-07\n SID, NYSE, 57.14, 2013-03-29\n IN, NYSE, -18.18, 2013-03-12\n AI, NYSE, 9.91, 2013-02-07\n URI, NYSE, 23.30, 2013-01-24\n INGR, NYSE, 4.26, 2013-02-08\n RAS, NYSE, 153.85, 2013-02-14\n UNS, NYSE, 12.50, 2013-02-27\n ASI, NYSE, -17.95, 2013-03-07\n ANH, NYSE, 7.14, 2013-02-08\n OFC, NYSE, 4.08, 2013-02-09\n GPX, NYSE, 6.67, 2013-02-27\n WAC, NYSE, 11.32, 2013-03-19\n RBA, NYSE, -12.50, 2013-02-27\n WDR, NYSE, 5.17, 2013-01-30\n LHO, NYSE, 4.44, 2013-02-21\n LNT, NYSE, -1.72, 2013-02-15\n LVLT, NYSE, 11.11, 2013-02-13\n MFA, NYSE, 0.00, 2013-03-07\n OME, NYSE, 33.33, 2013-03-06\n EQY, NYSE, 7.14, 2013-02-21\n FII, NYSE, 10.00, 2013-01-25\n FMX, NYSE, 39.60, 2013-02-28\n LLL, NYSE, 6.13, 2013-01-31\n VTR, NYSE, 2.06, 2013-02-16\n WCN, NYSE, -7.69, 2013-02-15\n AVB, NYSE, -0.71, 2013-01-31\n GIL, NYSE, 6.67, 2013-02-07\n HZO, NYSE, 10.00, 2013-01-30\n AWR, NYSE, 43.24, 2013-03-01\n CLS, NYSE, 46.67, 2013-01-23\n EPD, NYSE, 7.58, 2013-02-01\n RSG, NYSE, -13.95, 2013-02-08\n WM, NYSE, -5.00, 2013-02-15\n AKR, NYSE, 3.57, 2013-02-06\n CVG, NYSE, 4.17, 2013-02-08\n RRC, NYSE, 228.57, 2013-02-27\n SAP, NYSE, -2.38, 2013-01-24\n CCI, NYSE, 57.14, 2013-01-24\n PQ, NYSE, -20.00, 2013-03-01\n WFT, NYSE, -94.44, 2013-02-27\n CAA, NYSE, 14.29, 2013-02-01\n ENB, NYSE, -6.67, 2013-02-16\n GMK, NYSE, -8.33, 2013-02-28\n MMR, NYSE, 75.00, 2013-01-19\n PB, NYSE, 1.19, 2013-01-26\n VIV, NYSE, -7.25, 2013-02-26\n AXL, NYSE, -111.76, 2013-02-09\n BP, NYSE, 19.05, 
2013-02-06\n ETM, NYSE, 13.04, 2013-02-09\n HT, NYSE, 10.00, 2013-02-21\n BYI, NYSE, 5.26, 2013-02-01\n CEB, NYSE, 4.84, 2013-02-07\n INFY, NYSE, 5.56, 2013-01-12\n JLL, NYSE, -0.38, 2013-01-30\n AZN, NYSE, 24.64, 2013-02-01\n SFG, NYSE, 7.23, 2013-01-30\n TREX, NYSE, 27.78, 2013-02-20\n GS, NYSE, 61.38, 2013-01-17\n SYX, NYSE, -144.44, 2013-03-06\n WCC, NYSE, -2.75, 2013-02-01\n JNPR, NYSE, 26.67, 2013-01-25\n RDN, NYSE, -146.43, 2013-02-12\n RAI, NYSE, 4.11, 2013-02-13\n SKX, NYSE, 172.73, 2013-02-14\n WTM, NYSE, 724.10, 2013-02-06\n NCI, NYSE, 29.17, 2013-02-15\n BLT, NYSE, -21.74, 2013-03-08\n BLK, NYSE, 5.88, 2013-01-18\n CIR, NYSE, 25.45, 2013-03-01\n PKG, NYSE, -1.61, 2013-01-23\n PKI, NYSE, 0.00, 2013-02-01\n UGP, NYSE, 38.10, 2013-02-21\n WWE, NYSE, 0.00, 2013-03-01\n SNN, NYSE, 2.86, 2013-02-08\n UPS, NYSE, -4.35, 2013-02-01\n XOXO, NYSE, 62.50, 2013-03-07\n SLF, NYSE, 36.36, 2013-02-14\n CDR, NYSE, 33.33, 2013-03-08\n RLH, NYSE, -21.43, 2013-03-01\n EW, NYSE, 16.88, 2013-02-05\n MET, NYSE, 5.93, 2013-02-13\n FBR, NYSE, -28.57, 2013-01-31\n VVC, NYSE, 23.81, 2013-02-15\n BAM, NYSE, 148.28, 2013-02-16\n NVS, NYSE, 0.00, 2013-01-24\n VGR, NYSE, -43.75, 2013-02-27\n BHLB, NYSE, 0.00, 2013-01-29\n CRL, NYSE, 6.67, 2013-02-14\n CYH, NYSE, 0.00, 2013-02-22\n MBT, NYSE, 65.71, 2013-03-20\n MTOR, NYSE, -375.00, 2013-01-31\n CNQ, NYSE, -29.55, 2013-03-08\n ERJ, NYSE, -25.27, 2013-03-13\n VZ, NYSE, -28.30, 2013-01-23\n EVC, NYSE, 12.50, 2013-02-28\n PBR, NYSE, 0.00, 2013-02-05\n XEL, NYSE, 3.57, 2013-02-01\n ALE, NYSE, 0.00, 2013-02-16\n HW, NYSE, -20.00, 2013-01-30\n POL, NYSE, 0.00, 2013-01-30\n UMC, NYSE, 0.00, 2013-02-07\n ASX, NYSE, 41.43, 2013-01-31\n COH, NYSE, -4.65, 2013-01-23\n CXW, NYSE, 7.32, 2013-02-14\n DVA, NYSE, 6.33, 2013-02-15\n EXC, NYSE, -1.54, 2013-02-08\n MCO, NYSE, 7.14, 2013-02-09\n BRFS, NYSE, 43.48, 2013-03-06\n TU, NYSE, -1.15, 2013-02-16\n WIT, NYSE, 0.00, 2013-01-18\n ERF, NYSE, 462.50, 2013-02-22\n GG, NYSE, -22.22, 2013-02-15\n HNT, NYSE, -2.70, 2013-01-31\n NXY, NYSE, -23.44, 2013-02-26\n NYCB, NYSE, -3.45, 2013-01-31\n SXT, NYSE, -8.33, 2013-02-08\n CPG, NYSE, -191.67, 2013-03-15\n AMX, NYSE, -40.00, 2013-02-13\n MPX, NYSE, -50.00, 2013-01-24\n OIS, NYSE, -5.82, 2013-02-20\n BH, NYSE, -35.35, 2013-01-26\n MMP, NYSE, 6.15, 2013-02-06\n PES, NYSE, 250.00, 2013-02-14\n ABB, NYSE, -18.75, 2013-02-15\n RDY, NYSE, -27.27, 2013-02-15\n KMR, NYSE, -19.23, 2013-02-22\n GEN, NYSE, -20.00, 2013-02-12\n ADS, NYSE, 2.38, 2013-02-01\n CVI, NYSE, 5.15, 2013-03-13\n FTI, NYSE, 0.00, 2013-02-13\n PRA, NYSE, 10.64, 2013-02-20\n STO, NYSE, 26.47, 2013-02-08\n BEL, NYSE, -266.67, 2013-02-21\n FIS, NYSE, -8.82, 2013-02-13\n COL, NYSE, 4.44, 2013-01-19\n KAI, NYSE, 7.32, 2013-02-27\n FRM, NYSE, 233.33, 2013-03-09\n ABC, NYSE, 0.00, 2013-01-25\n BG, NYSE, -76.15, 2013-02-08\n FRO, NYSE, 106.52, 2013-02-22\n ECA, NYSE, -3.12, 2013-02-15\n CS, NYSE, -54.76, 2013-02-08\n EEP, NYSE, -30.77, 2013-02-14\n CVX, NYSE, -1.65, 2013-02-02\n DB, NYSE, 280.49, 2013-02-01\n GXP, NYSE, 200.00, 2013-03-01\n JHX, NYSE, 371.43, 2013-02-28\n PFG, NYSE, 10.81, 2013-02-01\n PVR, NYSE, -227.78, 2013-02-21\n AAP, NYSE, 17.33, 2013-02-08\n KND, NYSE, 4.55, 2013-02-26\n WTW, NYSE, 9.09, 2013-02-14\n CNC, NYSE, 42.42, 2013-02-06\n PRU, NYSE, -2.87, 2013-02-07\n BCH, NYSE, 12.94, 2013-02-06\n NS, NYSE, -19.35, 2013-02-02\n ITUB, NYSE, -5.00, 2013-02-05\n SXL, NYSE, 20.88, 2013-02-21\n VALE, NYSE, -26.00, 2013-02-28\n TNP, NYSE, -128.57, 2013-04-20\n LCI, NYSE, 233.33, 2013-02-08\n AUO, NYSE, -122.73, 
2013-02-07\n GTI, NYSE, 19.05, 2013-02-27\n HNR, NYSE, -127.27, 2013-05-04\n MWE, NYSE, -38.89, 2013-02-28\n NLS, NYSE, 4.55, 2013-03-05\n RGC, NYSE, 40.00, 2013-02-08\n SBS, NYSE, 48.25, 2013-03-22\n JAH, NYSE, 2.40, 2013-02-15\n NPO, NYSE, 110.71, 2013-02-08\n TRI, NYSE, 9.09, 2013-02-14\n CAE, NYSE, 12.50, 2013-02-14\n LF, NYSE, 971.43, 2013-02-07\n SNY, NYSE, 1.30, 2013-02-08\n WHG, NYSE, 15.91, 2013-02-08\n BANC, NYSE, -300.00, 2013-03-02\n GTN, NYSE, 4.35, 2013-02-21\n BAK, NYSE, -150.00, 2013-02-08\n COP, NYSE, 1.42, 2013-01-31\n CNP, NYSE, 40.00, 2013-02-28\n EEQ, NYSE, -18.18, 2013-02-15\n MRH, NYSE, 60.26, 2013-02-08\n NGS, NYSE, 26.09, 2013-03-15\n NRP, NYSE, 34.88, 2013-02-14\n PXP, NYSE, -22.64, 2013-02-22\n XEC, NYSE, 9.26, 2013-02-20\n IAG, NYSE, -11.11, 2013-02-21\n TS, NYSE, -16.44, 2013-02-22\n EGO, NYSE, 6.67, 2013-02-23\n JNS, NYSE, 35.71, 2013-01-25\n PFS, NYSE, 7.41, 2013-02-02\n ENH, NYSE, 21.68, 2013-02-08\n IHG, NYSE, 5.56, 2013-02-20\n CNX, NYSE, 95.45, 2013-02-01\n AMT, NYSE, -17.07, 2013-02-27\n ABG, NYSE, 10.77, 2013-02-20\n LII, NYSE, 0.00, 2013-02-06\n SRE, NYSE, 11.34, 2013-02-27\n AEE, NYSE, -36.36, 2013-02-21\n PLD, NYSE, 0.00, 2013-02-07\n SAH, NYSE, 4.00, 2013-02-21\n GPI, NYSE, -17.50, 2013-02-20\n FIX, NYSE, -11.11, 2013-03-01\n MMS, NYSE, 12.50, 2013-02-08\n SRI, NYSE, -28.57, 2013-03-02\n RTEC, NYSE, 6.25, 2013-02-05\n NOV, NYSE, 3.47, 2013-02-02\n DF, NYSE, 33.33, 2013-02-14\n SAM, NYSE, 1.63, 2013-02-21\n RL, NYSE, 8.60, 2013-02-07\n FLR, NYSE, 132.35, 2013-02-21\n ALL, NYSE, 942.86, 2013-02-07\n ATI, NYSE, 5.88, 2013-01-24\n EE, NYSE, -14.29, 2013-02-20\n AIT, NYSE, 0.00, 2013-02-01\n CHH, NYSE, 9.76, 2013-02-12\n FMS, NYSE, 105.77, 2013-02-27\n BCO, NYSE, -7.69, 2013-02-02\n CBB, NYSE, -125.00, 2013-02-28\n MWW, NYSE, 0.00, 2013-02-08\n PSA, NYSE, 5.68, 2013-02-22\n E, NYSE, 2.83, 2013-02-16\n JPM, NYSE, 15.83, 2013-01-17\n USB, NYSE, 1.35, 2013-01-17\n HON, NYSE, 0.92, 2013-01-26\n ITG, NYSE, 100.00, 2013-02-01\n ARB, NYSE, 6.25, 2013-02-26\n APL, NYSE, 0.00, 2013-02-19\n AVA, NYSE, -42.22, 2013-02-21\n AXS, NYSE, 64.96, 2013-02-05\n CHT, NYSE, 5.26, 2013-01-31\n MOH, NYSE, 145.45, 2013-02-08\n CVD, NYSE, 2.82, 2013-01-25\n AHT, NYSE, 2.63, 2013-02-28\n GPK, NYSE, 12.50, 2013-02-08\n CNO, NYSE, 8.70, 2013-02-12\n AUQ, NYSE, -28.57, 2013-03-26\n JRN, NYSE, 34.62, 2013-03-08\nGRP.U, NYSE, -14.92, 2013-03-06\n NFP, NYSE, 11.43, 2013-02-15\n CRI, NYSE, 2.30, 2013-02-28\n FMD, NYSE, -20.00, 2013-02-08\n FPO, NYSE, 10.34, 2013-02-22\n TRQ, NYSE, -350.00, 2013-03-26\n WLL, NYSE, 9.21, 2013-02-28\n AEL, NYSE, 14.63, 2013-02-21\n AHL, NYSE, 87.60, 2013-02-08\n AUY, NYSE, -3.70, 2013-02-21\n CMP, NYSE, 0.00, 2013-02-07\n KRO, NYSE, -400.00, 2013-03-13\n TPX, NYSE, 9.09, 2013-01-25\n UTI, NYSE, 75.00, 2013-02-01\n PJC, NYSE, 31.34, 2013-01-31\n TRW, NYSE, 14.81, 2013-02-16\n AIZ, NYSE, 122.58, 2013-02-07\n HTH, NYSE, 62.50, 2013-03-16\n ETP, NYSE, 0.00, 2013-02-21\n SMI, NYSE, 500.00, 2013-02-07\n LSE, NYSE, -6.25, 2013-02-16\n BBD, NYSE, -2.63, 2013-01-29\n NRG, NYSE, 124.14, 2013-02-28\n HOS, NYSE, 29.17, 2013-02-07\n ABR, NYSE, 160.00, 2013-02-16\n FHN, NYSE, 0.00, 2013-01-19\n AGO, NYSE, 32.39, 2013-02-28\n HSP, NYSE, 1.85, 2013-02-14\n HNI, NYSE, -6.98, 2013-02-06\n GHL, NYSE, -32.43, 2013-01-24\n XPO, NYSE, -14.00, 2013-02-28\n CVO, NYSE, 23.08, 2013-02-28\n CHE, NYSE, 16.92, 2013-02-19\n GNW, NYSE, 30.77, 2013-02-06\n CBG, NYSE, 12.24, 2013-02-07\n SFL, NYSE, -26.67, 2013-02-26\n NEU, NYSE, -15.57, 2013-01-29\n GOL, NYSE, -109.09, 2013-03-26\n CAB, 
NYSE, 4.17, 2013-02-15\n LTM, NYSE, 1.82, 2013-02-22\n VVI, NYSE, 10.53, 2013-02-02\n WCG, NYSE, 0.00, 2013-02-14\n HEP, NYSE, -2.63, 2013-02-22\n DPZ, NYSE, 8.47, 2013-03-01\n BDC, NYSE, 9.86, 2013-02-08\n EGY, NYSE, -171.43, 2013-03-15\n LPL, NYSE, 2.63, 2013-02-22\n ENS, NYSE, 12.82, 2013-02-07\n BMR, NYSE, 5.88, 2013-02-06\n ACC, NYSE, 9.26, 2013-02-13\n KRG, NYSE, -9.09, 2013-02-08\n WLK, NYSE, 13.60, 2013-02-20\n EXR, NYSE, 4.65, 2013-02-22\n CNS, NYSE, 16.67, 2013-01-24\n IOC, NYSE, 264.29, 2013-02-28\n STON, NYSE, -233.33, 2013-03-16\n CPL, NYSE, 38.10, 2013-03-13\n TPGI, NYSE, -114.29, 2013-02-14\n SHO, NYSE, -3.33, 2013-02-20\n CUBE, NYSE, 5.00, 2013-02-22\n NRF, NYSE, 170.37, 2013-02-15\n BBW, NYSE, -68.29, 2013-02-15\n DLR, NYSE, 4.31, 2013-02-16\n NWE, NYSE, 2.63, 2013-02-15\n ORA, NYSE, 200.00, 2013-02-28\n NP, NYSE, 5.26, 2013-02-21\n SMA, NYSE, -21.05, 2013-02-22\n BBG, NYSE, 25.00, 2013-02-22\n BXC, NYSE, -163.16, 2013-02-14\n KNL, NYSE, 32.14, 2013-02-06\n LVS, NYSE, -8.47, 2013-01-31\n HLF, NYSE, 0.96, 2013-02-20\n MIC, NYSE, -20.41, 2013-02-21\n PHH, NYSE, -11.54, 2013-02-07\n CE, NYSE, 6.35, 2013-01-29\n EDR, NYSE, 0.00, 2013-02-20\n WTI, NYSE, 8.33, 2013-02-27\n ARC, NYSE, -100.00, 2013-03-01\n PBH, NYSE, 8.82, 2013-02-08\n HUN, NYSE, 0.00, 2013-02-13\n DLB, NYSE, 4.44, 2013-01-30\n DSX, NYSE, -33.33, 2013-03-15\n LAZ, NYSE, 84.85, 2013-02-08\n TGP, NYSE, 1.82, 2013-02-22\n TLP, NYSE, -43.48, 2013-03-13\n DRH, NYSE, 16.00, 2013-03-01\n HTGC, NYSE, 8.70, 2013-03-01\n KFN, NYSE, 5.26, 2013-02-06\n THS, NYSE, 0.00, 2013-02-22\n NSR, NYSE, -12.50, 2013-02-06\n WAL, NYSE, 0.00, 2013-01-25\n SLW, NYSE, 2.04, 2013-03-22\n MPW, NYSE, 0.00, 2013-02-08\nRDS.B, NYSE, 16.00, 2013-02-01\n GNK, NYSE, -24.71, 2013-02-21\n MFB, NYSE, 4.76, 2013-03-07\nRDS.A, NYSE, 9.95, 2013-02-01\n ITC, NYSE, 0.93, 2013-02-28\n FTK, NYSE, -158.82, 2013-03-14\n PIKE, NYSE, 168.00, 2013-02-06\n ALJ, NYSE, 0.00, 2013-03-07\n DRC, NYSE, -4.55, 2013-03-01\n STN, NYSE, 8.06, 2013-02-22\n SSW, NYSE, -6.90, 2013-03-06\n CF, NYSE, 3.41, 2013-02-20\n HPY, NYSE, 0.00, 2013-02-08\n ACCO, NYSE, 0.00, 2013-02-14\n ROC, NYSE, -6.25, 2013-02-20\n WPZ, NYSE, -28.57, 2013-02-20\n LCC, NYSE, 44.44, 2013-01-24\n GLP, NYSE, 58.82, 2013-03-15\n AMP, NYSE, 15.54, 2013-01-31\n DHT, NYSE, 108.33, 2013-01-30\n FNF, NYSE, 17.86, 2013-02-20\n NM, NYSE, 20.00, 2013-02-20\n CCO, NYSE, 25.00, 2013-02-20\n BWP, NYSE, 0.00, 2013-02-12\n ICE, NYSE, 5.14, 2013-02-07\n BKD, NYSE, -57.14, 2013-02-12\n AAV, NYSE, 350.00, 2013-03-28\n BAS, NYSE, -42.11, 2013-02-20\n CPA, NYSE, -9.87, 2013-02-07\n LYV, NYSE, -147.06, 2013-02-27\n WNR, NYSE, 5.84, 2013-03-01\n CMG, NYSE, 0.00, 2013-02-06\n RGP, NYSE, -180.00, 2013-02-21\n KOP, NYSE, 11.86, 2013-02-15\n UAL, NYSE, -7.41, 2013-01-25\n ETE, NYSE, -90.91, 2013-02-21\n RSO, NYSE, -17.65, 2013-03-05\n XCO, NYSE, 6.25, 2013-02-21\n PAC, NYSE, 41.18, 2013-02-28\n NYX, NYSE, 10.26, 2013-02-06\n TDG, NYSE, 51.65, 2013-02-05\n BMA, NYSE, 18.40, 2013-02-15\n THI, NYSE, -2.82, 2013-02-22\n BTE, NYSE, -40.48, 2013-03-08\n CNH, NYSE, 29.58, 2013-02-01\n GLA, NYSE, 67.44, 2013-02-14\n POR, NYSE, -9.52, 2013-02-23\n HIL, NYSE, -100.00, 2013-03-12\n HVB, NYSE, -20.00, 2013-02-01\n KS, NYSE, 0.00, 2013-02-14\n HK, NYSE, 0.00, 2013-03-01\n DCP, NYSE, 59.62, 2013-02-28\n DK, NYSE, 10.10, 2013-03-08\n CODI, NYSE, 14.81, 2013-03-07\n VG, NYSE, 25.00, 2013-02-14\n MA, NYSE, 1.46, 2013-02-01\n MWA, NYSE, -200.00, 2013-02-06\n KOG, NYSE, 14.29, 2013-03-01\n PWE, NYSE, -500.00, 2013-02-15\n PGTI, NYSE, 100.00, 
2013-02-21\n AWH, NYSE, 16.23, 2013-02-14\n NSH, NYSE, -65.71, 2013-02-02\n WYN, NYSE, 5.00, 2013-02-07\n WNS, NYSE, 0.00, 2013-01-17\n AYR, NYSE, 36.84, 2013-02-22\n EVR, NYSE, 55.77, 2013-01-31\n HBI, NYSE, 7.00, 2013-02-06\n WU, NYSE, 20.00, 2013-02-13\n OC, NYSE, -31.25, 2013-02-21\n MR, NYSE, 2.08, 2013-02-26\n DAC, NYSE, -21.43, 2013-02-12\n AWI, NYSE, 3.03, 2013-02-20\n SUSS, NYSE, 444.44, 2013-02-28\n DEI, NYSE, 0.00, 2013-02-13\n OB, NYSE, -200.00, 2013-02-06\n SBH, NYSE, -5.88, 2013-02-08\n EBS, NYSE, -4.35, 2013-03-08\n KBR, NYSE, 122.22, 2013-02-21\n AER, NYSE, 30.95, 2013-02-21\n NOA, NYSE, -11.11, 2013-02-06\n SPR, NYSE, -2.27, 2013-02-13\n ANW, NYSE, 0.00, 2013-02-28\n DCT, NYSE, 10.00, 2013-02-08\n SE, NYSE, -3.03, 2013-02-06\n TOO, NYSE, 16.67, 2013-02-22\n TSL, NYSE, -39.77, 2013-02-27\n TWC, NYSE, 1.95, 2013-02-01\n MVO, NYSE, -5.06, 2013-03-15\n CO, NYSE, 40.00, 2013-02-27\n EXK, NYSE, -45.83, 2013-03-13\n EIG, NYSE, -25.00, 2013-02-28\n HF, NYSE, 21.62, 2013-03-07\n CEL, NYSE, 34.78, 2013-03-05\n FIG, NYSE, 53.85, 2013-02-28\n NGLS, NYSE, 0.00, 2013-02-15\n TCAP, NYSE, 3.64, 2013-03-07\n GFA, NYSE, -483.33, 2013-03-12\n BR, NYSE, -5.56, 2013-02-08\n SCR, NYSE, 85.71, 2013-03-08\n CNK, NYSE, -12.82, 2013-02-21\n DAL, NYSE, 0.00, 2013-01-23\n ORN, NYSE, 250.00, 2013-03-01\n ACM, NYSE, 9.09, 2013-02-06\n JMP, NYSE, 62.50, 2013-02-14\n SLH, NYSE, 1.69, 2013-02-08\n CLR, NYSE, 16.85, 2013-02-28\n BGS, NYSE, -17.95, 2013-02-15\n STAR, NYSE, 12.50, 2013-02-27\n YGE, NYSE, -74.07, 2013-03-05\n DFS, NYSE, -9.40, 2013-03-06\n TEL, NYSE, 1.56, 2013-01-24\n BX, NYSE, 25.53, 2013-02-01\n SEP, NYSE, 8.11, 2013-02-06\n BZ, NYSE, -30.00, 2013-02-27\n PPO, NYSE, -28.26, 2013-02-21\n PRO, NYSE, 25.00, 2013-02-13\n WBC, NYSE, 13.68, 2013-02-16\n DHX, NYSE, 7.14, 2013-01-31\n PMC, NYSE, 13.79, 2013-02-08\n HGG, NYSE, 0.00, 2013-02-01\n OWW, NYSE, -14.29, 2013-02-15\n VR, NYSE, 35.58, 2013-02-01\n CXO, NYSE, -5.88, 2013-02-21\n G, NYSE, 4.76, 2013-02-08\n EJ, NYSE, 160.00, 2013-03-13\n WX, NYSE, 32.00, 2013-03-08\n CMLP, NYSE, -50.00, 2013-02-06\n VMW, NYSE, -5.56, 2013-01-29\n CZZ, NYSE, 63.64, 2013-02-08\n CGA, NYSE, -3.23, 2013-02-09\n TDC, NYSE, 5.71, 2013-02-08\n FLY, NYSE, 137.65, 2013-03-08\n DUF, NYSE, 6.25, 2013-02-26\n MAIN, NYSE, 12.00, 2013-03-08\n REN, NYSE, -50.00, 2013-03-08\n TGH, NYSE, 9.57, 2013-02-13\n DFT, NYSE, -5.00, 2013-02-07\n RF, NYSE, 10.00, 2013-01-23\n PZN, NYSE, -22.22, 2013-02-13\n LL, NYSE, 19.05, 2013-02-21\n NMM, NYSE, 0.00, 2013-01-25\n OZM, NYSE, 5.48, 2013-02-08\n ES, NYSE, -5.08, 2013-02-20\n MSCI, NYSE, -1.89, 2013-02-08\n ARR, NYSE, -18.52, 2013-02-23\n KW, NYSE, 275.00, 2013-03-13\n GTS, NYSE, -10.17, 2013-02-07\n FOR, NYSE, 222.22, 2013-02-14\n LRN, NYSE, 4.35, 2013-02-06\n TNK, NYSE, -125.00, 2013-02-22\n N, NYSE, 21.43, 2013-02-01\n DAN, NYSE, 5.56, 2013-02-22\n BIP, NYSE, 12.07, 2013-02-09\n CPN, NYSE, -500.00, 2013-02-14\n SOL, NYSE, 2.70, 2013-03-15\n PM, NYSE, 1.64, 2013-02-08\n HI, NYSE, 7.89, 2013-02-05\n V, NYSE, 2.25, 2013-02-07\n IPI, NYSE, 0.00, 2013-02-14\n AWK, NYSE, -14.29, 2013-02-27\n HTS, NYSE, 37.84, 2013-02-13\n DPS, NYSE, -4.71, 2013-02-14\n CFX, NYSE, 7.69, 2013-02-07\n WES, NYSE, -27.91, 2013-02-28\n SB, NYSE, -10.00, 2013-02-21\n LO, NYSE, 3.95, 2013-02-14\n LPS, NYSE, 10.45, 2013-02-08\n FF, NYSE, -31.82, 2013-03-19\n NNA, NYSE, 150.00, 2013-02-13\n EPB, NYSE, 14.55, 2013-01-17\n JBT, NYSE, 3.23, 2013-03-07\n DL, NYSE, 33.33, 2013-02-27\n RAX, NYSE, -4.55, 2013-02-13\n HCI, NYSE, 67.61, 2013-03-06\n EC, NYSE, -20.47, 
2013-02-16\n CLW, NYSE, 10.53, 2013-02-21\n MJN, NYSE, 5.88, 2013-02-01\n EPC, NYSE, 1.85, 2013-02-01\n BPI, NYSE, -3.33, 2013-03-13\n RST, NYSE, 55.56, 2013-03-01\n DGI, NYSE, 92.31, 2013-02-27\n SWI, NYSE, 10.34, 2013-02-05\n CYS, NYSE, -46.15, 2013-02-07\n IVR, NYSE, 20.31, 2013-02-06\n BUD, NYSE, -5.08, 2013-02-28\n PMT, NYSE, -2.35, 2013-02-08\n STWD, NYSE, 15.38, 2013-02-28\n CFN, NYSE, -16.98, 2013-02-09\n SPB, NYSE, 71.43, 2013-02-07\n ARI, NYSE, -10.34, 2013-02-28\n CLNY, NYSE, -13.89, 2013-03-07\n ART, NYSE, 300.00, 2013-02-15\n SEM, NYSE, 12.00, 2013-02-22\n BSBR, NYSE, 578.57, 2013-03-28\n DOLE, NYSE, -6100.00, 2013-03-13\n VSI, NYSE, 0.00, 2013-02-27\n TWO, NYSE, -15.15, 2013-02-07\n CVE, NYSE, -14.29, 2013-02-15\n H, NYSE, 81.82, 2013-02-14\n LEA, NYSE, 7.25, 2013-02-02\n CLD, NYSE, 8.00, 2013-02-14\n AOL, NYSE, 7.50, 2013-02-09\n CHSP, NYSE, 5.13, 2013-02-22\n PEB, NYSE, 0.00, 2013-02-22\n CIT, NYSE, 60.94, 2013-01-30\n KAR, NYSE, -4.55, 2013-02-21\n CIE, NYSE, -66.67, 2013-02-27\n TMH, NYSE, 8.33, 2013-02-06\n KRA, NYSE, -300.00, 2013-02-28\n SYA, NYSE, -29.41, 2013-02-05\n TRNO, NYSE, -162.50, 2013-02-16\n PDM, NYSE, -2.70, 2013-02-08\n GNRC, NYSE, 26.09, 2013-02-15\n ACW, NYSE, -2.17, 2013-03-07\n BALT, NYSE, -11.76, 2013-02-21\n ST, NYSE, 2.17, 2013-01-31\n SEMG, NYSE, 55.56, 2013-03-01\n CALX, NYSE, 20.00, 2013-02-06\n MXL, NYSE, -57.14, 2013-02-06\n STNG, NYSE, -60.00, 2013-02-26\n PRI, NYSE, -1.43, 2013-02-08\n SDRL, NYSE, -93.65, 2013-03-01\n CLDT, NYSE, 0.00, 2013-02-20\n EXL, NYSE, 0.00, 2013-02-28\n LYB, NYSE, -0.88, 2013-02-02\n PNG, NYSE, 7.14, 2013-02-07\n PLOW, NYSE, -25.00, 2013-03-12\n SIX, NYSE, 198.00, 2013-02-21\n NKA, NYSE, 1066.67, 2013-02-01\n RRTS, NYSE, 0.00, 2013-02-07\n JKS, NYSE, -332.48, 2013-04-11\n CODE, NYSE, -13.64, 2013-01-30\n FAF, NYSE, 44.64, 2013-02-22\n QEP, NYSE, 3.13, 2013-02-20\n OAS, NYSE, 6.52, 2013-02-26\n VPG, NYSE, 15.38, 2013-02-13\n HPP, NYSE, 9.52, 2013-03-07\n FN, NYSE, 9.09, 2013-02-05\n ECT, NYSE, 65.85, 2013-03-16\n QUAD, NYSE, -6.67, 2013-03-05\n KKR, NYSE, 54.84, 2013-02-08\n RLD, NYSE, 20.00, 2013-02-07\n AMRC, NYSE, 44.44, 2013-03-19\n GDOT, NYSE, 50.00, 2013-02-01\n AT, NYSE, -160.00, 2013-03-01\n ENV, NYSE, 0.00, 2013-02-15\n IL, NYSE, 200.00, 2013-02-22\n WSR, NYSE, -12.00, 2013-03-13\n SFUN, NYSE, 35.71, 2013-02-09\n COR, NYSE, 5.00, 2013-02-23\n VC, NYSE, 20.62, 2013-03-01\n CCSC, NYSE, -20.00, 2013-03-07\n CCG, NYSE, 0.00, 2013-02-27\n EFC, NYSE, -72.73, 2013-02-14\n TOWR, NYSE, 183.33, 2013-02-16\n CHMT, NYSE, -53.13, 2013-02-26\n HBM, NYSE, 200.00, 2013-02-21\n EXAM, NYSE, 55.56, 2013-02-28\n XUE, NYSE, 7.69, 2013-02-28\n CMRE, NYSE, 6.67, 2013-01-24\n NOAH, NYSE, 20.00, 2013-02-26\n IPHI, NYSE, -40.00, 2013-02-05\n BITA, NYSE, 33.33, 2013-03-08\n BAH, NYSE, 11.11, 2013-01-31\n GM, NYSE, -2.04, 2013-02-15\n TROX, NYSE, -60.00, 2013-02-21\n DANG, NYSE, 20.00, 2013-03-08\n YOKU, NYSE, 9.09, 2013-03-01\n FRC, NYSE, -16.44, 2013-01-17\n RFP, NYSE, 52.38, 2013-02-13\n ISS, NYSE, 15.38, 2013-03-09\n WD, NYSE, -14.29, 2013-03-07\n FLT, NYSE, 10.00, 2013-02-08\n GCAP, NYSE, -325.00, 2013-03-13\n FRF, NYSE, -25.93, 2013-03-29\n SWFT, NYSE, 46.15, 2013-01-24\n AG, NYSE, -10.34, 2013-02-27\n QRE, NYSE, -174.07, 2013-03-07\n AAT, NYSE, 11.76, 2013-02-20\n MCC, NYSE, 5.41, 2013-02-07\n NLSN, NYSE, 3.51, 2013-02-12\n AGRO, NYSE, -71.43, 2013-03-22\n BKU, NYSE, 27.08, 2013-01-30\n INXN, NYSE, -38.89, 2013-02-28\n NPTN, NYSE, 16.67, 2013-02-22\n INN, NYSE, 25.00, 2013-02-27\n KMI, NYSE, -5.88, 2013-01-17\n HCA, NYSE, 9.64, 
2013-02-05\n MX, NYSE, 135.21, 2013-01-31\n HII, NYSE, 8.89, 2013-02-28\n QIHU, NYSE, 175.00, 2013-03-06\n APO, NYSE, 119.48, 2013-02-09\n GNC, NYSE, 8.70, 2013-02-15\n SDT, NYSE, 11.48, 2013-03-16\n UAN, NYSE, 16.67, 2013-02-28\n ARCO, NYSE, 5.00, 2013-03-09\n ELLI, NYSE, 36.36, 2013-02-15\n TMS, NYSE, -23.81, 2013-02-15\n SQNS, NYSE, -16.00, 2013-02-08\n STAG, NYSE, 17.24, 2013-02-21\n AL, NYSE, 8.33, 2013-03-01\n TLLP, NYSE, 10.42, 2013-02-12\n RENN, NYSE, 14.29, 2013-03-12\n NQ, NYSE, 800.00, 2013-03-07\n THR, NYSE, -14.29, 2013-02-08\n KOS, NYSE, 125.00, 2013-02-26\n RLJ, NYSE, 4.35, 2013-02-28\n NGL, NYSE, -7.41, 2013-02-16\n FENG, NYSE, 100.00, 2013-03-07\n LNKD, NYSE, 900.00, 2013-02-08\n NMFC, NYSE, 5.88, 2013-03-07\n ACTV, NYSE, 5.26, 2013-02-15\n TAOM, NYSE, 700.00, 2013-03-15\n RATE, NYSE, -60.00, 2013-02-13\n VHS, NYSE, -22.22, 2013-01-31\n MPC, NYSE, 8.13, 2013-01-31\n MITT, NYSE, -1.16, 2013-03-06\n OILT, NYSE, 0.00, 2013-03-07\n SXC, NYSE, 14.71, 2013-02-06\n AMTG, NYSE, -8.57, 2013-03-07\n AMID, NYSE, -2500.00, 2013-04-17\n WAIR, NYSE, -7.41, 2013-01-30\n PER, NYSE, -7.58, 2013-03-02\n PPP, NYSE, -44.44, 2013-02-22\n FNV, NYSE, -8.33, 2013-03-20\n FSM, NYSE, 16.67, 2013-03-21\n FBHS, NYSE, 4.55, 2013-02-01\n XLS, NYSE, 4.44, 2013-03-02\n XYL, NYSE, 2.17, 2013-02-08\n NDRO, NYSE, 4.76, 2013-03-19\n RNF, NYSE, -33.33, 2013-03-20\n VAC, NYSE, 25.53, 2013-02-22\n CHKR, NYSE, -7.25, 2013-03-16\n PACD, NYSE, 14.29, 2013-02-28\n INVN, NYSE, 0.00, 2013-01-24\n DLPH, NYSE, 3.45, 2013-02-06\n MN, NYSE, 0.00, 2013-02-14\n RRMS, NYSE, -25.00, 2013-03-01\n WPX, NYSE, -400.00, 2013-03-01\n LPI, NYSE, 0.00, 2013-03-13\n SN, NYSE, -80.00, 2013-03-07\n KORS, NYSE, 60.00, 2013-02-13\n BCEI, NYSE, -7.89, 2013-03-15\n BOXC, NYSE, 4.78, 2013-01-29\n PVG, NYSE, -25.00, 2013-03-06\n POST, NYSE, 30.43, 2013-02-08\n SLCA, NYSE, 32.26, 2013-02-27\n MTDR, NYSE, -116.67, 2013-03-14\n GWAY, NYSE, -200.00, 2013-02-13\n EPAM, NYSE, -10.81, 2013-02-28\n RNDY, NYSE, 5.56, 2013-03-01\n CPAC, NYSE, -13.33, 2013-02-21\n PRLB, NYSE, 7.69, 2013-02-14\n YELP, NYSE, -50.00, 2013-02-07\n NSM, NYSE, 7.58, 2013-03-08\n ALSN, NYSE, 257.14, 2013-02-20\n DWRE, NYSE, 350.00, 2013-02-15\n VNTV, NYSE, 16.13, 2013-02-21\n ET, NYSE, 34.78, 2013-02-22\n VIPS, NYSE, 1100.00, 2013-02-22\n VCRA, NYSE, -33.33, 2013-02-28\n RM, NYSE, -1.89, 2013-02-28\n BNNY, NYSE, 0.00, 2013-02-12\n MM, NYSE, 200.00, 2013-02-20\n RXN, NYSE, -15.00, 2013-02-12\n GLOG, NYSE, -20.00, 2013-02-28\n PBA, NYSE, 44.44, 2013-03-02\n RPAI, NYSE, 15.79, 2013-02-20\n OAK, NYSE, 63.33, 2013-02-15\n FET, NYSE, -3.45, 2013-02-15\n MRC, NYSE, 17.02, 2013-02-22\n PSX, NYSE, 21.18, 2013-01-31\n TUMI, NYSE, 0.00, 2013-03-21\n ACRE, NYSE, -38.10, 2013-04-02\n EVER, NYSE, 17.24, 2013-01-31\n PDH, NYSE, -13.79, 2013-02-07\n WMC, NYSE, 3.23, 2013-04-03\n WAGE, NYSE, 0.00, 2013-02-21\n HTA, NYSE, 0.00, 2013-02-21\n ALEX, NYSE, 42.86, 2013-02-20\n BKW, NYSE, 53.33, 2013-02-16\n EQM, NYSE, 51.22, 2013-01-25\n NOW, NYSE, 38.46, 2013-01-31\n EGL, NYSE, 18.46, 2013-03-13\n NGVC, NYSE, 25.00, 2013-02-01\n NTI, NYSE, -25.00, 2013-03-14\n AMRE, NYSE, 4.35, 2013-02-20\n GMED, NYSE, 15.79, 2013-02-28\n MANU, NYSE, -46.43, 2013-02-15\n HCLP, NYSE, -28.57, 2013-02-01\n ADT, NYSE, 4.76, 2013-01-31\n TRLA, NYSE, -20.00, 2013-02-13\n SRC, NYSE, 8.82, 2013-02-28\n NBHC, NYSE, -14.29, 2013-01-29\n BSMX, NYSE, -4.17, 2013-02-19\n HY, NYSE, 14.53, 2013-02-20\n SMLP, NYSE, 40.00, 2013-03-14\n DYN, NYSE, -1714.29, 2013-03-15\n LXFR, NYSE, 43.75, 2013-03-12\n LOCK, NYSE, 16.67, 
2013-02-21\n JMI, NYSE, 97.78, 2013-03-22\n BERY, NYSE, -40.00, 2013-02-01\n FLTX, NYSE, 0.00, 2013-02-21\n ANFI, NYSE, 30.77, 2013-02-26\n SSTK, NYSE, -100.00, 2013-02-22\n SDLP, NYSE, 90.91, 2013-03-01\n MPLX, NYSE, -25.00, 2013-01-31\n WWAV, NYSE, 5.88, 2013-02-14\n SXE, NYSE, -4121.43, 2013-03-29\n DKL, NYSE, -5.56, 2013-03-06\n RKUS, NYSE, -20.00, 2013-02-13\n WGP, NYSE, 57.14, 2013-02-28\n PBF, NYSE, -92.31, 2013-03-01\n SBY, NYSE, 0.00, 2013-03-01\n RIOM, NYSE, 77.78, 2013-03-29\n BFAM, NYSE, -1186.36, 2013-03-27\n ZTS, NYSE, -79.41, 2013-03-29\n DDC, NYSE, -39.13, 2013-04-04\n ABM, NYSE, 18.18, 2013-03-05\n ANN, NYSE, 0.00, 2013-03-09\n BBY, NYSE, 5.81, 2013-03-02\n BF.B, NYSE, 4.29, 2013-03-07\n BKE, NYSE, 2.40, 2013-03-15\n BNS, NYSE, -3.17, 2013-03-06\n BRC, NYSE, -22.45, 2013-02-22\n CATO, NYSE, -3.57, 2013-03-22\n COO, NYSE, 2.50, 2013-03-08\n CPB, NYSE, 6.06, 2013-02-16\n CFI, NYSE, 10.34, 2013-02-28\n DCI, NYSE, -10.53, 2013-02-26\n DDS, NYSE, -1.03, 2013-02-26\n DE, NYSE, 17.02, 2013-02-14\n DY, NYSE, 50.00, 2013-02-27\n EV, NYSE, -3.85, 2013-02-21\n ENZ, NYSE, -133.33, 2013-03-13\n ESL, NYSE, 13.11, 2013-03-01\nFCE.A, NYSE, 9.09, 2013-03-28\n M, NYSE, 3.54, 2013-02-27\n GCO, NYSE, 1.41, 2013-03-09\n GPS, NYSE, 2.82, 2013-03-01\n HD, NYSE, 4.69, 2013-02-27\n HEI, NYSE, -12.50, 2013-02-21\n HNZ, NYSE, 10.00, 2013-02-28\n HOV, NYSE, -66.67, 2013-03-07\n HRB, NYSE, -633.33, 2013-03-08\n HRL, NYSE, -2.04, 2013-02-22\n HPQ, NYSE, 15.49, 2013-02-22\n JCP, NYSE, -926.32, 2013-02-28\n KR, NYSE, 25.71, 2013-03-08\n KSS, NYSE, 1.84, 2013-03-01\n LB, NYSE, 1.15, 2013-02-28\n LOW, NYSE, 13.04, 2013-02-26\n LZB, NYSE, 16.67, 2013-02-20\n MDT, NYSE, 2.20, 2013-02-20\n MEI, NYSE, 350.00, 2013-03-01\n MPR, NYSE, 0.00, 2013-03-22\n NAV, NYSE, 14.11, 2013-03-08\n JWN, NYSE, 4.48, 2013-02-22\n ODC, NYSE, -35.42, 2013-03-12\n OXM, NYSE, -5.80, 2013-04-03\n PBY, NYSE, -225.00, 2013-04-16\n PLL, NYSE, 8.96, 2013-02-28\n PNY, NYSE, 1.72, 2013-03-07\n PVH, NYSE, 6.67, 2013-03-28\n THO, NYSE, 0.00, 2013-03-08\n TIF, NYSE, 2.19, 2013-03-23\n TJX, NYSE, 1.23, 2013-02-28\n TOL, NYSE, -81.82, 2013-02-21\n TTC, NYSE, 23.26, 2013-02-22\n VAL, NYSE, -9.09, 2013-02-13\n JW.A, NYSE, 13.41, 2013-03-08\n WMT, NYSE, 6.37, 2013-02-22\n WSM, NYSE, 4.69, 2013-03-20\n FL, NYSE, -11.11, 2013-03-09\n CHS, NYSE, 0.00, 2013-03-01\n REX, NYSE, -800.00, 2013-03-29\n BKS, NYSE, -136.00, 2013-03-01\n CAL, NYSE, 75.00, 2013-03-16\n SIG, NYSE, 1.44, 2013-03-29\n ZLC, NYSE, -1.92, 2013-02-22\n AEO, NYSE, 0.00, 2013-03-07\n FGP, NYSE, -10.00, 2013-03-08\n BMO, NYSE, 1.37, 2013-02-27\n RY, NYSE, 0.75, 2013-03-01\n GEF, NYSE, -13.21, 2013-02-28\n MOV, NYSE, 70.83, 2013-03-22\n SKS, NYSE, 13.33, 2013-02-27\n TD, NYSE, 1.55, 2013-03-01\n ANF, NYSE, 14.51, 2013-02-23\n CIEN, NYSE, 116.00, 2013-03-08\n KMG, NYSE, -17.65, 2013-03-09\n IRET, NYSE, -5.88, 2013-03-13\n CM, NYSE, 0.00, 2013-03-01\nHEI.A, NYSE, -18.60, 2013-02-21\n UBA, NYSE, 13.04, 2013-03-07\n KFY, NYSE, 6.90, 2013-03-07\n TGT, NYSE, 12.24, 2013-02-28\n KKD, NYSE, 0.00, 2013-03-15\n NDZ, NYSE, 0.00, 2013-03-06\n MVC, NYSE, -20.00, 2013-03-08\n CBK, NYSE, 52.17, 2013-03-14\n SJM, NYSE, 7.30, 2013-02-16\n BIG, NYSE, 5.03, 2013-03-07\n IDT, NYSE, -7.14, 2013-03-08\n JOY, NYSE, 14.91, 2013-02-28\n SSI, NYSE, -5.93, 2013-03-13\n GME, NYSE, 3.35, 2013-03-29\n DKS, NYSE, -3.74, 2013-03-12\n A, NYSE, -5.97, 2013-02-15\n MTN, NYSE, -3.51, 2013-03-07\n GES, NYSE, 10.47, 2013-03-21\n CRM, NYSE, 66.67, 2013-03-01\n NWY, NYSE, 25.00, 2013-03-22\n PAY, NYSE, 8.11, 2013-03-06\n DSW, 
NYSE, -4.17, 2013-03-20\n NX, NYSE, -183.33, 2013-03-08\n AGX, NYSE, 15.00, 2013-04-11\n CMD, NYSE, -5.26, 2013-03-08\n DG, NYSE, 7.78, 2013-03-26\n EXPR, NYSE, 1.35, 2013-03-14\n P, NYSE, 0.00, 2013-03-07\n GWRE, NYSE, 181.82, 2013-02-27\n BLOX, NYSE, -20.00, 2013-02-22\n TLYS, NYSE, 6.67, 2013-03-21\n PANW, NYSE, -250.00, 2013-03-01\n WDAY, NYSE, 24.00, 2013-03-08\n RH, NYSE, 4.92, 2013-04-19\n AIR, NYSE, 4.55, 2013-03-20\n ATU, NYSE, -5.41, 2013-03-21\n AZO, NYSE, 0.84, 2013-02-27\n AZZ, NYSE, 2.04, 2013-04-09\n CAG, NYSE, -3.51, 2013-04-04\n CLC, NYSE, 2.17, 2013-03-21\n CMC, NYSE, -80.00, 2013-03-29\n KMX, NYSE, 0.00, 2013-04-11\n FC, NYSE, -27.27, 2013-04-05\n FDO, NYSE, -0.82, 2013-04-11\n FDX, NYSE, -10.87, 2013-03-21\n FUL, NYSE, -3.92, 2013-03-28\n GIS, NYSE, 12.28, 2013-03-21\n KBH, NYSE, 30.43, 2013-03-22\n LEN, NYSE, 100.00, 2013-03-21\n LNN, NYSE, 16.28, 2013-03-28\n LUB, NYSE, -100.00, 2013-03-21\n MKC, NYSE, 1.79, 2013-04-03\n RT, NYSE, 0.00, 2013-04-11\n MSM, NYSE, 0.00, 2013-04-11\n NKE, NYSE, 8.96, 2013-03-22\n ORCL, NYSE, -1.56, 2013-03-21\n PIR, NYSE, 0.00, 2013-04-12\n PKE, NYSE, -21.43, 2013-05-10\n RPM, NYSE, 16.67, 2013-04-05\n SVU, NYSE, -200.00, 2013-04-25\n TXI, NYSE, 25.00, 2013-03-28\n UNF, NYSE, 18.75, 2013-03-28\n WGO, NYSE, 37.50, 2013-03-29\n WOR, NYSE, 6.12, 2013-03-22\n JBL, NYSE, -2.17, 2013-03-21\n GBX, NYSE, 21.62, 2013-04-05\n DRI, NYSE, 0.99, 2013-03-23\n FDS, NYSE, -21.24, 2013-03-20\n SCS, NYSE, 0.00, 2013-03-28\n SJR, NYSE, 5.56, 2013-04-13\n RHT, NYSE, 19.05, 2013-03-28\n OMN, NYSE, -75.00, 2013-04-04\n MON, NYSE, 7.06, 2013-04-04\n GPN, NYSE, -1.14, 2013-04-03\n AYI, NYSE, 0.00, 2013-04-04\n CCL, NYSE, 100.00, 2013-03-16\n CUK, NYSE, 33.33, 2013-03-16\n STZ, NYSE, 4.44, 2013-04-11\n ACN, NYSE, 3.09, 2013-03-29\n SNX, NYSE, 1.15, 2013-03-28\n TAL, NYSE, 50.00, 2013-04-24\n IHS, NYSE, 11.90, 2013-03-22\n EDU, NYSE, 63.64, 2013-04-25\n KED, NYSE, -99.22, 2013-05-02\n CORR, NYSE, -9.09, 2013-05-11\n DFS, NYSE, 18.75, 2013-04-24\n ZEP, NYSE, 54.55, 2013-04-10\n MG, NYSE, -58.82, 2013-04-09\n MOS, NYSE, 5.62, 2013-03-28\n ABT, NYSE, 0.00, 2013-04-18\n ABX, NYSE, 6.98, 2013-04-25\n AB, NYSE, 8.57, 2013-05-02\n ACO, NYSE, -10.64, 2013-04-27\n ADM, NYSE, -5.88, 2013-05-01\n AEM, NYSE, -35.29, 2013-04-26\n AEP, NYSE, 0.00, 2013-04-27\n AES, NYSE, -14.29, 2013-05-10\n AET, NYSE, 8.70, 2013-05-01\n AFL, NYSE, 4.32, 2013-04-25\n AGCO, NYSE, 35.23, 2013-05-01\n HES, NYSE, 24.20, 2013-04-25\n AIG, NYSE, 52.27, 2013-05-03\n AIN, NYSE, 0.00, 2013-05-02\n AJG, NYSE, 33.33, 2013-05-01\n ALU, NYSE, -81.82, 2013-04-27\n MATX, NYSE, 31.25, 2013-05-07\n ALK, NYSE, 15.09, 2013-04-26\n ALX, NYSE, -2.56, 2013-05-07\n BEAM, NYSE, 18.52, 2013-05-03\n AME, NYSE, 3.92, 2013-04-26\n TWX, NYSE, 9.33, 2013-05-02\n AVD, NYSE, 47.50, 2013-05-03\n AMN, NYSE, 33.33, 2013-05-03\n AN, NYSE, 7.94, 2013-04-19\n AON, NYSE, 0.00, 2013-04-27\n APA, NYSE, -9.01, 2013-05-10\n APC, NYSE, 17.39, 2013-05-07\n APD, NYSE, 0.00, 2013-04-24\n APH, NYSE, 1.16, 2013-04-19\n ARG, NYSE, 0.88, 2013-05-03\n AAN, NYSE, -5.63, 2013-04-26\n ARW, NYSE, 3.49, 2013-05-02\n ASGN, NYSE, 94.44, 2013-04-25\n ASH, NYSE, 14.10, 2013-04-25\n ASR, NYSE, -13.25, 2013-04-23\n GAS, NYSE, -2.96, 2013-05-01\n ATO, NYSE, 1.63, 2013-05-02\n ATW, NYSE, 2.40, 2013-05-02\n AU, NYSE, -26.67, 2013-05-14\n AVP, NYSE, 85.71, 2013-05-01\n AVT, NYSE, 3.45, 2013-04-26\n AVY, NYSE, 3.51, 2013-04-25\n AXP, NYSE, 3.60, 2013-04-18\n B, NYSE, -11.11, 2013-04-27\n BA, NYSE, 17.69, 2013-04-25\n BAC, NYSE, -13.04, 2013-04-17\n BAX, NYSE, 
0.96, 2013-04-19\n BC, NYSE, 22.58, 2013-04-26\n OMX, NYSE, -52.17, 2013-05-08\n BCE, NYSE, 10.00, 2013-05-10\n BCR, NYSE, 0.00, 2013-04-24\n BDX, NYSE, 6.67, 2013-05-03\n BEN, NYSE, 8.47, 2013-05-01\n BGG, NYSE, -17.59, 2013-04-20\n BHE, NYSE, 10.00, 2013-04-26\n BHI, NYSE, 4.84, 2013-04-20\n BID, NYSE, -175.00, 2013-05-10\n BIO, NYSE, -38.18, 2013-05-08\n BK, NYSE, 9.62, 2013-04-18\n BKH, NYSE, 19.18, 2013-05-03\n WRB, NYSE, 0.00, 2013-04-24\n BLC, NYSE, 6.67, 2013-04-26\n BLL, NYSE, -9.38, 2013-04-26\n BLX, NYSE, -21.82, 2013-04-18\n BMI, NYSE, -58.33, 2013-04-17\n BMS, NYSE, -1.85, 2013-04-26\n BMY, NYSE, 0.00, 2013-04-26\n BOH, NYSE, -6.90, 2013-04-23\n BXS, NYSE, 4.76, 2013-04-23\n BPL, NYSE, 19.44, 2013-05-04\nBRK.A, NYSE, 197.70, 2013-05-04\n BRO, NYSE, 5.13, 2013-04-16\n BSX, NYSE, 0.00, 2013-04-26\n MTRN, NYSE, -2.94, 2013-04-26\n CAI, NYSE, -1.32, 2013-04-25\n CAT, NYSE, -2.24, 2013-04-23\n CB, NYSE, 12.44, 2013-04-23\n CBI, NYSE, 15.49, 2013-05-03\n CBM, NYSE, 85.00, 2013-05-04\n CBU, NYSE, -1.96, 2013-04-24\n CBT, NYSE, -7.25, 2013-05-01\n CCC, NYSE, 20.00, 2013-05-07\n CCE, NYSE, 2.63, 2013-04-26\n C, NYSE, 9.32, 2013-04-16\n CCK, NYSE, 4.17, 2013-04-18\n CDE, NYSE, -74.07, 2013-05-10\n CDI, NYSE, -40.91, 2013-05-03\n CAH, NYSE, 26.32, 2013-05-03\n CFR, NYSE, -4.21, 2013-04-25\n CHD, NYSE, 5.56, 2013-05-03\n CPK, NYSE, 14.93, 2013-05-03\n CI, NYSE, 20.28, 2013-05-03\n CIA, NYSE, 0.00, 2013-05-03\n CKH, NYSE, -156.12, 2013-04-30\n CL, NYSE, 0.00, 2013-04-26\n CLF, NYSE, 87.50, 2013-04-25\n CLH, NYSE, 25.81, 2013-05-02\n CLX, NYSE, -5.66, 2013-05-02\n CMA, NYSE, 4.48, 2013-04-17\n CMO, NYSE, 3.33, 2013-04-25\n CRK, NYSE, -11.36, 2013-04-30\n CMS, NYSE, 15.22, 2013-04-26\n CNA, NYSE, 21.13, 2013-05-01\n CNW, NYSE, -29.63, 2013-05-02\n CHG, NYSE, 19.00, 2013-05-10\n CNL, NYSE, -8.33, 2013-04-30\n COG, NYSE, -20.00, 2013-04-25\n COT, NYSE, -100.00, 2013-05-02\n CP, NYSE, 2.54, 2013-04-25\n CPF, NYSE, 105.00, 2013-04-27\n CQB, NYSE, 28.57, 2013-05-08\n CR, NYSE, -0.95, 2013-04-23\nCRD.B, NYSE, -29.17, 2013-05-09\n CRS, NYSE, -9.21, 2013-04-26\n CSC, NYSE, 32.29, 2013-05-16\n CSL, NYSE, 0.00, 2013-04-25\n CTB, NYSE, 31.82, 2013-05-10\n CTL, NYSE, 10.14, 2013-05-09\n CTS, NYSE, 16.67, 2013-04-24\n CUB, NYSE, 52.24, 2013-05-03\n CMI, NYSE, -22.58, 2013-05-01\n CUZ, NYSE, -8.33, 2013-05-09\n CVC, NYSE, -185.71, 2013-05-10\n CVH, NYSE, 26.58, 2013-05-02\n CW, NYSE, 28.21, 2013-05-02\n CWT, NYSE, -200.00, 2013-05-02\n CX, NYSE, -140.00, 2013-04-27\n CYN, NYSE, -2.17, 2013-04-19\n D, NYSE, -7.78, 2013-04-26\n DBD, NYSE, -125.00, 2013-05-01\n DCO, NYSE, -18.60, 2013-05-07\n DD, NYSE, 1.30, 2013-04-24\n CVA, NYSE, -61.54, 2013-04-18\n DHR, NYSE, -1.32, 2013-04-19\n DIS, NYSE, 2.60, 2013-05-08\n DLX, NYSE, 3.41, 2013-04-26\n DNB, NYSE, 2.26, 2013-05-03\n RRD, NYSE, 12.12, 2013-04-26\n DOV, NYSE, 1.85, 2013-04-18\n DOW, NYSE, 15.00, 2013-04-26\n DRE, NYSE, 0.00, 2013-04-25\n DHI, NYSE, 60.00, 2013-04-27\n UFS, NYSE, -35.37, 2013-04-26\n DTE, NYSE, 30.10, 2013-04-27\n DUK, NYSE, -1.92, 2013-05-04\n DVN, NYSE, 17.86, 2013-05-02\n DV, NYSE, 8.43, 2013-04-24\n EAT, NYSE, 4.35, 2013-04-24\n ECL, NYSE, 3.45, 2013-05-01\n ED, NYSE, 4.85, 2013-05-03\n EDE, NYSE, 11.11, 2013-04-26\n EFX, NYSE, 0.00, 2013-04-25\n EGN, NYSE, -7.32, 2013-04-30\n EGP, NYSE, -1.30, 2013-04-19\n ELP, NYSE, 0.00, 2013-05-17\n ELY, NYSE, 65.00, 2013-04-26\n EMC, NYSE, 3.23, 2013-04-25\n EMR, NYSE, -1.28, 2013-05-08\n EOG, NYSE, 59.29, 2013-05-07\n EQT, NYSE, 26.92, 2013-04-26\n ESE, NYSE, -17.65, 2013-05-08\n ESV, NYSE, 5.43, 
2013-04-30\n ETN, NYSE, 6.33, 2013-04-30\n ETR, NYSE, 0.00, 2013-04-26\n EXAR, NYSE, 16.67, 2013-05-01\n F, NYSE, 7.89, 2013-04-25\n CLGX, NYSE, 8.11, 2013-04-25\n FNB, NYSE, -4.76, 2013-04-24\n FCF, NYSE, 0.00, 2013-04-24\n FBP, NYSE, -122.22, 2013-05-04\n FICO, NYSE, -9.38, 2013-04-25\n FLO, NYSE, 6.98, 2013-05-17\n FMC, NYSE, 1.85, 2013-05-01\n FOE, NYSE, 66.67, 2013-04-25\n S, NYSE, 38.24, 2013-04-25\n NEE, NYSE, 10.89, 2013-05-01\n FRT, NYSE, 0.88, 2013-05-02\n FRX, NYSE, 47.06, 2013-04-24\n FSS, NYSE, 20.00, 2013-05-07\n FUN, NYSE, 24.32, 2013-05-09\n FUR, NYSE, 77.78, 2013-05-03\n GBL, NYSE, 17.86, 2013-05-08\n GVA, NYSE, -103.85, 2013-05-10\n BGC, NYSE, -319.23, 2013-05-01\n GD, NYSE, 8.00, 2013-04-25\n GE, NYSE, 11.43, 2013-04-20\n RHP, NYSE, 26.47, 2013-05-08\n AXLL, NYSE, -38.02, 2013-05-08\n GGG, NYSE, 15.07, 2013-04-25\n GHM, NYSE, 28.13, 2013-06-01\n GIB, NYSE, 14.58, 2013-05-01\n GLT, NYSE, 17.65, 2013-05-01\n GLW, NYSE, 15.38, 2013-04-25\n GSK, NYSE, 6.49, 2013-04-26\n GLF, NYSE, 175.00, 2013-04-30\n GNI, NYSE, -14.58, 2013-04-26\n GPC, NYSE, -6.06, 2013-04-20\n GRA, NYSE, 0.00, 2013-04-25\n GTY, NYSE, 0.00, 2013-05-03\n GWW, NYSE, 7.69, 2013-04-17\n HAE, NYSE, 4.35, 2013-05-02\n HAL, NYSE, 17.54, 2013-04-23\n HAR, NYSE, 25.40, 2013-05-03\n HVT, NYSE, 33.33, 2013-05-02\n HRC, NYSE, -2.00, 2013-04-25\n HCC, NYSE, 31.71, 2013-05-01\n HCN, NYSE, 1.11, 2013-05-08\n HCP, NYSE, 2.78, 2013-05-01\n HOG, NYSE, 2.06, 2013-04-26\n HE, NYSE, -12.82, 2013-05-09\n HL, NYSE, -66.67, 2013-05-11\n HMA, NYSE, 0.00, 2013-05-03\n HMC, NYSE, -28.57, 2013-04-27\n HMN, NYSE, 7.84, 2013-04-25\n HFC, NYSE, -7.91, 2013-05-08\n HOT, NYSE, 43.40, 2013-05-01\n HP, NYSE, 5.43, 2013-04-26\n HLS, NYSE, 14.29, 2013-04-26\n HRS, NYSE, 0.00, 2013-05-01\n HSC, NYSE, 50.00, 2013-05-10\n HSY, NYSE, 4.81, 2013-04-26\n HUBB, NYSE, -0.90, 2013-04-19\n HUM, NYSE, 51.12, 2013-05-02\n HXL, NYSE, 4.88, 2013-04-23\n IBM, NYSE, -1.96, 2013-04-19\n IDA, NYSE, 17.54, 2013-05-03\n IEX, NYSE, 4.23, 2013-04-23\n IFF, NYSE, 5.31, 2013-05-08\n DIN, NYSE, 12.87, 2013-05-03\n INT, NYSE, 14.06, 2013-05-01\n IP, NYSE, -12.16, 2013-05-03\n IPG, NYSE, -7.69, 2013-04-20\n IO, NYSE, -85.71, 2013-05-01\n IR, NYSE, 2.44, 2013-04-24\n IRF, NYSE, 27.50, 2013-04-30\n ITW, NYSE, 0.00, 2013-04-24\n JEC, NYSE, -2.44, 2013-04-30\n JNJ, NYSE, 2.13, 2013-04-17\n JNY, NYSE, 0.00, 2013-05-02\n K, NYSE, 0.00, 2013-05-03\n KAMN, NYSE, -2.94, 2013-04-30\n KDN, NYSE, 5.71, 2013-05-10\n KEX, NYSE, 2.15, 2013-04-25\n KEY, NYSE, 5.00, 2013-04-19\n KIM, NYSE, 3.13, 2013-05-01\n KMB, NYSE, 10.45, 2013-04-20\n KEM, NYSE, -133.33, 2013-05-10\n KMT, NYSE, -8.45, 2013-04-26\n KO, NYSE, 2.22, 2013-04-17\n KSU, NYSE, 2.30, 2013-04-20\n LDR, NYSE, -9.52, 2013-05-07\n LEG, NYSE, -13.16, 2013-04-26\n LLY, NYSE, 8.57, 2013-04-25\n LM, NYSE, -13.33, 2013-05-01\n LNC, NYSE, -7.27, 2013-05-02\n LPX, NYSE, 0.00, 2013-05-08\n LXU, NYSE, -110.53, 2013-05-07\n LTC, NYSE, -1.67, 2013-05-01\n L, NYSE, 1.19, 2013-04-30\n LUV, NYSE, 133.33, 2013-04-26\n LUX, NYSE, 7.14, 2013-05-02\n MKL, NYSE, 40.11, 2013-05-01\n MAN, NYSE, 40.00, 2013-04-20\n MTW, NYSE, -35.71, 2013-05-01\n SM, NYSE, 46.43, 2013-05-01\n MAS, NYSE, -7.14, 2013-04-30\n MTZ, NYSE, 12.50, 2013-05-03\n MCD, NYSE, -0.79, 2013-04-20\n MDC, NYSE, 73.08, 2013-05-03\n MDP, NYSE, 4.35, 2013-04-26\n MDR, NYSE, -40.00, 2013-05-09\n MDU, NYSE, 36.36, 2013-05-01\n MED, NYSE, 26.47, 2013-05-09\n CVS, NYSE, 5.06, 2013-05-02\n MFC, NYSE, 18.52, 2013-05-03\n MGA, NYSE, 13.57, 2013-05-11\n MGM, NYSE, 130.00, 2013-05-03\n MMC, 
NYSE, 4.29, 2013-05-03\n MMM, NYSE, -2.42, 2013-04-26\n MSA, NYSE, -20.31, 2013-04-25\n MNR, NYSE, -7.69, 2013-05-09\n MO, NYSE, 1.89, 2013-04-26\n MOD, NYSE, 5.88, 2013-05-31\nMOG.A, NYSE, -1.23, 2013-04-27\n MHK, NYSE, 3.57, 2013-05-03\n MSI, NYSE, -1.79, 2013-04-25\n MCY, NYSE, 46.81, 2013-04-30\n MRK, NYSE, 8.97, 2013-05-02\n MRO, NYSE, -28.17, 2013-05-08\n POWR, NYSE, 0.00, 2013-05-09\n MTG, NYSE, -60.00, 2013-05-01\n MTB, NYSE, 6.19, 2013-04-16\n MTX, NYSE, 0.00, 2013-04-26\n MUR, NYSE, 11.34, 2013-05-02\n MYE, NYSE, -11.11, 2013-04-25\n NBL, NYSE, 21.31, 2013-04-26\n NBR, NYSE, 13.79, 2013-04-24\n NE, NYSE, 3.51, 2013-04-18\n NEM, NYSE, -8.97, 2013-04-30\n NFG, NYSE, 7.37, 2013-05-03\n NHI, NYSE, 4.94, 2013-05-07\n NI, NYSE, -1.43, 2013-05-01\n NJR, NYSE, 3.16, 2013-05-03\n THC, NYSE, 17.86, 2013-05-01\n NNN, NYSE, 4.35, 2013-05-03\n NOC, NYSE, 12.14, 2013-04-25\n NR, NYSE, 5.88, 2013-04-26\n NSC, NYSE, 3.39, 2013-04-24\n NUE, NYSE, 4.00, 2013-04-19\n NVR, NYSE, -9.64, 2013-04-23\n NWL, NYSE, 9.38, 2013-05-04\n NWN, NYSE, -5.41, 2013-05-03\n NYT, NYSE, -20.00, 2013-04-26\n OCR, NYSE, 4.65, 2013-04-25\n OGE, NYSE, -32.35, 2013-05-03\n OHI, NYSE, 5.08, 2013-05-08\n OI, NYSE, 7.14, 2013-04-24\n OII, NYSE, 16.95, 2013-04-24\n OKE, NYSE, -6.90, 2013-05-01\n OLN, NYSE, 10.64, 2013-04-26\n BRS, NYSE, -1.94, 2013-05-23\n OMC, NYSE, 1.33, 2013-04-19\n OMI, NYSE, 4.76, 2013-04-24\n ORB, NYSE, 43.48, 2013-04-24\n ORI, NYSE, 600.00, 2013-04-26\n OSK, NYSE, 12.94, 2013-05-01\n OXY, NYSE, 7.64, 2013-04-26\n FCFS, NYSE, 0.00, 2013-04-18\n PBI, NYSE, 0.00, 2013-05-01\n PCG, NYSE, -10.00, 2013-05-03\n PCL, NYSE, 9.38, 2013-04-30\n PCP, NYSE, 1.81, 2013-05-10\n TPC, NYSE, 34.78, 2013-05-02\n PDS, NYSE, 14.29, 2013-04-26\n PEG, NYSE, 14.86, 2013-05-01\n PEI, NYSE, 4.76, 2013-04-23\n PEP, NYSE, 8.45, 2013-04-19\n PFE, NYSE, -1.82, 2013-05-01\n PG, NYSE, 3.13, 2013-04-25\n PGR, NYSE, -4.55, 2013-04-11\n PH, NYSE, 0.60, 2013-04-26\n PHM, NYSE, 31.25, 2013-04-26\n PKD, NYSE, 200.00, 2013-05-02\n PKY, NYSE, 15.38, 2013-05-07\n PNC, NYSE, 12.10, 2013-04-18\n PNM, NYSE, -10.00, 2013-05-07\n PNR, NYSE, 3.57, 2013-04-24\n PNW, NYSE, 175.00, 2013-05-04\n POM, NYSE, -4.00, 2013-05-04\n POT, NYSE, 3.28, 2013-04-26\n PPG, NYSE, 1.28, 2013-04-19\n PPL, NYSE, 0.00, 2013-05-03\n PRGO, NYSE, -1.39, 2013-05-08\n PL, NYSE, -4.30, 2013-05-07\n PSB, NYSE, 0.00, 2013-05-07\n WTR, NYSE, 7.41, 2013-05-02\n CSH, NYSE, 8.21, 2013-04-26\n PWR, NYSE, 24.14, 2013-05-03\n PX, NYSE, 0.00, 2013-04-25\n KWR, NYSE, 14.29, 2013-04-30\n R, NYSE, 1.28, 2013-04-24\n RBC, NYSE, -6.09, 2013-05-01\n RDC, NYSE, 5.77, 2013-05-02\n HTSI, NYSE, 11.67, 2013-05-03\n RES, NYSE, -33.33, 2013-04-25\n RGS, NYSE, -90.77, 2013-05-08\n RGR, NYSE, 15.38, 2013-04-30\n RHI, NYSE, -2.44, 2013-04-24\n RJF, NYSE, -9.33, 2013-04-25\n RLI, NYSE, -1.89, 2013-04-18\n ROG, NYSE, 0.00, 2013-05-01\n ROK, NYSE, 2.31, 2013-04-25\n ROL, NYSE, -5.88, 2013-04-25\n ROP, NYSE, 4.10, 2013-04-30\n RTI, NYSE, 20.00, 2013-05-01\n RTN, NYSE, 21.88, 2013-04-26\n RYL, NYSE, 43.33, 2013-04-25\n BSAC, NYSE, -21.74, 2013-04-26\n T, NYSE, 0.00, 2013-04-24\n SCG, NYSE, 7.77, 2013-04-26\n SCHW, NYSE, -6.25, 2013-04-16\n SCL, NYSE, -4.08, 2013-05-01\n SMG, NYSE, -19.60, 2013-05-07\n SEE, NYSE, -5.56, 2013-05-02\n SF, NYSE, 1.75, 2013-05-10\n SFE, NYSE, -46.15, 2013-04-26\n SHW, NYSE, 2.78, 2013-04-19\n SJI, NYSE, -8.43, 2013-05-04\n JOE, NYSE, -200.00, 2013-05-09\n SJW, NYSE, -12.50, 2013-04-25\n SLB, NYSE, 2.02, 2013-04-20\n HSH, NYSE, 9.38, 2013-05-03\n AOS, NYSE, 24.68, 2013-04-24\n 
SMP, NYSE, 31.25, 2013-05-04\n SNA, NYSE, 4.48, 2013-04-19\n PII, NYSE, 5.94, 2013-04-24\n SNV, NYSE, 0.00, 2013-04-24\n SO, NYSE, -3.92, 2013-04-25\n SON, NYSE, -5.66, 2013-04-19\n SPA, NYSE, -46.15, 2013-05-08\n TRV, NYSE, 14.93, 2013-04-24\n SR, NYSE, -3.36, 2013-05-01\n NVE, NYSE, 12.50, 2013-05-04\n SCI, NYSE, 21.74, 2013-04-25\n SSP, NYSE, 58.33, 2013-05-07\n STT, NYSE, 3.23, 2013-04-20\n STI, NYSE, 3.28, 2013-04-20\n STJ, NYSE, 0.00, 2013-04-18\n STL, NYSE, 7.14, 2013-04-23\n STR, NYSE, -2.38, 2013-05-01\n STE, NYSE, 6.06, 2013-05-08\n SYK, NYSE, 1.98, 2013-04-25\n SUN, NYSE, -7.32, 2013-05-09\n SUP, NYSE, 5.88, 2013-05-04\n SWK, NYSE, 7.29, 2013-04-26\n SWN, NYSE, 7.69, 2013-05-03\n SWX, NYSE, 0.61, 2013-05-04\n SWY, NYSE, -2.78, 2013-04-26\n SYY, NYSE, 16.67, 2013-05-07\n TAC, NYSE, -33.33, 2013-04-24\n TNC, NYSE, -17.14, 2013-04-23\n TCB, NYSE, -15.79, 2013-04-20\n TCO, NYSE, 7.14, 2013-04-26\n TDS, NYSE, 350.00, 2013-05-04\n TDW, NYSE, 55.74, 2013-05-22\n TDY, NYSE, 10.31, 2013-04-25\n TE, NYSE, 11.76, 2013-05-01\n TER, NYSE, 200.00, 2013-04-25\n TEVA, NYSE, 1.82, 2013-05-03\n TEX, NYSE, -17.86, 2013-04-25\n TFX, NYSE, 1.98, 2013-05-01\n TEN, NYSE, 10.77, 2013-04-30\n TKR, NYSE, 0.00, 2013-04-25\n TMK, NYSE, 1.46, 2013-04-24\n TMO, NYSE, 6.20, 2013-04-25\n TOT, NYSE, -2.38, 2013-04-27\n TM, NYSE, 80.67, 2013-05-09\n TR, NYSE, -11.76, 2013-04-25\n TRN, NYSE, 13.75, 2013-05-01\n TRP, NYSE, -8.93, 2013-04-27\n TSO, NYSE, 2.82, 2013-05-02\n TSS, NYSE, -2.94, 2013-04-24\n TTI, NYSE, -40.00, 2013-05-09\n TXT, NYSE, -14.89, 2013-04-18\n TYL, NYSE, 26.09, 2013-04-25\n TSN, NYSE, -21.74, 2013-05-07\n UDR, NYSE, 3.03, 2013-05-01\n UFI, NYSE, -43.75, 2013-04-25\n UAM, NYSE, 17.65, 2013-04-30\n UHS, NYSE, 5.17, 2013-04-25\n UIL, NYSE, 3.06, 2013-05-03\n UIS, NYSE, -145.61, 2013-04-24\n UNH, NYSE, 0.00, 2013-04-19\n KMPR, NYSE, 35.85, 2013-05-03\n UNM, NYSE, 2.56, 2013-05-02\n UNP, NYSE, 3.57, 2013-04-19\n UNT, NYSE, 6.98, 2013-05-08\n URS, NYSE, -14.29, 2013-05-08\n USG, NYSE, -88.89, 2013-04-25\n MUX, NYSE, -300.00, 2013-05-10\n USM, NYSE, 214.29, 2013-05-04\n USPH, NYSE, -3.12, 2013-05-10\n UTL, NYSE, -9.20, 2013-04-24\n UTX, NYSE, -1.54, 2013-04-24\n VMI, NYSE, 15.60, 2013-04-19\n VAR, NYSE, 2.97, 2013-04-25\n CBS, NYSE, 7.35, 2013-05-02\n VLO, NYSE, 16.83, 2013-05-01\n VMC, NYSE, -24.32, 2013-05-03\n VLY, NYSE, -11.11, 2013-04-25\n VNO, NYSE, -38.38, 2013-05-07\n VSH, NYSE, 63.64, 2013-05-01\n WTS, NYSE, -14.04, 2013-05-01\n WBS, NYSE, -2.22, 2013-04-16\n WEC, NYSE, 7.04, 2013-05-01\n WFC, NYSE, 5.75, 2013-04-13\n WG, NYSE, -2400.00, 2013-05-09\n WGL, NYSE, 19.05, 2013-05-02\n WHR, NYSE, 1.03, 2013-04-25\n WMB, NYSE, -8.33, 2013-05-08\n WNC, NYSE, 0.00, 2013-05-01\n TEG, NYSE, 10.69, 2013-05-02\n WR, NYSE, 33.33, 2013-05-09\n WRE, NYSE, -4.35, 2013-04-26\n WRI, NYSE, 4.35, 2013-05-01\n WPP, NYSE, 33.33, 2013-04-30\n WSO, NYSE, 18.18, 2013-04-19\n WST, NYSE, 1.16, 2013-05-03\n WWW, NYSE, 50.00, 2013-04-17\n WY, NYSE, 18.18, 2013-04-27\n X, NYSE, -84.21, 2013-05-01\n XL, NYSE, 38.81, 2013-05-03\n XOM, NYSE, 4.43, 2013-04-26\n XRX, NYSE, 12.50, 2013-04-24\n Y, NYSE, 53.96, 2013-05-07\n HRG, NYSE, 60.00, 2013-05-10\n CRY, NYSE, 28.57, 2013-05-01\n CHK, NYSE, 30.43, 2013-05-02\n DDR, NYSE, 0.00, 2013-05-01\n ELS, NYSE, 0.71, 2013-04-23\n ALG, NYSE, 5.56, 2013-05-02\n ETH, NYSE, -22.22, 2013-04-24\n ATR, NYSE, -3.03, 2013-04-26\n GGP, NYSE, 4.17, 2013-04-30\n MSL, NYSE, 3.70, 2013-05-01\n RCL, NYSE, 84.21, 2013-04-26\n CWEI, NYSE, -61.22, 2013-04-25\n HR, NYSE, 0.00, 2013-05-02\n RGA, NYSE, 
2.48, 2013-04-26\n RIG, NYSE, -7.92, 2013-05-09\n SKT, NYSE, 2.44, 2013-05-01\n TWI, NYSE, -16.28, 2013-04-25\n BDN, NYSE, 2.94, 2013-04-25\n KGC, NYSE, 25.00, 2013-05-08\n CPT, NYSE, 2.11, 2013-05-03\n SGY, NYSE, 18.84, 2013-05-07\n BFS, NYSE, -24.49, 2013-05-01\n BWA, NYSE, 6.56, 2013-04-26\n EQR, NYSE, -1.54, 2013-05-01\n CLP, NYSE, 3.03, 2013-04-26\n KOF, NYSE, -16.24, 2013-04-25\n OKS, NYSE, -27.59, 2013-05-01\n SQM, NYSE, -6.45, 2013-05-29\n BYD, NYSE, 114.29, 2013-04-25\n CBL, NYSE, 3.92, 2013-04-30\n DECK, NYSE, 133.33, 2013-04-26\n IT, NYSE, -2.50, 2013-05-03\n HST, NYSE, 21.74, 2013-05-04\n LXP, NYSE, 0.00, 2013-05-03\n REG, NYSE, 3.23, 2013-05-08\n TUC, NYSE, -24.00, 2013-05-03\n AF, NYSE, 7.69, 2013-04-18\n BFR, NYSE, -2.56, 2013-05-11\n HHS, NYSE, 10.00, 2013-04-26\n MHO, NYSE, 28.57, 2013-04-26\n NFX, NYSE, -2.17, 2013-04-24\n SPG, NYSE, 1.99, 2013-04-27\n SU, NYSE, -1.41, 2013-04-30\n SUI, NYSE, 2.20, 2013-04-26\n TV, NYSE, -22.50, 2013-04-26\n CGI, NYSE, -26.92, 2013-04-26\n CYT, NYSE, -12.79, 2013-04-19\n EMN, NYSE, 3.18, 2013-04-26\n GRT, NYSE, 14.29, 2013-04-25\n MAA, NYSE, 5.04, 2013-05-02\n PLT, NYSE, 4.62, 2013-05-08\n BZH, NYSE, 15.38, 2013-05-03\n ELX, NYSE, 114.29, 2013-05-03\n MLM, NYSE, -69.44, 2013-05-01\n AKS, NYSE, 41.67, 2013-04-24\n ALB, NYSE, -7.00, 2013-04-18\n VRX, NYSE, 1.56, 2013-05-03\n CBR, NYSE, 0.00, 2013-05-01\n MAC, NYSE, 8.86, 2013-05-02\n RKT, NYSE, 9.80, 2013-04-24\n RYN, NYSE, 27.42, 2013-04-26\n ADC, NYSE, -2.00, 2013-04-30\nBRK.B, NYSE, 52.31, 2013-05-04\n EXP, NYSE, 5.00, 2013-05-15\n GGB, NYSE, -66.67, 2013-05-08\n SSD, NYSE, -52.38, 2013-04-26\n ESS, NYSE, -0.53, 2013-05-02\n FR, NYSE, -7.69, 2013-04-26\n HIW, NYSE, -2.90, 2013-05-01\n IMAX, NYSE, 0.00, 2013-04-26\n AIV, NYSE, 2.13, 2013-05-03\n FCH, NYSE, 0.00, 2013-05-01\n ITGR, NYSE, 2.33, 2013-04-26\n NOK, NYSE, 33.33, 2013-04-19\n GEO, NYSE, -3.51, 2013-05-09\n CLI, NYSE, 0.00, 2013-04-26\n RS, NYSE, -5.22, 2013-04-26\n CPE, NYSE, 100.00, 2013-05-10\n KNX, NYSE, 0.00, 2013-04-25\n O, NYSE, 1.69, 2013-04-26\n COF, NYSE, 17.79, 2013-04-19\n IRS, NYSE, 10.34, 2013-05-18\n MCK, NYSE, -0.43, 2013-05-08\n SWC, NYSE, 200.00, 2013-04-30\n STM, NYSE, 23.53, 2013-04-23\n TEO, NYSE, 1.30, 2013-04-30\n TRK, NYSE, -400.00, 2013-05-02\n LMT, NYSE, 23.38, 2013-04-24\n APU, NYSE, -35.48, 2013-05-16\n AGU, NYSE, -12.15, 2013-05-10\n LH, NYSE, -1.69, 2013-04-20\n DDD, NYSE, -10.00, 2013-05-01\n AFG, NYSE, 10.84, 2013-05-09\n RMD, NYSE, 3.51, 2013-04-26\n WAB, NYSE, 3.60, 2013-04-25\n CIB, NYSE, 6.78, 2013-05-08\n CAM, NYSE, -5.41, 2013-04-26\n FCX, NYSE, 1.39, 2013-04-19\n RNR, NYSE, 34.25, 2013-05-02\n AVX, NYSE, 7.14, 2013-04-25\n RWT, NYSE, 46.81, 2013-05-03\n AXE, NYSE, -6.62, 2013-04-24\n CLB, NYSE, 6.09, 2013-04-18\n MD, NYSE, 0.92, 2013-05-03\n THG, NYSE, 30.69, 2013-04-30\n BAP, NYSE, -10.94, 2013-05-07\n DO, NYSE, 10.43, 2013-04-26\n RE, NYSE, 36.11, 2013-04-23\n DST, NYSE, -6.60, 2013-04-26\n EL, NYSE, 36.36, 2013-05-03\n ESC, NYSE, -57.14, 2013-05-03\n LXK, NYSE, -7.55, 2013-04-24\n MIG, NYSE, 7.69, 2013-05-01\n WAT, NYSE, -1.83, 2013-04-24\n EME, NYSE, 2.27, 2013-04-26\n HIG, NYSE, 10.84, 2013-04-30\n ITT, NYSE, 9.30, 2013-05-03\n SPN, NYSE, 0.00, 2013-04-26\n SWM, NYSE, 8.60, 2013-05-09\n SCCO, NYSE, -4.84, 2013-04-27\n RCI, NYSE, -1.27, 2013-04-23\n EIX, NYSE, 20.31, 2013-05-01\n IRM, NYSE, 0.00, 2013-05-02\n SPH, NYSE, -4.82, 2013-05-10\n CCJ, NYSE, 0.00, 2013-05-02\n PGI, NYSE, 0.00, 2013-04-19\n CRR, NYSE, -14.61, 2013-04-26\n BVN, NYSE, -40.30, 2013-04-30\n FCN, NYSE, 13.46, 
2013-05-10\n RPT, NYSE, 6.90, 2013-04-24\n TUP, NYSE, 4.42, 2013-04-25\n ASB, NYSE, 8.00, 2013-04-19\n GWR, NYSE, -10.11, 2013-05-02\n TBI, NYSE, -50.00, 2013-04-25\n FFG, NYSE, 12.66, 2013-05-03\n USNA, NYSE, 14.29, 2013-04-24\n CSV, NYSE, -3.03, 2013-05-08\n LVB, NYSE, 10.53, 2013-05-09\n ALR, NYSE, 6.25, 2013-05-10\n OCN, NYSE, 0.00, 2013-05-03\n PAA, NYSE, 37.50, 2013-05-07\n DNR, NYSE, 13.79, 2013-05-03\n HMY, NYSE, -119.23, 2013-05-04\n TGI, NYSE, 5.66, 2013-05-02\n PAG, NYSE, 1.61, 2013-04-30\n GEL, NYSE, -17.65, 2013-05-03\n IM, NYSE, 0.00, 2013-04-26\n NUS, NYSE, 13.92, 2013-05-03\n CNI, NYSE, -1.67, 2013-04-23\n LAD, NYSE, 16.67, 2013-04-25\n NSP, NYSE, 0.00, 2013-04-30\n DGX, NYSE, -14.42, 2013-04-18\n KRC, NYSE, 0.00, 2013-05-01\n MTH, NYSE, 32.00, 2013-04-25\n NCR, NYSE, 35.00, 2013-05-01\n OFG, NYSE, 2.78, 2013-04-26\n IVZ, NYSE, 10.64, 2013-05-01\n DX, NYSE, 9.68, 2013-05-02\n FBC, NYSE, -65.98, 2013-04-24\n ALV, NYSE, 1.57, 2013-04-27\n ARE, NYSE, 0.00, 2013-04-30\n BBT, NYSE, 2.99, 2013-04-19\n CGG, NYSE, 6.25, 2013-05-04\n BXP, NYSE, -0.83, 2013-05-01\n CBD, NYSE, -23.73, 2013-05-01\n MS, NYSE, 7.02, 2013-04-19\n SRT, NYSE, -314.29, 2013-05-10\n HLX, NYSE, 38.89, 2013-04-22\n FLS, NYSE, 3.61, 2013-04-25\n MT, NYSE, -400.00, 2013-05-11\n PXD, NYSE, 5.15, 2013-05-02\n SLG, NYSE, 0.83, 2013-04-24\n NAT, NYSE, -16.22, 2013-05-14\n CSU, NYSE, -36.36, 2013-05-07\n DRQ, NYSE, 22.50, 2013-05-04\n FDP, NYSE, -24.47, 2013-05-01\n NLY, NYSE, 30.56, 2013-05-02\n TLM, NYSE, -250.00, 2013-05-02\n TSM, NYSE, 13.04, 2013-04-19\n YUM, NYSE, 12.90, 2013-04-24\n AMG, NYSE, 12.38, 2013-05-01\n EPR, NYSE, -1.05, 2013-05-01\n FE, NYSE, 10.14, 2013-05-08\n LFL, NYSE, 80.00, 2013-05-15\n MTD, NYSE, 2.79, 2013-05-03\n SID, NYSE, -66.67, 2013-05-16\n IN, NYSE, -271.43, 2013-05-04\n CBZ, NYSE, 25.64, 2013-05-03\n URI, NYSE, 11.54, 2013-04-17\n INGR, NYSE, 6.82, 2013-05-03\n RAS, NYSE, 181.82, 2013-05-03\n UNS, NYSE, 35.00, 2013-04-30\n ASI, NYSE, 18.92, 2013-05-09\n ANH, NYSE, 15.38, 2013-04-30\n OFC, NYSE, 17.07, 2013-04-27\n GPX, NYSE, 0.00, 2013-05-03\n WAC, NYSE, 1427.27, 2013-05-10\n RBA, NYSE, -13.33, 2013-05-01\n WDR, NYSE, 1.61, 2013-04-24\n LHO, NYSE, 8.00, 2013-04-18\n LNT, NYSE, 18.03, 2013-05-04\n LVLT, NYSE, 7.14, 2013-04-26\n MFA, NYSE, -4.76, 2013-05-02\n OME, NYSE, 50.00, 2013-05-08\n EQY, NYSE, 6.90, 2013-05-02\n FII, NYSE, -2.38, 2013-04-26\n FMX, NYSE, -37.89, 2013-04-25\n LLL, NYSE, 3.63, 2013-04-26\n VTR, NYSE, 4.04, 2013-04-27\n WCN, NYSE, 20.00, 2013-05-02\n AVB, NYSE, 0.74, 2013-05-01\n GIL, NYSE, 5.36, 2013-05-03\n HZO, NYSE, -92.86, 2013-04-26\n AWR, NYSE, 38.00, 2013-05-11\n CLS, NYSE, 10.00, 2013-04-24\n EPD, NYSE, 16.67, 2013-05-01\n RSG, NYSE, 15.00, 2013-04-26\n WM, NYSE, -2.44, 2013-04-25\n AKR, NYSE, 3.33, 2013-04-24\n CVG, NYSE, 17.39, 2013-05-01\n RRC, NYSE, -38.89, 2013-04-26\n SAP, NYSE, 41.51, 2013-04-20\n CCI, NYSE, 0.00, 2013-04-25\n PQ, NYSE, 100.00, 2013-05-08\n WFT, NYSE, 0.00, 2013-05-03\n CAA, NYSE, 0.00, 2013-05-03\n ENB, NYSE, 13.21, 2013-05-09\n GMK, NYSE, 60.00, 2013-04-25\n MMR, NYSE, 0.00, 2013-05-07\n PB, NYSE, 2.38, 2013-04-25\n VIV, NYSE, -20.00, 2013-05-08\n AXL, NYSE, 53.33, 2013-05-04\n BP, NYSE, 33.33, 2013-05-01\n ETM, NYSE, 0.00, 2013-05-09\n HT, NYSE, 0.00, 2013-05-01\n BYI, NYSE, 10.71, 2013-04-25\n CEB, NYSE, 1.64, 2013-05-02\n INFY, NYSE, 5.41, 2013-04-13\n JLL, NYSE, 56.52, 2013-05-01\n AZN, NYSE, 5.22, 2013-04-26\n SFG, NYSE, 33.75, 2013-04-24\n TREX, NYSE, 14.68, 2013-05-04\n GS, NYSE, 11.43, 2013-04-17\n SYX, NYSE, -157.14, 
2013-05-01\n WCC, NYSE, -4.27, 2013-04-19\n JNPR, NYSE, 33.33, 2013-04-24\n RDN, NYSE, 28.57, 2013-05-02\n RAI, NYSE, 4.35, 2013-04-24\n SKX, NYSE, -27.78, 2013-05-16\n WTM, NYSE, 178.02, 2013-04-30\n NCI, NYSE, 12.50, 2013-04-26\n BLT, NYSE, -17.39, 2013-05-08\n QTM, NYSE, -33.33, 2013-05-09\n BLK, NYSE, 1.67, 2013-04-17\n CIR, NYSE, 4.00, 2013-05-03\n MSO, NYSE, 12.50, 2013-05-01\n PKG, NYSE, 10.71, 2013-04-23\n PKI, NYSE, -25.00, 2013-04-26\n WWE, NYSE, -37.50, 2013-05-03\n SNN, NYSE, -2.11, 2013-05-03\n UPS, NYSE, 2.97, 2013-04-26\n XOXO, NYSE, 16.67, 2013-05-10\n SLF, NYSE, 7.25, 2013-05-09\n CDR, NYSE, 9.09, 2013-05-10\n EW, NYSE, -5.26, 2013-04-24\n MET, NYSE, 13.85, 2013-05-01\n FBR, NYSE, -89.47, 2013-04-24\n VVC, NYSE, -7.58, 2013-05-02\n BAM, NYSE, 70.00, 2013-05-10\n NVS, NYSE, 4.00, 2013-04-25\n BHLB, NYSE, -1.82, 2013-04-30\n CRL, NYSE, -2.82, 2013-05-02\n CYH, NYSE, 3.57, 2013-04-30\n MBT, NYSE, -13.04, 2013-06-08\n MTOR, NYSE, 500.00, 2013-05-01\n CNQ, NYSE, -44.19, 2013-05-03\n ERJ, NYSE, -62.79, 2013-04-30\n VZ, NYSE, 3.03, 2013-04-19\n EVC, NYSE, 0.00, 2013-05-03\n PBR, NYSE, 0.00, 2013-04-27\n XEL, NYSE, 11.63, 2013-05-03\n ALE, NYSE, 10.67, 2013-05-09\n HW, NYSE, -30.00, 2013-05-01\n POL, NYSE, 14.81, 2013-05-02\n COH, NYSE, 3.70, 2013-04-24\n CXW, NYSE, 6.38, 2013-05-09\n DVA, NYSE, 3.37, 2013-05-08\n EXC, NYSE, 4.41, 2013-05-02\n MCO, NYSE, 11.49, 2013-05-04\n BRFS, NYSE, 23.53, 2013-04-30\n TU, NYSE, 3.77, 2013-05-10\n WIT, NYSE, 0.00, 2013-04-20\n ERF, NYSE, 100.00, 2013-05-11\n GG, NYSE, -35.00, 2013-05-03\n HNT, NYSE, 34.15, 2013-04-30\n NYCB, NYSE, 3.85, 2013-04-25\n SXT, NYSE, 3.33, 2013-04-19\n CPG, NYSE, -20.00, 2013-05-10\n AMX, NYSE, 16.67, 2013-04-20\n MPX, NYSE, 0.00, 2013-04-25\n OIS, NYSE, -2.70, 2013-04-25\n MMP, NYSE, 4.08, 2013-05-03\n PES, NYSE, 33.33, 2013-05-01\n ABB, NYSE, -12.12, 2013-04-25\n KMR, NYSE, -3.28, 2013-05-02\n GEN, NYSE, -41.18, 2013-05-07\n ADS, NYSE, -2.88, 2013-04-19\n CVI, NYSE, 25.00, 2013-05-03\n FTI, NYSE, -6.52, 2013-04-24\n PRA, NYSE, 27.63, 2013-05-07\n STO, NYSE, -16.46, 2013-05-03\n BEL, NYSE, 41.67, 2013-05-02\n FIS, NYSE, 1.64, 2013-05-01\n COL, NYSE, 0.86, 2013-04-20\n KAI, NYSE, 20.51, 2013-04-30\n ABC, NYSE, -2.25, 2013-04-26\n BG, NYSE, 18.56, 2013-04-26\n FRO, NYSE, 27.08, 2013-05-31\n ECA, NYSE, 150.00, 2013-04-24\n CIG, NYSE, 108.33, 2013-05-17\n EEP, NYSE, 16.67, 2013-05-01\n CVX, NYSE, 3.25, 2013-04-27\n GXP, NYSE, 41.67, 2013-05-10\n JHX, NYSE, -2.78, 2013-05-24\n PFG, NYSE, 5.33, 2013-04-26\n PVR, NYSE, 14.29, 2013-04-26\n AAP, NYSE, 2.48, 2013-05-24\n KND, NYSE, 36.11, 2013-05-02\n WTW, NYSE, 38.10, 2013-05-03\n CNC, NYSE, 5.00, 2013-04-24\n BCH, NYSE, 3.70, 2013-05-09\n NS, NYSE, -86.67, 2013-04-25\n ITUB, NYSE, -4.88, 2013-04-26\n SXL, NYSE, 26.74, 2013-05-09\n VALE, NYSE, 50.00, 2013-04-25\n TNP, NYSE, 150.00, 2013-05-25\n LCI, NYSE, 40.00, 2013-05-09\n GTI, NYSE, 50.00, 2013-04-26\n HNR, NYSE, -26.67, 2013-06-06\n MWE, NYSE, -90.00, 2013-05-09\n NLS, NYSE, 50.00, 2013-05-07\n RGC, NYSE, -7.14, 2013-05-01\n JAH, NYSE, 30.43, 2013-04-25\n NPO, NYSE, -23.29, 2013-05-03\n TRI, NYSE, 22.58, 2013-05-01\n CAE, NYSE, 10.53, 2013-05-17\n LF, NYSE, 28.57, 2013-05-02\n SNY, NYSE, -10.11, 2013-05-03\n BANC, NYSE, 400.00, 2013-05-09\n COP, NYSE, 0.00, 2013-04-26\n CNP, NYSE, -8.11, 2013-05-03\n EEQ, NYSE, -321.43, 2013-05-02\n MRH, NYSE, 32.58, 2013-04-25\n NGS, NYSE, 23.08, 2013-05-10\n NRP, NYSE, 4.88, 2013-05-07\n PXP, NYSE, 17.98, 2013-05-03\n XEC, NYSE, -0.93, 2013-05-08\n IAG, NYSE, 7.14, 2013-05-08\n EGO, 
NYSE, 0.00, 2013-05-03\n JNS, NYSE, -6.25, 2013-04-24\n PFS, NYSE, 14.81, 2013-04-27\n ENH, NYSE, 74.79, 2013-05-02\n CNX, NYSE, -5.00, 2013-04-26\n AMT, NYSE, -10.42, 2013-05-02\n ABG, NYSE, 13.43, 2013-04-25\n LII, NYSE, 22.22, 2013-04-23\n SRE, NYSE, -4.90, 2013-05-03\n AEE, NYSE, -21.43, 2013-05-03\n PLD, NYSE, 0.00, 2013-04-25\n SAH, NYSE, -2.38, 2013-04-24\n GPI, NYSE, 11.54, 2013-05-03\n FIX, NYSE, 800.00, 2013-05-02\n MMS, NYSE, 1.41, 2013-05-10\n SRI, NYSE, 50.00, 2013-05-10\n RTEC, NYSE, 50.00, 2013-05-03\n NOV, NYSE, -5.84, 2013-04-27\n DF, NYSE, 11.54, 2013-05-10\n SAM, NYSE, -17.74, 2013-05-02\n RL, NYSE, 8.46, 2013-05-24\n FLR, NYSE, 6.25, 2013-05-03\n ALL, NYSE, 2.27, 2013-05-02\n ATI, NYSE, 0.00, 2013-04-25\n EE, NYSE, 72.73, 2013-05-02\n AIT, NYSE, 0.00, 2013-05-03\n CHH, NYSE, -3.70, 2013-04-30\n FMS, NYSE, -17.78, 2013-05-01\n BCO, NYSE, 16.67, 2013-04-26\n CBB, NYSE, 133.33, 2013-05-10\n MWW, NYSE, 14.29, 2013-05-03\n PSA, NYSE, -3.09, 2013-05-10\n E, NYSE, 0.00, 2013-04-25\n JPM, NYSE, 15.22, 2013-04-13\n USB, NYSE, 0.00, 2013-04-17\n HON, NYSE, 6.14, 2013-04-20\n ITG, NYSE, 50.00, 2013-05-03\n ARB, NYSE, -15.49, 2013-05-08\n APL, NYSE, -28.95, 2013-04-30\n AVA, NYSE, 0.00, 2013-05-02\n AXS, NYSE, 85.71, 2013-04-26\n MOH, NYSE, 146.15, 2013-04-26\n CVD, NYSE, 4.17, 2013-05-02\n AHT, NYSE, 2.94, 2013-05-09\n GPK, NYSE, 25.00, 2013-04-26\n CNO, NYSE, 0.00, 2013-04-25\n AUQ, NYSE, -60.00, 2013-05-10\n NFP, NYSE, -5.45, 2013-05-04\n CRI, NYSE, 12.86, 2013-05-10\n FMD, NYSE, 27.27, 2013-04-30\n FPO, NYSE, 3.45, 2013-04-26\n TRQ, NYSE, -25.00, 2013-05-14\n WLL, NYSE, 2.17, 2013-04-25\n AEL, NYSE, 11.36, 2013-05-02\n AHL, NYSE, 0.95, 2013-04-25\n AUY, NYSE, -23.81, 2013-05-01\n CMP, NYSE, 24.32, 2013-04-30\n KRO, NYSE, -800.00, 2013-05-09\n TPX, NYSE, 3.33, 2013-05-03\n UTI, NYSE, -300.00, 2013-05-01\n PJC, NYSE, 9.09, 2013-04-18\n TRW, NYSE, 3.42, 2013-05-01\n AIZ, NYSE, -14.56, 2013-04-25\n HTH, NYSE, 11.43, 2013-05-07\n ETP, NYSE, 33.33, 2013-05-09\n LSE, NYSE, 0.00, 2013-05-09\n BBD, NYSE, 0.00, 2013-04-23\n NRG, NYSE, -37.04, 2013-05-08\n HOS, NYSE, 96.67, 2013-05-02\n ABR, NYSE, 84.62, 2013-05-04\n FHN, NYSE, 0.00, 2013-04-20\n AGO, NYSE, 86.11, 2013-05-10\n HSP, NYSE, 18.18, 2013-05-02\n HNI, NYSE, 250.00, 2013-04-18\n GHL, NYSE, -34.78, 2013-04-18\n XPO, NYSE, -16.44, 2013-05-08\n CVO, NYSE, -200.00, 2013-05-09\n CHE, NYSE, 9.92, 2013-04-19\n GNW, NYSE, 11.11, 2013-05-01\n CBG, NYSE, -5.88, 2013-04-26\n SFL, NYSE, -43.33, 2013-05-31\n NEU, NYSE, 3.28, 2013-04-25\n GOL, NYSE, -1200.00, 2013-05-14\n CAB, NYSE, 18.64, 2013-04-26\n LTM, NYSE, 3.08, 2013-04-26\n VVI, NYSE, 68.00, 2013-04-27\n WCG, NYSE, -8.70, 2013-05-04\n HEP, NYSE, -36.36, 2013-05-01\n DPZ, NYSE, 5.36, 2013-05-01\n BDC, NYSE, 6.33, 2013-05-03\n ENS, NYSE, 2.56, 2013-05-29\n BMR, NYSE, 7.89, 2013-05-02\n ACC, NYSE, -1.54, 2013-04-24\n KRG, NYSE, 27.27, 2013-05-03\n WLK, NYSE, 42.64, 2013-05-07\n EXR, NYSE, 4.55, 2013-04-30\n CNS, NYSE, 7.32, 2013-04-18\n IOC, NYSE, 161.54, 2013-05-14\n STON, NYSE, -150.00, 2013-05-08\n TTM, NYSE, 60.56, 2013-05-30\n CPL, NYSE, 7.69, 2013-05-11\n TPGI, NYSE, -460.00, 2013-05-07\n SHO, NYSE, 0.00, 2013-05-07\n CUBE, NYSE, 0.00, 2013-05-03\n NRF, NYSE, -51.35, 2013-05-04\n DLR, NYSE, -1.69, 2013-04-27\n MTL, NYSE, 100.00, 2013-06-19\n NWE, NYSE, 8.60, 2013-04-26\n ORA, NYSE, 550.00, 2013-05-08\n NP, NYSE, 7.25, 2013-05-09\n SMA, NYSE, -73.33, 2013-05-03\n BBG, NYSE, -2600.00, 2013-05-03\n BXC, NYSE, 35.29, 2013-05-02\n KNL, NYSE, 8.33, 2013-04-19\n LVS, NYSE, 7.58, 
2013-05-02\n HLF, NYSE, 18.69, 2013-04-30\n MIC, NYSE, -89.09, 2013-04-30\n PHH, NYSE, -81.13, 2013-05-02\n CE, NYSE, 44.30, 2013-04-19\n EDR, NYSE, 0.00, 2013-04-30\n WTI, NYSE, 34.62, 2013-05-08\n ARC, NYSE, 0.00, 2013-05-08\n PBH, NYSE, 5.88, 2013-05-17\n HUN, NYSE, 18.75, 2013-05-01\n WEX, NYSE, 3.16, 2013-05-02\n DLB, NYSE, 14.29, 2013-04-26\n DSX, NYSE, 66.67, 2013-05-23\n LAZ, NYSE, -17.65, 2013-04-27\n TGP, NYSE, 14.29, 2013-05-10\n TLP, NYSE, 7.69, 2013-05-08\n DRH, NYSE, 55.56, 2013-05-11\n HTGC, NYSE, 8.00, 2013-05-03\n KFN, NYSE, 27.78, 2013-05-02\n THS, NYSE, 5.71, 2013-05-10\n NSR, NYSE, -8.86, 2013-05-03\n WAL, NYSE, 14.29, 2013-04-19\n SLW, NYSE, -9.76, 2013-05-11\n MPW, NYSE, -3.85, 2013-04-27\n GNK, NYSE, -2.75, 2013-05-02\n MFB, NYSE, 28.57, 2013-05-09\nRDS.A, NYSE, 21.74, 2013-05-03\n ITC, NYSE, -3.45, 2013-04-24\n FTK, NYSE, -11.76, 2013-05-10\n PIKE, NYSE, -20.00, 2013-05-07\n ALJ, NYSE, 63.27, 2013-05-09\n DRC, NYSE, 2.38, 2013-04-26\n STN, NYSE, 0.00, 2013-05-10\n SSW, NYSE, -8.70, 2013-04-30\n CF, NYSE, 0.50, 2013-05-09\n HPY, NYSE, 12.50, 2013-05-01\n ROC, NYSE, 1.49, 2013-05-01\n WPZ, NYSE, -57.58, 2013-05-01\n LCC, NYSE, 29.17, 2013-04-24\n GLP, NYSE, -7.27, 2013-05-10\n AMP, NYSE, 1.27, 2013-04-23\n DHT, NYSE, 58.33, 2013-04-30\n FNF, NYSE, 5.00, 2013-05-02\n NM, NYSE, 52.38, 2013-05-22\n CCO, NYSE, -57.14, 2013-05-03\n BWP, NYSE, 5.00, 2013-04-30\n ICE, NYSE, 2.53, 2013-05-02\n BKD, NYSE, 50.00, 2013-05-02\n BAS, NYSE, 12.00, 2013-04-25\n CPA, NYSE, 21.21, 2013-05-14\n LYV, NYSE, 8.33, 2013-05-08\n WNR, NYSE, -6.93, 2013-05-03\n CMG, NYSE, 9.81, 2013-04-19\n RGP, NYSE, -50.00, 2013-05-09\n KOP, NYSE, -16.92, 2013-05-04\n TX, NYSE, 40.43, 2013-05-01\n UAL, NYSE, 10.09, 2013-04-26\n ETE, NYSE, -27.03, 2013-05-09\n RSO, NYSE, -45.00, 2013-05-08\n XCO, NYSE, 62.50, 2013-05-01\n PAC, NYSE, 30.00, 2013-04-26\n NYX, NYSE, 1.79, 2013-05-01\n TDG, NYSE, 0.61, 2013-05-08\n BMA, NYSE, 11.68, 2013-05-09\n THI, NYSE, 1.67, 2013-05-09\n BTE, NYSE, -112.00, 2013-05-10\n CNH, NYSE, 41.49, 2013-05-01\n GLA, NYSE, -82.35, 2013-05-02\n POR, NYSE, 0.00, 2013-05-02\n HIL, NYSE, 50.00, 2013-05-03\n HVB, NYSE, 12.50, 2013-04-24\n KS, NYSE, -9.30, 2013-05-08\n HK, NYSE, -28.57, 2013-05-03\n DCP, NYSE, 3.28, 2013-05-07\n DK, NYSE, 7.56, 2013-05-09\n CODI, NYSE, 0.00, 2013-05-08\n MA, NYSE, 0.65, 2013-05-02\n MWA, NYSE, 150.00, 2013-05-01\n KOG, NYSE, -21.43, 2013-05-03\n PWE, NYSE, -150.00, 2013-05-03\n PGTI, NYSE, 100.00, 2013-05-02\n AWH, NYSE, 8.45, 2013-04-25\n NSH, NYSE, -29.73, 2013-04-25\n WYN, NYSE, 7.58, 2013-04-25\n WNS, NYSE, 15.38, 2013-04-18\n PGH, NYSE, 0.00, 2013-05-02\n AYR, NYSE, 34.48, 2013-05-03\n EVR, NYSE, -24.49, 2013-04-25\n HBI, NYSE, 2.00, 2013-04-24\n WU, NYSE, 12.12, 2013-05-01\n OC, NYSE, 45.00, 2013-04-25\n DAC, NYSE, 44.44, 2013-04-30\n AWI, NYSE, -43.59, 2013-04-30\n SUSS, NYSE, 0.00, 2013-05-09\n DEI, NYSE, 5.71, 2013-05-08\n OB, NYSE, 79.31, 2013-04-30\n SBH, NYSE, -7.69, 2013-05-03\n EBS, NYSE, -144.44, 2013-05-03\n KBR, NYSE, 25.53, 2013-04-26\n AER, NYSE, 23.40, 2013-05-08\n NOA, NYSE, -442.86, 2013-06-11\n SPR, NYSE, 29.79, 2013-05-03\n ANW, NYSE, -7.14, 2013-05-16\n DCT, NYSE, 10.00, 2013-05-03\n SE, NYSE, 6.25, 2013-05-04\n TOO, NYSE, -17.86, 2013-05-10\n TSL, NYSE, -27.78, 2013-05-30\n TWC, NYSE, 2.92, 2013-04-26\n MVO, NYSE, -13.92, 2013-05-09\n CO, NYSE, 150.00, 2013-06-19\n EXK, NYSE, -18.75, 2013-05-07\n EIG, NYSE, 22.22, 2013-05-09\n HF, NYSE, -50.00, 2013-05-02\n FIG, NYSE, 33.33, 2013-05-03\n NGLS, NYSE, -20.00, 2013-05-04\n TCAP, NYSE, 
-1.75, 2013-05-09\n GFA, NYSE, -211.11, 2013-05-14\n BR, NYSE, 18.18, 2013-05-08\n SCR, NYSE, 12.50, 2013-05-10\n CNK, NYSE, 12.00, 2013-05-08\n DAL, NYSE, 42.86, 2013-04-24\n ORN, NYSE, 42.86, 2013-05-03\n ACM, NYSE, 3.92, 2013-05-08\n SLH, NYSE, 5.00, 2013-05-08\n CLR, NYSE, 2.63, 2013-05-09\n BGS, NYSE, -5.13, 2013-04-19\n STAR, NYSE, 26.42, 2013-05-01\n YGE, NYSE, -40.00, 2013-05-31\n DFS, NYSE, 18.75, 2013-04-24\n TEL, NYSE, 7.04, 2013-04-25\n BX, NYSE, 1.85, 2013-04-19\n SEP, NYSE, 4.65, 2013-05-04\n BZ, NYSE, -77.78, 2013-05-03\n PPO, NYSE, -41.18, 2013-05-09\n PRO, NYSE, 100.00, 2013-05-03\n WBC, NYSE, 7.34, 2013-04-26\n DHX, NYSE, 0.00, 2013-04-24\n PMC, NYSE, 23.53, 2013-05-02\n HGG, NYSE, 3.33, 2013-05-21\n OWW, NYSE, -33.33, 2013-05-10\n VR, NYSE, 35.97, 2013-04-26\n CXO, NYSE, -27.50, 2013-05-02\n G, NYSE, 5.00, 2013-05-02\n EJ, NYSE, 89.47, 2013-05-16\n WX, NYSE, 11.11, 2013-05-14\n CMLP, NYSE, -92.86, 2013-05-08\n VMW, NYSE, 10.87, 2013-04-24\n CZZ, NYSE, -40.00, 2013-06-06\n CGA, NYSE, 6.67, 2013-05-14\n TDC, NYSE, -26.92, 2013-05-03\n FLY, NYSE, 61.73, 2013-05-03\n MAIN, NYSE, 2.04, 2013-05-10\n REN, NYSE, 100.00, 2013-05-07\n TGH, NYSE, -12.90, 2013-05-08\n DFT, NYSE, -5.00, 2013-05-08\n RF, NYSE, 15.00, 2013-04-24\n PZN, NYSE, 0.00, 2013-04-25\n LL, NYSE, 29.55, 2013-04-25\n NMM, NYSE, 0.00, 2013-04-26\n OZM, NYSE, 81.25, 2013-05-03\n ES, NYSE, 12.31, 2013-05-02\n MSCI, NYSE, 5.56, 2013-05-02\n ARR, NYSE, -21.74, 2013-05-03\n KW, NYSE, 62.50, 2013-05-08\n GTS, NYSE, 52.78, 2013-05-02\n FOR, NYSE, 450.00, 2013-05-09\n LRN, NYSE, 34.78, 2013-05-04\n TNK, NYSE, -100.00, 2013-05-10\n N, NYSE, -21.43, 2013-04-26\n DAN, NYSE, -33.33, 2013-04-26\n BIP, NYSE, 0.00, 2013-05-03\n CPN, NYSE, -6.67, 2013-05-03\n SOL, NYSE, -15.38, 2013-05-17\n PM, NYSE, -4.44, 2013-04-19\n V, NYSE, 6.08, 2013-05-02\n IPI, NYSE, 5.26, 2013-05-02\n AWK, NYSE, -5.88, 2013-05-08\n HTS, NYSE, -7.46, 2013-04-23\n DPS, NYSE, 12.77, 2013-04-25\n CFX, NYSE, 8.33, 2013-04-26\n WES, NYSE, -22.50, 2013-05-02\n SB, NYSE, 0.00, 2013-05-16\n LO, NYSE, 4.76, 2013-04-25\n LPS, NYSE, 0.00, 2013-04-25\n FF, NYSE, -6.90, 2013-05-08\n NNA, NYSE, 200.00, 2013-05-03\n EPB, NYSE, 7.41, 2013-04-18\n JBT, NYSE, -17.65, 2013-05-08\n DL, NYSE, -33.33, 2013-05-22\n RAX, NYSE, -5.00, 2013-05-09\n GSL, NYSE, -50.00, 2013-05-10\n HCI, NYSE, 66.06, 2013-05-03\n EC, NYSE, -18.58, 2013-05-04\n CLW, NYSE, -98.08, 2013-04-25\n MJN, NYSE, -1.16, 2013-04-26\n EPC, NYSE, 39.53, 2013-05-02\n BPI, NYSE, 0.00, 2013-05-07\n RST, NYSE, 25.00, 2013-05-09\n DGI, NYSE, 22.22, 2013-05-08\n SWI, NYSE, 6.25, 2013-05-01\n CYS, NYSE, -45.16, 2013-04-18\n IVR, NYSE, 1.59, 2013-05-02\n BUD, NYSE, 50.65, 2013-05-01\n SLD, NYSE, -66.67, 2013-05-15\n PMT, NYSE, 11.11, 2013-04-24\n STWD, NYSE, -20.93, 2013-05-09\n CFN, NYSE, 11.32, 2013-05-10\n SPB, NYSE, 7.32, 2013-05-01\n ARI, NYSE, 33.33, 2013-05-02\n CLNY, NYSE, -26.47, 2013-05-07\n ART, NYSE, -800.00, 2013-05-07\n SEM, NYSE, -11.11, 2013-05-03\n BSBR, NYSE, -71.43, 2013-04-26\n DOLE, NYSE, -50.00, 2013-05-03\n VSI, NYSE, 2.86, 2013-05-08\n TWO, NYSE, -9.38, 2013-05-08\n CVE, NYSE, -6.38, 2013-04-25\n H, NYSE, 12.50, 2013-05-02\n LEA, NYSE, 19.27, 2013-04-26\n SVN, NYSE, -81.82, 2013-05-14\n CLD, NYSE, -59.26, 2013-05-01\n AOL, NYSE, 6.25, 2013-05-09\n CHSP, NYSE, 25.00, 2013-05-08\n PEB, NYSE, 5.88, 2013-04-26\n CIT, NYSE, -8.99, 2013-04-24\n KAR, NYSE, -3.03, 2013-05-02\n CIE, NYSE, -15.38, 2013-05-01\n TMH, NYSE, 0.00, 2013-05-01\n KRA, NYSE, -75.00, 2013-05-02\n SYA, NYSE, 8.82, 2013-04-25\n TRNO, 
NYSE, -11.11, 2013-05-09\n PDM, NYSE, 0.00, 2013-05-03\n GNRC, NYSE, 23.47, 2013-05-03\n ACW, NYSE, -9.68, 2013-04-24\n BALT, NYSE, -9.52, 2013-05-02\n ST, NYSE, 4.35, 2013-04-24\n SEMG, NYSE, -15.00, 2013-05-09\n CALX, NYSE, 50.00, 2013-04-26\n MXL, NYSE, 33.33, 2013-05-01\n STNG, NYSE, 60.00, 2013-04-30\n PRI, NYSE, -4.35, 2013-05-08\n SDRL, NYSE, 16.95, 2013-05-29\n CLDT, NYSE, 7.50, 2013-05-08\n EXL, NYSE, 5.00, 2013-05-02\n LYB, NYSE, 9.09, 2013-04-27\n PNG, NYSE, 4.35, 2013-05-07\n PLOW, NYSE, 13.33, 2013-05-07\n SIX, NYSE, 19.61, 2013-04-23\n NKA, NYSE, -140.00, 2013-05-10\n RRTS, NYSE, 3.57, 2013-05-02\n JKS, NYSE, 66.27, 2013-06-08\n CODE, NYSE, 7.69, 2013-05-01\n FAF, NYSE, -31.71, 2013-04-26\n QEP, NYSE, -6.67, 2013-05-01\n OAS, NYSE, 31.37, 2013-05-08\n HPP, NYSE, 18.18, 2013-05-07\n FN, NYSE, 3.70, 2013-04-30\n ECT, NYSE, 7.32, 2013-05-11\n QUAD, NYSE, -88.10, 2013-05-08\n KKR, NYSE, 4.76, 2013-04-26\n RLD, NYSE, 70.00, 2013-06-07\n AMRC, NYSE, -200.00, 2013-05-10\n GDOT, NYSE, 9.37, 2013-05-01\n AT, NYSE, 40.00, 2013-05-09\n ENV, NYSE, 0.00, 2013-05-17\n COR, NYSE, 0.00, 2013-04-25\n VC, NYSE, 75.65, 2013-05-10\n CCG, NYSE, 5.88, 2013-05-01\n EFC, NYSE, -32.00, 2013-05-07\n TOWR, NYSE, 255.56, 2013-05-03\n CHMT, NYSE, -21.05, 2013-05-03\n HBM, NYSE, 200.00, 2013-05-02\n EXAM, NYSE, 0.00, 2013-05-09\n XUE, NYSE, -25.00, 2013-05-17\n CMRE, NYSE, 26.09, 2013-04-25\n NOAH, NYSE, 112.50, 2013-05-07\n IPHI, NYSE, 18.18, 2013-05-02\n BITA, NYSE, 0.00, 2013-05-10\n BAH, NYSE, 11.43, 2013-05-23\n GM, NYSE, 19.64, 2013-05-03\n XNY, NYSE, 28.57, 2013-05-20\n TROX, NYSE, -181.25, 2013-05-09\n TRGP, NYSE, 52.38, 2013-05-04\n DANG, NYSE, 21.05, 2013-05-17\n YOKU, NYSE, 0.00, 2013-05-16\n FRC, NYSE, 0.00, 2013-04-16\n RFP, NYSE, 64.29, 2013-05-01\n ISS, NYSE, 50.00, 2013-05-18\n WD, NYSE, -45.65, 2013-05-09\n FLT, NYSE, 10.39, 2013-05-03\n GCAP, NYSE, -15.38, 2013-05-08\n FRF, NYSE, -27.27, 2013-05-14\n SWFT, NYSE, 23.53, 2013-04-23\n AG, NYSE, -8.00, 2013-05-16\n QRE, NYSE, 0.00, 2013-05-09\n AAT, NYSE, 8.57, 2013-05-01\n MCC, NYSE, -2.70, 2013-05-03\n NLSN, NYSE, 9.09, 2013-04-26\n AGRO, NYSE, -100.00, 2013-05-17\n BKU, NYSE, 4.44, 2013-04-25\n INXN, NYSE, -7.14, 2013-05-09\n NPTN, NYSE, 10.00, 2013-05-10\n INN, NYSE, 5.88, 2013-05-07\n KMI, NYSE, -12.50, 2013-04-18\n HCA, NYSE, -4.82, 2013-05-03\n MX, NYSE, 13.04, 2013-05-01\n HII, NYSE, 0.00, 2013-05-09\n QIHU, NYSE, 100.00, 2013-05-20\n APO, NYSE, 56.20, 2013-05-07\n GNC, NYSE, 1.39, 2013-04-27\n SDT, NYSE, 16.07, 2013-05-11\n UAN, NYSE, 4.26, 2013-05-02\n ARCO, NYSE, -142.86, 2013-05-01\n ELLI, NYSE, -16.67, 2013-05-01\n TMS, NYSE, -12.00, 2013-04-26\n SQNS, NYSE, 0.00, 2013-04-26\n STAG, NYSE, 3.13, 2013-05-07\n AL, NYSE, 5.13, 2013-05-10\n TLLP, NYSE, -14.89, 2013-05-07\n RENN, NYSE, 85.71, 2013-05-14\n NQ, NYSE, -16.67, 2013-05-16\n KOS, NYSE, -37.50, 2013-05-10\n RLJ, NYSE, 10.81, 2013-05-09\n NGL, NYSE, -62.86, 2013-06-15\n FENG, NYSE, 60.00, 2013-05-15\n LNKD, NYSE, 340.00, 2013-05-03\n NMFC, NYSE, -2.86, 2013-05-07\n ACTV, NYSE, 32.14, 2013-05-03\n FIO, NYSE, 20.00, 2013-04-25\n TAOM, NYSE, -25.00, 2013-05-24\n RATE, NYSE, 10.00, 2013-05-01\n VHS, NYSE, 8.33, 2013-05-01\n MPC, NYSE, 0.00, 2013-05-01\n MITT, NYSE, -9.64, 2013-05-07\n OILT, NYSE, 17.07, 2013-05-09\n SXC, NYSE, -40.00, 2013-04-26\n AMTG, NYSE, 14.06, 2013-05-07\n AMID, NYSE, -200.00, 2013-05-14\n WAIR, NYSE, 22.22, 2013-04-30\n PER, NYSE, -7.58, 2013-05-11\n PPP, NYSE, 260.00, 2013-05-09\n FSM, NYSE, -28.57, 2013-05-08\n FBHS, NYSE, 41.18, 2013-05-03\n XLS, NYSE, 
73.91, 2013-05-04\n XYL, NYSE, -3.57, 2013-05-01\n GNE, NYSE, -550.00, 2013-05-08\n NDRO, NYSE, -8.11, 2013-05-04\n RNF, NYSE, -29.63, 2013-05-10\n VAC, NYSE, 10.20, 2013-04-26\n CHKR, NYSE, -2.90, 2013-05-10\n PACD, NYSE, 250.00, 2013-05-07\n INVN, NYSE, -13.33, 2013-05-03\n DLPH, NYSE, 11.46, 2013-05-02\n MN, NYSE, 0.00, 2013-05-02\n RRMS, NYSE, 51.28, 2013-05-10\n WPX, NYSE, -4.17, 2013-05-03\n LPI, NYSE, -15.38, 2013-05-10\n SN, NYSE, -82.61, 2013-05-08\n KORS, NYSE, 35.14, 2013-05-30\n BCEI, NYSE, -20.93, 2013-05-10\n BOXC, NYSE, 2.56, 2013-04-23\n PVG, NYSE, -25.00, 2013-05-11\n POST, NYSE, -29.63, 2013-05-14\n SLCA, NYSE, -2.78, 2013-05-01\n MTDR, NYSE, 0.00, 2013-05-09\n GWAY, NYSE, -120.00, 2013-05-07\n EPAM, NYSE, -14.71, 2013-05-09\n RNDY, NYSE, -9.52, 2013-05-10\n PRLB, NYSE, 0.00, 2013-04-26\n YELP, NYSE, -40.00, 2013-05-02\n NSM, NYSE, 23.19, 2013-05-08\n ALSN, NYSE, 95.24, 2013-04-30\n DWRE, NYSE, -22.73, 2013-05-08\n VNTV, NYSE, 3.70, 2013-05-07\n ET, NYSE, 0.00, 2013-05-10\n VCRA, NYSE, -160.00, 2013-05-03\n RM, NYSE, -1.82, 2013-05-03\n BNNY, NYSE, 3.57, 2013-06-11\n MM, NYSE, 25.00, 2013-05-09\n RXN, NYSE, 0.00, 2013-05-22\n GLOG, NYSE, -16.67, 2013-05-16\n RPAI, NYSE, 9.52, 2013-05-07\n OAK, NYSE, 39.86, 2013-05-08\n FET, NYSE, 3.03, 2013-04-26\n MRC, NYSE, 4.65, 2013-05-03\n PSX, NYSE, 17.74, 2013-05-02\n TUMI, NYSE, 6.67, 2013-05-09\n ACRE, NYSE, -5.88, 2013-05-16\n EVER, NYSE, 13.79, 2013-04-25\n PDH, NYSE, -13.24, 2013-04-25\n ROYT, NYSE, 10.00, 2013-05-11\n WMC, NYSE, -2.15, 2013-05-16\n WAGE, NYSE, 35.71, 2013-05-10\n HTA, NYSE, 6.67, 2013-05-08\n ALEX, NYSE, -28.57, 2013-05-10\n BKW, NYSE, 0.00, 2013-04-27\n CNCO, NYSE, -88.24, 2013-05-31\n EQM, NYSE, 41.30, 2013-04-26\n NOW, NYSE, 0.00, 2013-04-25\n EGL, NYSE, -11.24, 2013-05-14\n NGVC, NYSE, 7.69, 2013-05-10\n NTI, NYSE, 3.51, 2013-05-14\n AMRE, NYSE, 4.00, 2013-05-08\n GMED, NYSE, 5.00, 2013-05-03\n MANU, NYSE, -25.00, 2013-05-03\n HCLP, NYSE, -23.08, 2013-05-15\n ADT, NYSE, -4.65, 2013-05-02\n TRLA, NYSE, -75.00, 2013-05-01\n SRC, NYSE, 19.44, 2013-05-09\n NBHC, NYSE, -50.00, 2013-04-30\n BSMX, NYSE, 30.43, 2013-04-27\n HY, NYSE, 67.05, 2013-05-02\n SMLP, NYSE, -10.71, 2013-05-14\n DYN, NYSE, -254.55, 2013-05-03\n LXFR, NYSE, 0.00, 2013-05-08\n LOCK, NYSE, 25.00, 2013-05-02\n JMI, NYSE, 224.44, 2013-05-08\n BERY, NYSE, 16.67, 2013-05-03\n FLTX, NYSE, 8.33, 2013-05-09\n ANFI, NYSE, 0.00, 2013-06-11\n SSTK, NYSE, 23.08, 2013-05-09\n RLGY, NYSE, -13.33, 2013-05-02\n SDLP, NYSE, 88.64, 2013-05-29\n MPLX, NYSE, -7.14, 2013-05-01\n WWAV, NYSE, 6.67, 2013-05-10\n SXE, NYSE, -44.44, 2013-05-09\n DKL, NYSE, 31.58, 2013-05-08\n SCM, NYSE, -8.82, 2013-05-10\n RKUS, NYSE, -100.00, 2013-05-07\n ALDW, NYSE, -1.32, 2013-05-08\n WGP, NYSE, 0.00, 2013-05-02\n ABBV, NYSE, 3.03, 2013-04-27\n PBF, NYSE, -54.72, 2013-05-03\n SBY, NYSE, -433.33, 2013-05-14\n RIOM, NYSE, 0.00, 2013-05-15\n USAC, NYSE, -30.00, 2013-05-10\n CVRR, NYSE, -2.56, 2013-05-03\n SXCP, NYSE, -9.76, 2013-04-26\n BFAM, NYSE, 81.82, 2013-05-10\n TPH, NYSE, 200.00, 2013-05-15\n ZTS, NYSE, 5.88, 2013-05-01\n BCC, NYSE, 146.15, 2013-04-23\n AGI, NYSE, 0.00, 2013-04-26\n APAM, NYSE, -11.32, 2013-05-02\n SSNI, NYSE, -1211.77, 2013-05-02\n MODN, NYSE, 0.00, 2013-05-08\n AVIV, NYSE, 150.00, 2013-05-08\n OAKS, NYSE, 509.09, 2013-05-04\n MRIN, NYSE, -7.50, 2013-05-09\n PF, NYSE, 17.24, 2013-05-16\n TMHC, NYSE, -66.67, 2013-05-16\n ARPI, NYSE, -600.00, 2013-06-25\n CSTM, NYSE, -105.08, 2013-06-18\n DDC, NYSE, -80.00, 2013-06-06\n ABM, NYSE, 9.09, 2013-06-04\n ANN, NYSE, 
4.76, 2013-06-07\n BBY, NYSE, 28.00, 2013-05-22\n BF.B, NYSE, -2.17, 2013-06-06\n BKE, NYSE, -4.88, 2013-05-24\n NCS, NYSE, -21.74, 2013-06-05\n BNS, NYSE, -0.83, 2013-05-29\n BRC, NYSE, -6.78, 2013-05-17\n CATO, NYSE, 1.94, 2013-05-24\n COO, NYSE, 9.49, 2013-06-07\n CPB, NYSE, 10.71, 2013-05-21\n CFI, NYSE, 10.81, 2013-06-13\n DCI, NYSE, -4.17, 2013-05-18\n DDS, NYSE, 15.38, 2013-05-15\n DE, NYSE, 0.73, 2013-05-16\n DY, NYSE, 0.00, 2013-05-22\n EV, NYSE, 0.00, 2013-05-23\n ESL, NYSE, -11.81, 2013-05-31\n M, NYSE, 3.77, 2013-05-16\n GCO, NYSE, 11.90, 2013-06-01\n GPS, NYSE, 2.90, 2013-05-24\n HD, NYSE, 7.79, 2013-05-22\n HEI, NYSE, 10.00, 2013-05-23\n HOV, NYSE, 120.00, 2013-06-06\n HRB, NYSE, -1.93, 2013-06-13\n HRL, NYSE, 0.00, 2013-05-24\n HPQ, NYSE, 7.41, 2013-05-23\n JCP, NYSE, -12.93, 2013-05-17\n KR, NYSE, 4.55, 2013-06-21\n KSS, NYSE, 15.79, 2013-05-17\n LB, NYSE, 4.35, 2013-05-23\n LOW, NYSE, -3.92, 2013-05-23\n LZB, NYSE, 7.14, 2013-06-19\n MDT, NYSE, 6.80, 2013-05-22\n MEI, NYSE, 60.00, 2013-06-21\n MPR, NYSE, -33.33, 2013-06-07\n NAV, NYSE, -302.75, 2013-06-11\n JWN, NYSE, -3.95, 2013-05-17\n OXM, NYSE, 5.13, 2013-06-12\n PBY, NYSE, -85.71, 2013-06-11\n PLL, NYSE, 1.37, 2013-05-31\n PNY, NYSE, 0.00, 2013-06-08\n PVH, NYSE, 39.42, 2013-06-13\n THO, NYSE, -7.87, 2013-06-07\n TIF, NYSE, 32.08, 2013-05-29\n TJX, NYSE, 0.00, 2013-05-22\n TOL, NYSE, 0.00, 2013-05-23\n TTC, NYSE, 10.92, 2013-05-24\n VAL, NYSE, 2.25, 2013-05-15\n JW.A, NYSE, -16.47, 2013-06-19\n TGT, NYSE, 23.53, 2013-05-23\n WMT, NYSE, -0.87, 2013-05-17\n WSM, NYSE, 11.11, 2013-05-24\n FL, NYSE, 3.41, 2013-05-25\n CHS, NYSE, -11.11, 2013-05-30\n BKS, NYSE, 52.22, 2013-06-26\n CAL, NYSE, 45.45, 2013-05-30\n SIG, NYSE, 0.89, 2013-05-24\n ZLC, NYSE, 1200.00, 2013-05-23\n AEO, NYSE, 5.88, 2013-05-23\n FGP, NYSE, 15.69, 2013-06-07\n BMO, NYSE, -4.73, 2013-05-30\n RY, NYSE, -2.34, 2013-05-31\n GEF, NYSE, 1.45, 2013-06-06\n SKS, NYSE, 0.00, 2013-05-22\n TD, NYSE, 1.09, 2013-05-24\n ANF, NYSE, -80.00, 2013-05-25\n CIEN, NYSE, 20.00, 2013-06-07\n KMG, NYSE, 8.70, 2013-06-11\n IRET, NYSE, 11.76, 2013-07-02\n CM, NYSE, 0.00, 2013-05-31\n UBA, NYSE, 12.00, 2013-06-08\n KFY, NYSE, 3.23, 2013-06-18\n KKD, NYSE, 25.00, 2013-05-31\n MVC, NYSE, -37.50, 2013-06-11\n CBK, NYSE, 150.00, 2013-06-08\n SJM, NYSE, 12.17, 2013-06-07\n BIG, NYSE, 0.00, 2013-05-31\n JOY, NYSE, 11.61, 2013-05-31\n SSI, NYSE, -122.22, 2013-05-18\n GME, NYSE, 15.00, 2013-05-24\n DKS, NYSE, 0.00, 2013-05-22\n A, NYSE, 14.93, 2013-05-15\n MTN, NYSE, -3.62, 2013-06-07\n GES, NYSE, 75.00, 2013-05-31\n CRM, NYSE, -600.00, 2013-05-24\n NWY, NYSE, 128.57, 2013-05-24\n PAY, NYSE, -7.69, 2013-06-06\n DSW, NYSE, 11.11, 2013-05-30\n NX, NYSE, -300.00, 2013-06-08\n DG, NYSE, -1.39, 2013-06-05\n EXPR, NYSE, 5.56, 2013-05-31\n P, NYSE, 0.00, 2013-05-23\n GWRE, NYSE, 44.44, 2013-05-29\n BLOX, NYSE, 100.00, 2013-05-24\n TLYS, NYSE, 14.29, 2013-05-30\n PANW, NYSE, -900.00, 2013-05-31\n WDAY, NYSE, 13.04, 2013-05-23\n RH, NYSE, 50.00, 2013-06-14\n RALY, NYSE, 14.78, 2013-06-07\n AIR, NYSE, 13.64, 2013-07-26\n ATU, NYSE, -1.59, 2013-06-20\n AZO, NYSE, 0.69, 2013-05-22\n AZZ, NYSE, -8.20, 2013-06-29\n CAG, NYSE, 1.69, 2013-06-28\n CLC, NYSE, -1.49, 2013-06-20\n CMC, NYSE, -15.79, 2013-06-28\n FC, NYSE, 18.18, 2013-07-10\n FDO, NYSE, 1.94, 2013-07-11\n FDX, NYSE, 8.67, 2013-06-20\n FUL, NYSE, -5.63, 2013-06-27\n GIS, NYSE, -1.85, 2013-06-27\n KBH, NYSE, 20.00, 2013-06-28\n LEN, NYSE, 30.30, 2013-06-26\n LNN, NYSE, 12.92, 2013-06-27\n MKC, NYSE, 0.00, 2013-06-28\n RT, NYSE, -36.84, 
2013-07-25\n MCS, NYSE, -6.25, 2013-07-26\n MSM, NYSE, 9.37, 2013-07-11\n NKE, NYSE, 2.70, 2013-06-28\n ORCL, NYSE, 0.00, 2013-06-21\n PIR, NYSE, 0.00, 2013-06-21\n PKE, NYSE, -13.79, 2013-06-27\n RAD, NYSE, 0.00, 2013-06-21\n RPM, NYSE, 7.46, 2013-07-23\n SVU, NYSE, 250.00, 2013-07-19\n TISI, NYSE, 0.00, 2013-08-07\n TXI, NYSE, 116.00, 2013-07-11\n UNF, NYSE, 2.88, 2013-06-27\n WGO, NYSE, 0.00, 2013-06-28\n WOR, NYSE, -7.46, 2013-06-28\n JBL, NYSE, 4.35, 2013-06-20\n GBX, NYSE, -5.66, 2013-07-03\n DRI, NYSE, -1.94, 2013-06-22\n FDS, NYSE, -1.71, 2013-06-19\n KMX, NYSE, 12.28, 2013-06-22\n SCS, NYSE, 0.00, 2013-06-20\n SJR, NYSE, 16.28, 2013-06-29\n RHT, NYSE, 9.09, 2013-06-20\n OMN, NYSE, 14.29, 2013-06-28\n MON, NYSE, 3.75, 2013-06-27\n GPN, NYSE, -3.92, 2013-07-26\n AYI, NYSE, 7.78, 2013-07-03\n CCL, NYSE, 50.00, 2013-06-26\n CUK, NYSE, 50.00, 2013-06-26\n STZ, NYSE, -7.32, 2013-07-03\n ACN, NYSE, 0.00, 2013-06-28\n SNX, NYSE, 0.00, 2013-06-26\n TAL, NYSE, 66.67, 2013-07-23\n IHS, NYSE, 1.45, 2013-06-21\n EDU, NYSE, 20.00, 2013-07-24\n ZEP, NYSE, -31.71, 2013-07-03\n MG, NYSE, -5.88, 2013-08-08\n MOS, NYSE, -0.88, 2013-07-16\n ABT, NYSE, 4.55, 2013-07-18\n ABX, NYSE, 17.86, 2013-08-02\n AB, NYSE, 7.89, 2013-08-01\n TAP, NYSE, 8.63, 2013-08-07\n ACO, NYSE, 1.79, 2013-07-27\n ADM, NYSE, 9.52, 2013-08-07\n AEM, NYSE, -85.71, 2013-07-25\n AEP, NYSE, -5.19, 2013-07-26\n AES, NYSE, 23.08, 2013-08-09\n AET, NYSE, 9.35, 2013-07-31\n AFL, NYSE, 6.58, 2013-07-31\n AGCO, NYSE, 18.78, 2013-08-01\n AGN, NYSE, 1.01, 2013-07-26\n HES, NYSE, 7.09, 2013-08-01\n AIG, NYSE, 31.76, 2013-08-02\n AIN, NYSE, -23.08, 2013-08-01\n AJG, NYSE, 5.80, 2013-07-31\n ALU, NYSE, 33.33, 2013-07-31\n MATX, NYSE, 6.82, 2013-08-08\n ALK, NYSE, -0.68, 2013-07-26\n BEAM, NYSE, 6.67, 2013-08-09\n AME, NYSE, 0.00, 2013-08-08\n TWX, NYSE, 10.67, 2013-08-08\n AVD, NYSE, -17.14, 2013-08-06\n AMN, NYSE, 20.00, 2013-08-02\n AN, NYSE, -1.35, 2013-07-19\n AON, NYSE, 0.91, 2013-07-27\n APA, NYSE, -0.50, 2013-08-02\n APC, NYSE, 16.67, 2013-07-30\n APD, NYSE, 0.00, 2013-07-24\n APH, NYSE, 1.06, 2013-07-19\n ARG, NYSE, -0.87, 2013-07-26\n AAN, NYSE, 0.00, 2013-07-25\n ARW, NYSE, 8.74, 2013-07-25\n ASGN, NYSE, 14.29, 2013-07-25\n ASH, NYSE, -8.29, 2013-07-26\n ASR, NYSE, 21.90, 2013-07-23\n GAS, NYSE, 51.85, 2013-08-01\n ATO, NYSE, 13.51, 2013-08-07\n ATW, NYSE, 0.74, 2013-08-01\n AVP, NYSE, 11.54, 2013-08-02\n AVT, NYSE, 3.16, 2013-08-08\n AVY, NYSE, 2.90, 2013-07-24\n AXP, NYSE, 4.96, 2013-07-18\n B, NYSE, 0.00, 2013-07-27\n BA, NYSE, 5.70, 2013-07-25\n BAC, NYSE, 28.00, 2013-07-18\n BAX, NYSE, 2.65, 2013-07-19\n BC, NYSE, 13.89, 2013-07-26\n OMX, NYSE, -33.33, 2013-08-07\n BCE, NYSE, -2.67, 2013-08-09\n BCR, NYSE, 2.90, 2013-07-24\n BDX, NYSE, 7.48, 2013-08-02\n BEN, NYSE, 1.18, 2013-07-30\n BGG, NYSE, 15.79, 2013-08-16\n BHE, NYSE, 10.71, 2013-07-26\n BHI, NYSE, -6.15, 2013-07-20\n BID, NYSE, -9.56, 2013-08-07\n BIO, NYSE, 7.14, 2013-08-07\n BK, NYSE, 6.90, 2013-07-18\n BKH, NYSE, -2.38, 2013-08-06\n WRB, NYSE, -2.99, 2013-07-23\n BLC, NYSE, 9.09, 2013-07-31\n BLL, NYSE, 1.19, 2013-07-26\n BLX, NYSE, 5.56, 2013-07-19\n BMI, NYSE, -20.00, 2013-07-19\n BMS, NYSE, 1.67, 2013-07-26\n BMY, NYSE, 0.00, 2013-07-26\n BOH, NYSE, 2.41, 2013-07-23\n BXS, NYSE, 10.00, 2013-07-23\n BPL, NYSE, -8.86, 2013-08-03\nBRK.A, NYSE, 176.30, 2013-08-03\n BRO, NYSE, 2.86, 2013-07-16\n BSX, NYSE, 12.50, 2013-07-26\n BT, NYSE, 6.17, 2013-07-26\n MTRN, NYSE, 7.50, 2013-07-27\n CAI, NYSE, -8.54, 2013-07-31\n CAT, NYSE, -15.20, 2013-07-25\n CB, NYSE, 19.27, 
2013-07-24\n CBI, NYSE, 0.00, 2013-07-31\n CBM, NYSE, -64.29, 2013-08-02\n CBU, NYSE, 4.00, 2013-07-24\n CBT, NYSE, -4.35, 2013-08-01\n CCC, NYSE, 14.29, 2013-08-07\n CCE, NYSE, 2.67, 2013-07-26\n C, NYSE, 5.93, 2013-07-16\n CCK, NYSE, 3.23, 2013-07-18\n CCU, NYSE, 25.00, 2013-08-08\n CDE, NYSE, -1100.00, 2013-08-09\n CDI, NYSE, 6.25, 2013-08-02\n CAH, NYSE, 2.60, 2013-08-02\n CFR, NYSE, 0.00, 2013-07-25\n CHD, NYSE, 1.67, 2013-08-03\n CKP, NYSE, -15.38, 2013-08-07\n CPK, NYSE, -7.02, 2013-08-10\n CI, NYSE, 11.95, 2013-08-02\n CKH, NYSE, 51.67, 2013-07-31\n CL, NYSE, 0.00, 2013-07-26\n CLF, NYSE, 85.25, 2013-07-26\n CLH, NYSE, -25.00, 2013-08-08\n CLX, NYSE, 2.99, 2013-08-02\n CMA, NYSE, 8.57, 2013-07-17\n CMO, NYSE, -15.63, 2013-07-25\n CRK, NYSE, -6.67, 2013-07-30\n CMS, NYSE, -14.71, 2013-07-26\n CNA, NYSE, 17.19, 2013-07-31\n CNW, NYSE, 13.56, 2013-08-01\n CNL, NYSE, -6.06, 2013-08-01\n COG, NYSE, 35.48, 2013-07-25\n COT, NYSE, -4.76, 2013-08-02\n CP, NYSE, -4.14, 2013-07-25\n CPF, NYSE, 25.93, 2013-07-26\n CQB, NYSE, 43.48, 2013-08-09\n CR, NYSE, 0.00, 2013-07-23\nCRD.B, NYSE, 42.86, 2013-08-06\n CRS, NYSE, 11.59, 2013-07-31\n CSC, NYSE, 42.19, 2013-08-07\n CSL, NYSE, -14.93, 2013-07-24\n CTB, NYSE, -38.20, 2013-08-09\n CTL, NYSE, 2.99, 2013-08-08\n CTS, NYSE, 33.33, 2013-07-23\n CUB, NYSE, 9.52, 2013-08-02\n CMI, NYSE, 11.11, 2013-07-31\n CUZ, NYSE, 9.09, 2013-07-30\n CVC, NYSE, 80.00, 2013-08-03\n CW, NYSE, 6.06, 2013-08-01\n CWT, NYSE, 0.00, 2013-08-01\n CX, NYSE, 0.00, 2013-07-26\n CYN, NYSE, 8.33, 2013-07-19\n D, NYSE, -4.62, 2013-08-07\n DBD, NYSE, 0.00, 2013-08-15\n DCO, NYSE, 30.77, 2013-08-06\n DD, NYSE, 0.79, 2013-07-24\n CVA, NYSE, 150.00, 2013-07-18\n DHR, NYSE, 2.35, 2013-07-19\n DIS, NYSE, 0.00, 2013-08-07\n DLX, NYSE, 10.34, 2013-07-26\n DNB, NYSE, 2.00, 2013-08-08\n RRD, NYSE, 4.65, 2013-07-30\n DOV, NYSE, 5.43, 2013-07-19\n DOW, NYSE, 1.59, 2013-07-26\n DRE, NYSE, 0.00, 2013-08-01\n DHI, NYSE, 23.53, 2013-07-26\n UFS, NYSE, -25.00, 2013-07-26\n DTE, NYSE, -21.52, 2013-07-27\n DUK, NYSE, -6.45, 2013-08-08\n DVN, NYSE, 28.72, 2013-08-08\n DV, NYSE, 31.71, 2013-08-09\n EAT, NYSE, 4.05, 2013-08-03\n ECL, NYSE, 2.38, 2013-07-31\n ED, NYSE, -5.26, 2013-08-02\n EDE, NYSE, 8.00, 2013-07-26\n EFX, NYSE, 2.22, 2013-07-25\n EGN, NYSE, 8.20, 2013-08-01\n EGP, NYSE, 2.56, 2013-07-19\n ELP, NYSE, 17.65, 2013-08-16\n ELY, NYSE, 20.00, 2013-07-26\n EMC, NYSE, 2.94, 2013-07-25\n EMR, NYSE, -2.02, 2013-08-07\n EOG, NYSE, 19.32, 2013-08-07\n EQT, NYSE, 3.64, 2013-07-26\n ESE, NYSE, -41.07, 2013-08-09\n ESV, NYSE, 3.33, 2013-07-30\n ETN, NYSE, -1.80, 2013-08-03\n ETR, NYSE, 3.06, 2013-07-31\n EXAR, NYSE, 14.29, 2013-07-25\n F, NYSE, 21.62, 2013-07-25\n CLGX, NYSE, 13.64, 2013-07-25\n FNB, NYSE, 0.00, 2013-07-24\n FCF, NYSE, -50.00, 2013-07-25\n FBP, NYSE, -11.11, 2013-07-25\n FICO, NYSE, 6.35, 2013-07-31\n FLO, NYSE, 4.35, 2013-08-14\n FMC, NYSE, 0.00, 2013-07-30\n FOE, NYSE, 27.27, 2013-08-01\n S, NYSE, 6.06, 2013-07-31\n NEE, NYSE, 13.18, 2013-07-31\n FRT, NYSE, 0.88, 2013-08-01\n FRX, NYSE, 300.00, 2013-07-24\n FSS, NYSE, 64.29, 2013-08-10\n FUN, NYSE, 2.41, 2013-08-09\n FUR, NYSE, -48.15, 2013-08-02\n GBL, NYSE, 17.20, 2013-08-07\n GVA, NYSE, -78.13, 2013-08-02\n BGC, NYSE, 23.21, 2013-08-01\n GD, NYSE, 11.73, 2013-07-25\n GE, NYSE, 0.00, 2013-07-20\n RHP, NYSE, -26.85, 2013-08-07\n AXLL, NYSE, 2.59, 2013-08-01\n GGG, NYSE, 9.52, 2013-07-25\n GHM, NYSE, 52.00, 2013-07-26\n GIB, NYSE, 10.71, 2013-08-01\n GLT, NYSE, 20.00, 2013-07-31\n GLW, NYSE, 3.23, 2013-07-31\n GSK, NYSE, -5.88, 
2013-07-25\n GLF, NYSE, 25.71, 2013-07-23\n GPC, NYSE, 14.88, 2013-07-19\n GRA, NYSE, 2.75, 2013-07-26\n GTY, NYSE, 36.00, 2013-08-08\n GWW, NYSE, 2.71, 2013-07-18\n HAE, NYSE, 0.00, 2013-07-30\n HAL, NYSE, 1.39, 2013-07-23\n HAR, NYSE, 4.60, 2013-08-07\n HVT, NYSE, 31.25, 2013-08-01\n HRC, NYSE, 0.00, 2013-07-25\n HCC, NYSE, 21.69, 2013-07-31\n HCN, NYSE, 1.09, 2013-08-07\n HCP, NYSE, -2.70, 2013-07-31\n HOG, NYSE, 3.42, 2013-07-26\n HE, NYSE, 7.89, 2013-08-09\n HMA, NYSE, -46.15, 2013-08-10\n HMN, NYSE, 30.00, 2013-07-25\n HFC, NYSE, 0.00, 2013-08-08\n HOT, NYSE, 8.22, 2013-07-26\n HP, NYSE, 6.67, 2013-07-27\n HLS, NYSE, 18.60, 2013-07-26\n HRS, NYSE, 23.68, 2013-07-31\n HSC, NYSE, -11.76, 2013-08-09\n HSY, NYSE, 1.41, 2013-07-26\n HUBB, NYSE, 5.38, 2013-07-19\n HUM, NYSE, 6.91, 2013-08-01\n HXL, NYSE, 2.13, 2013-07-23\n IBM, NYSE, 3.44, 2013-07-18\n IDA, NYSE, 33.82, 2013-08-02\n IEX, NYSE, 2.70, 2013-07-23\n IFF, NYSE, -3.39, 2013-08-07\n DIN, NYSE, 12.09, 2013-07-31\n INT, NYSE, 11.76, 2013-08-01\n IP, NYSE, -5.45, 2013-07-26\n IPG, NYSE, -14.29, 2013-07-20\n IO, NYSE, -100.00, 2013-08-08\n IR, NYSE, 5.56, 2013-07-20\n IRF, NYSE, 81.82, 2013-08-20\n ITW, NYSE, -0.92, 2013-07-24\n JEC, NYSE, -1.19, 2013-07-30\n JNJ, NYSE, 5.71, 2013-07-17\n JNY, NYSE, 116.67, 2013-08-01\n K, NYSE, 3.09, 2013-08-02\n KAMN, NYSE, 13.56, 2013-07-30\n KDN, NYSE, 10.53, 2013-07-26\n KEX, NYSE, 0.94, 2013-07-25\n KEY, NYSE, 5.00, 2013-07-19\n KIM, NYSE, 6.06, 2013-07-30\n KMB, NYSE, 1.44, 2013-07-23\n KEM, NYSE, -95.00, 2013-07-26\n KMT, NYSE, 4.11, 2013-07-26\n KO, NYSE, 0.00, 2013-07-17\n KSU, NYSE, 1.05, 2013-07-20\n LDR, NYSE, -19.64, 2013-08-06\n LEG, NYSE, 0.00, 2013-07-26\n LLY, NYSE, 13.73, 2013-07-25\n LM, NYSE, -1.45, 2013-07-26\n LNC, NYSE, 10.43, 2013-08-01\n LPX, NYSE, 32.26, 2013-08-07\n LXU, NYSE, 29.17, 2013-08-09\n LTC, NYSE, -3.39, 2013-08-09\n L, NYSE, -5.48, 2013-07-30\n LUV, NYSE, -2.56, 2013-07-26\n LUX, NYSE, -1.67, 2013-07-26\n MKL, NYSE, 7.46, 2013-08-08\n MAN, NYSE, 17.98, 2013-07-20\n MTW, NYSE, 25.00, 2013-07-30\n SM, NYSE, 0.00, 2013-07-31\n MAS, NYSE, 21.05, 2013-07-30\n MTZ, NYSE, 2.33, 2013-08-02\n MCD, NYSE, -1.43, 2013-07-23\n MDC, NYSE, 38.18, 2013-07-31\n MDP, NYSE, 5.63, 2013-07-26\n MDR, NYSE, -1966.67, 2013-08-06\n MDU, NYSE, -3.85, 2013-08-01\n MED, NYSE, 2.00, 2013-08-07\n CVS, NYSE, 1.04, 2013-08-07\n MFC, NYSE, -3.12, 2013-08-09\n MGA, NYSE, 11.25, 2013-08-10\n MGM, NYSE, 300.00, 2013-08-07\n MMC, NYSE, 2.94, 2013-08-08\n MMM, NYSE, 0.59, 2013-07-26\n MSA, NYSE, 0.00, 2013-07-25\n MNR, NYSE, -27.78, 2013-08-07\n MO, NYSE, -1.59, 2013-07-24\n MOD, NYSE, 145.45, 2013-08-02\nMOG.A, NYSE, 8.43, 2013-07-27\n MHK, NYSE, 10.84, 2013-08-02\n MSI, NYSE, 11.96, 2013-07-25\n MCY, NYSE, 3.28, 2013-07-30\n MRK, NYSE, 2.44, 2013-07-31\n MRO, NYSE, -5.63, 2013-08-07\n POWR, NYSE, 20.00, 2013-08-08\n MTG, NYSE, 118.75, 2013-07-24\n MTB, NYSE, 26.19, 2013-07-18\n MTX, NYSE, 8.62, 2013-07-26\n MUR, NYSE, 12.90, 2013-08-01\n MYE, NYSE, 19.05, 2013-07-19\n NBL, NYSE, -5.48, 2013-07-26\n NBR, NYSE, -11.11, 2013-07-24\n NE, NYSE, 12.50, 2013-07-18\n NEM, NYSE, -124.39, 2013-07-27\n NFG, NYSE, 6.15, 2013-08-09\n NHI, NYSE, -1.14, 2013-08-07\n NI, NYSE, -4.17, 2013-08-01\n NJR, NYSE, 15.00, 2013-08-08\n THC, NYSE, -4.35, 2013-08-07\n NNN, NYSE, 0.00, 2013-08-02\n NOC, NYSE, 20.59, 2013-07-25\n NR, NYSE, -5.26, 2013-07-26\n NSC, NYSE, -2.67, 2013-07-24\n NUE, NYSE, -10.00, 2013-07-19\n NVR, NYSE, -18.34, 2013-07-23\n NWL, NYSE, 2.04, 2013-07-27\n NWN, NYSE, -11.11, 2013-08-08\n NYT, NYSE, 
16.67, 2013-08-02\n OCR, NYSE, 4.65, 2013-07-25\n OGE, NYSE, -2.13, 2013-08-09\n OHI, NYSE, 1.64, 2013-08-01\n OI, NYSE, 2.53, 2013-07-25\n OII, NYSE, 8.33, 2013-07-25\n OKE, NYSE, -225.93, 2013-07-31\n OLN, NYSE, 3.85, 2013-07-26\n BRS, NYSE, 1.01, 2013-08-06\n OMC, NYSE, 0.00, 2013-07-19\n OMI, NYSE, 0.00, 2013-07-30\n ORB, NYSE, 17.39, 2013-07-19\n ORI, NYSE, 1750.00, 2013-07-26\n OSK, NYSE, 53.21, 2013-07-31\n OXY, NYSE, -1.86, 2013-07-31\n FCFS, NYSE, 1.79, 2013-07-18\n PBI, NYSE, 15.56, 2013-07-31\n PCG, NYSE, 9.72, 2013-08-01\n PCL, NYSE, 21.74, 2013-07-30\n PCP, NYSE, -0.69, 2013-07-26\n TPC, NYSE, -11.11, 2013-08-10\n PEG, NYSE, 4.35, 2013-07-31\n PEI, NYSE, 7.69, 2013-07-24\n PEP, NYSE, 10.08, 2013-07-25\n PFE, NYSE, 3.70, 2013-07-31\n PG, NYSE, 2.60, 2013-08-02\n PGR, NYSE, -2.44, 2013-07-12\n PH, NYSE, -8.72, 2013-08-07\n PHM, NYSE, -10.34, 2013-07-26\n PKD, NYSE, 0.00, 2013-08-07\n PKY, NYSE, 0.00, 2013-08-06\n PNC, NYSE, 21.34, 2013-07-18\n PNM, NYSE, 15.15, 2013-08-03\n PNR, NYSE, 2.22, 2013-07-24\n PNW, NYSE, 3.51, 2013-08-03\n POM, NYSE, -8.33, 2013-08-08\n POT, NYSE, -10.98, 2013-07-26\n PPG, NYSE, 4.70, 2013-07-19\n PPL, NYSE, 0.00, 2013-08-02' if __name__ == "__main__": main()
true
true
f7001f50544c64f723e9cc46eb3ac8d4d5544cb0
10,958
py
Python
neutron/tests/unit/services/metering/agents/test_metering_agent.py
acdc-cloud/neutron
2510836886555179f9e9e39b1fdbf94296befc51
[ "Apache-2.0" ]
null
null
null
neutron/tests/unit/services/metering/agents/test_metering_agent.py
acdc-cloud/neutron
2510836886555179f9e9e39b1fdbf94296befc51
[ "Apache-2.0" ]
null
null
null
neutron/tests/unit/services/metering/agents/test_metering_agent.py
acdc-cloud/neutron
2510836886555179f9e9e39b1fdbf94296befc51
[ "Apache-2.0" ]
null
null
null
# Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
from oslo_config import cfg
from oslo_utils import fixture as utils_fixture
from oslo_utils import timeutils
from oslo_utils import uuidutils

from neutron.conf.services import metering_agent as metering_agent_config
from neutron.services.metering.agents import metering_agent
from neutron.tests import base
from neutron.tests import fake_notifier

_uuid = uuidutils.generate_uuid

TENANT_ID = _uuid()
LABEL_ID = _uuid()
ROUTERS = [{'status': 'ACTIVE',
            'name': 'router1',
            'gw_port_id': None,
            'admin_state_up': True,
            'tenant_id': TENANT_ID,
            '_metering_labels': [{'rules': [],
                                  'id': LABEL_ID}],
            'id': _uuid()}]

ROUTERS_WITH_RULE = [{'status': 'ACTIVE',
                      'name': 'router1',
                      'gw_port_id': None,
                      'admin_state_up': True,
                      'tenant_id': TENANT_ID,
                      '_metering_labels': [{'rule': {},
                                            'id': LABEL_ID}],
                      'id': _uuid()}]


class TestMeteringOperations(base.BaseTestCase):

    def setUp(self):
        super(TestMeteringOperations, self).setUp()
        metering_agent_config.register_metering_agent_opts()

        self.noop_driver = ('neutron.services.metering.drivers.noop.'
                            'noop_driver.NoopMeteringDriver')
        cfg.CONF.set_override('driver', 'noop')
        cfg.CONF.set_override('measure_interval', 0)
        cfg.CONF.set_override('report_interval', 0)

        self.setup_notification_driver()

        metering_rpc = ('neutron.services.metering.agents.metering_agent.'
                        'MeteringPluginRpc._get_sync_data_metering')
        self.metering_rpc_patch = mock.patch(metering_rpc, return_value=[])
        self.metering_rpc_patch.start()

        self.driver_patch = mock.patch(self.noop_driver, spec=True)
        self.driver_patch.start()

        loopingcall_patch = mock.patch(
            'oslo_service.loopingcall.FixedIntervalLoopingCall')
        loopingcall_patch.start()

        self.agent = metering_agent.MeteringAgent('my agent', cfg.CONF)
        self.driver = self.agent.metering_driver

    def test_add_metering_label(self):
        self.agent.add_metering_label(None, ROUTERS)
        self.assertEqual(1, self.driver.add_metering_label.call_count)

    def test_remove_metering_label(self):
        self.agent.remove_metering_label(None, ROUTERS)
        self.assertEqual(1, self.driver.remove_metering_label.call_count)

    def test_update_metering_label_rule(self):
        self.agent.update_metering_label_rules(None, ROUTERS)
        self.assertEqual(
            1, self.driver.update_metering_label_rules.call_count)

    def test_add_metering_label_rule(self):
        self.agent.add_metering_label_rule(None, ROUTERS_WITH_RULE)
        self.assertEqual(1, self.driver.add_metering_label_rule.call_count)

    def test_remove_metering_label_rule(self):
        self.agent.remove_metering_label_rule(None, ROUTERS_WITH_RULE)
        self.assertEqual(1, self.driver.remove_metering_label_rule.call_count)

    def test_routers_updated(self):
        self.agent.routers_updated(None, ROUTERS)
        self.assertEqual(1, self.driver.update_routers.call_count)

    def test_get_traffic_counters(self):
        self.agent._get_traffic_counters(None, ROUTERS)
        self.assertEqual(1, self.driver.get_traffic_counters.call_count)

    def test_sync_router_namespaces(self):
        self.agent._sync_router_namespaces(None, ROUTERS)
        self.assertEqual(1, self.driver.sync_router_namespaces.call_count)

    def test_notification_report(self):
        self.agent.routers_updated(None, ROUTERS)

        self.driver.get_traffic_counters.return_value = {LABEL_ID:
                                                         {'pkts': 88,
                                                          'bytes': 444}}
        self.agent._metering_loop()

        self.assertNotEqual(len(fake_notifier.NOTIFICATIONS), 0)
        for n in fake_notifier.NOTIFICATIONS:
            if n['event_type'] == 'l3.meter':
                break

        self.assertEqual('l3.meter', n['event_type'])
        payload = n['payload']
        self.assertEqual(TENANT_ID, payload['tenant_id'])
        self.assertEqual(LABEL_ID, payload['label_id'])
        self.assertEqual(88, payload['pkts'])
        self.assertEqual(444, payload['bytes'])

    def test_notification_report_interval(self):
        measure_interval = 30
        report_interval = 600

        now = timeutils.utcnow()
        time_fixture = self.useFixture(utils_fixture.TimeFixture(now))

        self.agent.routers_updated(None, ROUTERS)

        self.driver.get_traffic_counters.return_value = {LABEL_ID:
                                                         {'pkts': 889,
                                                          'bytes': 4440}}

        cfg.CONF.set_override('measure_interval', measure_interval)
        cfg.CONF.set_override('report_interval', report_interval)

        for i in range(report_interval):
            self.agent._metering_loop()
            count = 0
            if len(fake_notifier.NOTIFICATIONS) > 1:
                for n in fake_notifier.NOTIFICATIONS:
                    if n['event_type'] == 'l3.meter':
                        # skip the first notification because the time is 0
                        count += 1
                        if count > 1:
                            break
            time_fixture.advance_time_seconds(measure_interval)

        self.assertEqual('l3.meter', n['event_type'])
        payload = n['payload']
        self.assertEqual(TENANT_ID, payload['tenant_id'])
        self.assertEqual(LABEL_ID, payload['label_id'])
        self.assertLess((payload['time'] - report_interval),
                        measure_interval, payload)
        interval = (payload['last_update'] - payload['first_update']) \
            - report_interval
        self.assertLess(interval, measure_interval, payload)

    def test_router_deleted(self):
        label_id = _uuid()
        self.driver.get_traffic_counters = mock.MagicMock()
        self.driver.get_traffic_counters.return_value = {label_id:
                                                         {'pkts': 44,
                                                          'bytes': 222}}
        self.agent._add_metering_info = mock.MagicMock()

        self.agent.routers_updated(None, ROUTERS)
        self.agent.router_deleted(None, ROUTERS[0]['id'])

        self.assertEqual(1, self.agent._add_metering_info.call_count)
        self.assertEqual(1, self.driver.remove_router.call_count)

        self.agent._add_metering_info.assert_called_with(label_id, 44, 222)

    @mock.patch('time.time')
    def _test_purge_metering_info(self, current_timestamp, is_empty,
                                  mock_time):
        mock_time.return_value = current_timestamp
        self.agent.metering_infos = {'fake': {'last_update': 1}}
        self.config(report_interval=1)

        self.agent._purge_metering_info()
        self.assertEqual(0 if is_empty else 1, len(self.agent.metering_infos))
        self.assertEqual(1, mock_time.call_count)

    def test_purge_metering_info(self):
        # 1 < 2 - 1 -> False
        self._test_purge_metering_info(2, False)

    def test_purge_metering_info_delete(self):
        # 1 < 3 - 1 -> True
        self._test_purge_metering_info(3, True)

    @mock.patch('time.time')
    def _test_add_metering_info(self, expected_info, current_timestamp,
                                mock_time):
        mock_time.return_value = current_timestamp
        actual_info = self.agent._add_metering_info('fake_label_id', 1, 1)
        self.assertEqual(1, len(self.agent.metering_infos))
        self.assertEqual(expected_info, actual_info)
        self.assertEqual(expected_info,
                         self.agent.metering_infos['fake_label_id'])
        self.assertEqual(1, mock_time.call_count)

    def test_add_metering_info_create(self):
        expected_info = {'bytes': 1, 'pkts': 1, 'time': 0, 'first_update': 1,
                         'last_update': 1}
        self._test_add_metering_info(expected_info, 1)

    def test_add_metering_info_update(self):
        expected_info = {'bytes': 1, 'pkts': 1, 'time': 0, 'first_update': 1,
                         'last_update': 1}
        self.agent.metering_infos = {'fake_label_id': expected_info}
        expected_info.update({'bytes': 2, 'pkts': 2, 'time': 1,
                              'last_update': 2})
        self._test_add_metering_info(expected_info, 2)

    def test_metering_agent_host_value(self):
        expected_host = 'my agent'
        self.assertEqual(expected_host, self.agent.host)


class TestMeteringDriver(base.BaseTestCase):
    def setUp(self):
        super(TestMeteringDriver, self).setUp()
        metering_agent_config.register_metering_agent_opts()

        cfg.CONF.set_override('driver', 'noop')

        self.agent = metering_agent.MeteringAgent('my agent', cfg.CONF)
        self.driver = mock.Mock()
        self.agent.metering_driver = self.driver

    def test_add_metering_label_with_bad_driver_impl(self):
        del self.driver.add_metering_label

        with mock.patch.object(metering_agent, 'LOG') as log:
            self.agent.add_metering_label(None, ROUTERS)
            log.exception.assert_called_with(mock.ANY,
                                             {'driver': 'noop',
                                              'func': 'add_metering_label'})

    def test_add_metering_label_runtime_error(self):
        self.driver.add_metering_label.side_effect = RuntimeError

        with mock.patch.object(metering_agent, 'LOG') as log:
            self.agent.add_metering_label(None, ROUTERS)
            log.exception.assert_called_with(mock.ANY,
                                             {'driver': 'noop',
                                              'func': 'add_metering_label'})

    def test_init_chain(self):
        with mock.patch('oslo_service.'
                        'periodic_task.PeriodicTasks.__init__') as init:
            metering_agent.MeteringAgent('my agent', cfg.CONF)
            init.assert_called_once_with(cfg.CONF)
40.435424
79
0.62484
import mock
from oslo_config import cfg
from oslo_utils import fixture as utils_fixture
from oslo_utils import timeutils
from oslo_utils import uuidutils

from neutron.conf.services import metering_agent as metering_agent_config
from neutron.services.metering.agents import metering_agent
from neutron.tests import base
from neutron.tests import fake_notifier

_uuid = uuidutils.generate_uuid

TENANT_ID = _uuid()
LABEL_ID = _uuid()
ROUTERS = [{'status': 'ACTIVE',
            'name': 'router1',
            'gw_port_id': None,
            'admin_state_up': True,
            'tenant_id': TENANT_ID,
            '_metering_labels': [{'rules': [],
                                  'id': LABEL_ID}],
            'id': _uuid()}]

ROUTERS_WITH_RULE = [{'status': 'ACTIVE',
                      'name': 'router1',
                      'gw_port_id': None,
                      'admin_state_up': True,
                      'tenant_id': TENANT_ID,
                      '_metering_labels': [{'rule': {},
                                            'id': LABEL_ID}],
                      'id': _uuid()}]


class TestMeteringOperations(base.BaseTestCase):

    def setUp(self):
        super(TestMeteringOperations, self).setUp()
        metering_agent_config.register_metering_agent_opts()

        self.noop_driver = ('neutron.services.metering.drivers.noop.'
                            'noop_driver.NoopMeteringDriver')
        cfg.CONF.set_override('driver', 'noop')
        cfg.CONF.set_override('measure_interval', 0)
        cfg.CONF.set_override('report_interval', 0)

        self.setup_notification_driver()

        metering_rpc = ('neutron.services.metering.agents.metering_agent.'
                        'MeteringPluginRpc._get_sync_data_metering')
        self.metering_rpc_patch = mock.patch(metering_rpc, return_value=[])
        self.metering_rpc_patch.start()

        self.driver_patch = mock.patch(self.noop_driver, spec=True)
        self.driver_patch.start()

        loopingcall_patch = mock.patch(
            'oslo_service.loopingcall.FixedIntervalLoopingCall')
        loopingcall_patch.start()

        self.agent = metering_agent.MeteringAgent('my agent', cfg.CONF)
        self.driver = self.agent.metering_driver

    def test_add_metering_label(self):
        self.agent.add_metering_label(None, ROUTERS)
        self.assertEqual(1, self.driver.add_metering_label.call_count)

    def test_remove_metering_label(self):
        self.agent.remove_metering_label(None, ROUTERS)
        self.assertEqual(1, self.driver.remove_metering_label.call_count)

    def test_update_metering_label_rule(self):
        self.agent.update_metering_label_rules(None, ROUTERS)
        self.assertEqual(
            1, self.driver.update_metering_label_rules.call_count)

    def test_add_metering_label_rule(self):
        self.agent.add_metering_label_rule(None, ROUTERS_WITH_RULE)
        self.assertEqual(1, self.driver.add_metering_label_rule.call_count)

    def test_remove_metering_label_rule(self):
        self.agent.remove_metering_label_rule(None, ROUTERS_WITH_RULE)
        self.assertEqual(1, self.driver.remove_metering_label_rule.call_count)

    def test_routers_updated(self):
        self.agent.routers_updated(None, ROUTERS)
        self.assertEqual(1, self.driver.update_routers.call_count)

    def test_get_traffic_counters(self):
        self.agent._get_traffic_counters(None, ROUTERS)
        self.assertEqual(1, self.driver.get_traffic_counters.call_count)

    def test_sync_router_namespaces(self):
        self.agent._sync_router_namespaces(None, ROUTERS)
        self.assertEqual(1, self.driver.sync_router_namespaces.call_count)

    def test_notification_report(self):
        self.agent.routers_updated(None, ROUTERS)

        self.driver.get_traffic_counters.return_value = {LABEL_ID:
                                                         {'pkts': 88,
                                                          'bytes': 444}}
        self.agent._metering_loop()

        self.assertNotEqual(len(fake_notifier.NOTIFICATIONS), 0)
        for n in fake_notifier.NOTIFICATIONS:
            if n['event_type'] == 'l3.meter':
                break

        self.assertEqual('l3.meter', n['event_type'])
        payload = n['payload']
        self.assertEqual(TENANT_ID, payload['tenant_id'])
        self.assertEqual(LABEL_ID, payload['label_id'])
        self.assertEqual(88, payload['pkts'])
        self.assertEqual(444, payload['bytes'])

    def test_notification_report_interval(self):
        measure_interval = 30
        report_interval = 600

        now = timeutils.utcnow()
        time_fixture = self.useFixture(utils_fixture.TimeFixture(now))

        self.agent.routers_updated(None, ROUTERS)

        self.driver.get_traffic_counters.return_value = {LABEL_ID:
                                                         {'pkts': 889,
                                                          'bytes': 4440}}

        cfg.CONF.set_override('measure_interval', measure_interval)
        cfg.CONF.set_override('report_interval', report_interval)

        for i in range(report_interval):
            self.agent._metering_loop()
            count = 0
            if len(fake_notifier.NOTIFICATIONS) > 1:
                for n in fake_notifier.NOTIFICATIONS:
                    if n['event_type'] == 'l3.meter':
                        count += 1
                        if count > 1:
                            break
            time_fixture.advance_time_seconds(measure_interval)

        self.assertEqual('l3.meter', n['event_type'])
        payload = n['payload']
        self.assertEqual(TENANT_ID, payload['tenant_id'])
        self.assertEqual(LABEL_ID, payload['label_id'])
        self.assertLess((payload['time'] - report_interval),
                        measure_interval, payload)
        interval = (payload['last_update'] - payload['first_update']) \
            - report_interval
        self.assertLess(interval, measure_interval, payload)

    def test_router_deleted(self):
        label_id = _uuid()
        self.driver.get_traffic_counters = mock.MagicMock()
        self.driver.get_traffic_counters.return_value = {label_id:
                                                         {'pkts': 44,
                                                          'bytes': 222}}
        self.agent._add_metering_info = mock.MagicMock()

        self.agent.routers_updated(None, ROUTERS)
        self.agent.router_deleted(None, ROUTERS[0]['id'])

        self.assertEqual(1, self.agent._add_metering_info.call_count)
        self.assertEqual(1, self.driver.remove_router.call_count)

        self.agent._add_metering_info.assert_called_with(label_id, 44, 222)

    @mock.patch('time.time')
    def _test_purge_metering_info(self, current_timestamp, is_empty,
                                  mock_time):
        mock_time.return_value = current_timestamp
        self.agent.metering_infos = {'fake': {'last_update': 1}}
        self.config(report_interval=1)

        self.agent._purge_metering_info()
        self.assertEqual(0 if is_empty else 1, len(self.agent.metering_infos))
        self.assertEqual(1, mock_time.call_count)

    def test_purge_metering_info(self):
        self._test_purge_metering_info(2, False)

    def test_purge_metering_info_delete(self):
        self._test_purge_metering_info(3, True)

    @mock.patch('time.time')
    def _test_add_metering_info(self, expected_info, current_timestamp,
                                mock_time):
        mock_time.return_value = current_timestamp
        actual_info = self.agent._add_metering_info('fake_label_id', 1, 1)
        self.assertEqual(1, len(self.agent.metering_infos))
        self.assertEqual(expected_info, actual_info)
        self.assertEqual(expected_info,
                         self.agent.metering_infos['fake_label_id'])
        self.assertEqual(1, mock_time.call_count)

    def test_add_metering_info_create(self):
        expected_info = {'bytes': 1, 'pkts': 1, 'time': 0, 'first_update': 1,
                         'last_update': 1}
        self._test_add_metering_info(expected_info, 1)

    def test_add_metering_info_update(self):
        expected_info = {'bytes': 1, 'pkts': 1, 'time': 0, 'first_update': 1,
                         'last_update': 1}
        self.agent.metering_infos = {'fake_label_id': expected_info}
        expected_info.update({'bytes': 2, 'pkts': 2, 'time': 1,
                              'last_update': 2})
        self._test_add_metering_info(expected_info, 2)

    def test_metering_agent_host_value(self):
        expected_host = 'my agent'
        self.assertEqual(expected_host, self.agent.host)


class TestMeteringDriver(base.BaseTestCase):
    def setUp(self):
        super(TestMeteringDriver, self).setUp()
        metering_agent_config.register_metering_agent_opts()

        cfg.CONF.set_override('driver', 'noop')

        self.agent = metering_agent.MeteringAgent('my agent', cfg.CONF)
        self.driver = mock.Mock()
        self.agent.metering_driver = self.driver

    def test_add_metering_label_with_bad_driver_impl(self):
        del self.driver.add_metering_label

        with mock.patch.object(metering_agent, 'LOG') as log:
            self.agent.add_metering_label(None, ROUTERS)
            log.exception.assert_called_with(mock.ANY,
                                             {'driver': 'noop',
                                              'func': 'add_metering_label'})

    def test_add_metering_label_runtime_error(self):
        self.driver.add_metering_label.side_effect = RuntimeError

        with mock.patch.object(metering_agent, 'LOG') as log:
            self.agent.add_metering_label(None, ROUTERS)
            log.exception.assert_called_with(mock.ANY,
                                             {'driver': 'noop',
                                              'func': 'add_metering_label'})

    def test_init_chain(self):
        with mock.patch('oslo_service.'
                        'periodic_task.PeriodicTasks.__init__') as init:
            metering_agent.MeteringAgent('my agent', cfg.CONF)
            init.assert_called_once_with(cfg.CONF)
true
true
f7001f655f7daed3946ab54e83b3f126b421761b
825
py
Python
commands/utils.py
j4p/JeBB
2bc1915170ad99e95f383733909aa47d00496ecd
[ "MIT" ]
7
2019-02-25T05:56:09.000Z
2020-07-29T03:18:30.000Z
commands/utils.py
winbotscript/JeBB
63cab9259fe22a10d91bf20e277a83b49ae8ab4f
[ "MIT" ]
1
2020-12-17T09:46:04.000Z
2020-12-17T09:46:04.000Z
commands/utils.py
winbotscript/JeBB
63cab9259fe22a10d91bf20e277a83b49ae8ab4f
[ "MIT" ]
7
2018-04-06T14:56:47.000Z
2020-06-15T15:15:10.000Z
from PIL import ImageChops, Image as PILImage
from http.client import HTTPConnection
from time import sleep
from traceback import format_stack, print_exc


def Tint(image, color):
    return ImageChops.blend(image, PILImage.new('RGB', image.size, color),
                            0.36)


def GetStatusCode(host, path="/"):
    """ This function retrieves the status code of a website by requesting
        HEAD data from the host. This means that it only requests the headers.
        If the host cannot be reached or something else goes wrong, it returns
        None instead.
    """
    try:
        conn = HTTPConnection(host)
        conn.request("HEAD", path)
        return conn.getresponse().status
    except Exception:
        return None


def WaitOK(host, path="/"):
    while GetStatusCode(host, path) != 200:
        sleep(5)
31.730769
80
0.675152
from PIL import ImageChops, Image as PILImage
from http.client import HTTPConnection
from time import sleep
from traceback import format_stack, print_exc


def Tint(image, color):
    return ImageChops.blend(image, PILImage.new('RGB', image.size, color),
                            0.36)


def GetStatusCode(host, path="/"):
    try:
        conn = HTTPConnection(host)
        conn.request("HEAD", path)
        return conn.getresponse().status
    except Exception:
        return None


def WaitOK(host, path="/"):
    while GetStatusCode(host, path) != 200:
        sleep(5)
true
true
f7002044d0369ad65533164d260b2c8f91cb7841
22,513
py
Python
zipline/data/history_loader.py
SJCosgrove/quantoipian
70f8d14778a16f771d8c2ee196a5dba0788e920a
[ "Apache-2.0" ]
412
2017-04-30T14:35:47.000Z
2022-03-29T02:58:33.000Z
zipline/data/history_loader.py
waijay1992/zipline
8beba055aa4211dc2debc5c3083077cbd19d0bbc
[ "Apache-2.0" ]
116
2017-05-15T04:45:45.000Z
2020-05-30T19:09:00.000Z
zipline/data/history_loader.py
waijay1992/zipline
8beba055aa4211dc2debc5c3083077cbd19d0bbc
[ "Apache-2.0" ]
80
2017-05-03T13:17:33.000Z
2021-02-08T15:42:09.000Z
# Copyright 2016 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from abc import (
    ABCMeta,
    abstractmethod,
    abstractproperty,
)

from numpy import concatenate
from lru import LRU
from pandas import isnull
from pandas.tslib import normalize_date
from toolz import sliding_window

from six import with_metaclass

from zipline.assets import Equity, Future
from zipline.assets.continuous_futures import ContinuousFuture
from zipline.lib._int64window import AdjustedArrayWindow as Int64Window
from zipline.lib._float64window import AdjustedArrayWindow as Float64Window
from zipline.lib.adjustment import Float64Multiply, Float64Add
from zipline.utils.cache import ExpiringCache
from zipline.utils.math_utils import number_of_decimal_places
from zipline.utils.memoize import lazyval
from zipline.utils.numpy_utils import float64_dtype
from zipline.utils.pandas_utils import find_in_sorted_index

# Default number of decimal places used for rounding asset prices.
DEFAULT_ASSET_PRICE_DECIMALS = 3


class HistoryCompatibleUSEquityAdjustmentReader(object):

    def __init__(self, adjustment_reader):
        self._adjustments_reader = adjustment_reader

    def load_adjustments(self, columns, dts, assets):
        """
        Returns
        -------
        adjustments : list[dict[int -> Adjustment]]
            A list, where each element corresponds to the `columns`, of
            mappings from index to adjustment objects to apply at that index.
        """
        out = [None] * len(columns)
        for i, column in enumerate(columns):
            adjs = {}
            for asset in assets:
                adjs.update(self._get_adjustments_in_range(
                    asset, dts, column))
            out[i] = adjs
        return out

    def _get_adjustments_in_range(self, asset, dts, field):
        """
        Get the Float64Multiply objects to pass to an AdjustedArrayWindow.

        For the use of AdjustedArrayWindow in the loader, which looks back
        from current simulation time back to a window of data the dictionary
        is structured with:
        - the key into the dictionary for adjustments is the location of the
          day from which the window is being viewed.
        - the start of all multiply objects is always 0 (in each window all
          adjustments are overlapping)
        - the end of the multiply object is the location before the calendar
          location of the adjustment action, making all days before the event
          adjusted.

        Parameters
        ----------
        asset : Asset
            The assets for which to get adjustments.
        dts : iterable of datetime64-like
            The dts for which adjustment data is needed.
        field : str
            OHLCV field for which to get the adjustments.

        Returns
        -------
        out : dict[loc -> Float64Multiply]
            The adjustments as a dict of loc -> Float64Multiply
        """
        sid = int(asset)
        start = normalize_date(dts[0])
        end = normalize_date(dts[-1])
        adjs = {}
        if field != 'volume':
            mergers = self._adjustments_reader.get_adjustments_for_sid(
                'mergers', sid)
            for m in mergers:
                dt = m[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    mult = Float64Multiply(0, end_loc - 1, 0, 0, m[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
            divs = self._adjustments_reader.get_adjustments_for_sid(
                'dividends', sid)
            for d in divs:
                dt = d[0]
                if start < dt <= end:
                    end_loc = dts.searchsorted(dt)
                    adj_loc = end_loc
                    mult = Float64Multiply(0, end_loc - 1, 0, 0, d[1])
                    try:
                        adjs[adj_loc].append(mult)
                    except KeyError:
                        adjs[adj_loc] = [mult]
        splits = self._adjustments_reader.get_adjustments_for_sid(
            'splits', sid)
        for s in splits:
            dt = s[0]
            if start < dt <= end:
                if field == 'volume':
                    ratio = 1.0 / s[1]
                else:
                    ratio = s[1]
                end_loc = dts.searchsorted(dt)
                adj_loc = end_loc
                mult = Float64Multiply(0, end_loc - 1, 0, 0, ratio)
                try:
                    adjs[adj_loc].append(mult)
                except KeyError:
                    adjs[adj_loc] = [mult]
        return adjs


class ContinuousFutureAdjustmentReader(object):
    """
    Calculates adjustments for continuous futures, based on the
    close and open of the contracts on the either side of each roll.
    """

    def __init__(self, trading_calendar, asset_finder, bar_reader,
                 roll_finders, frequency):
        self._trading_calendar = trading_calendar
        self._asset_finder = asset_finder
        self._bar_reader = bar_reader
        self._roll_finders = roll_finders
        self._frequency = frequency

    def load_adjustments(self, columns, dts, assets):
        """
        Returns
        -------
        adjustments : list[dict[int -> Adjustment]]
            A list, where each element corresponds to the `columns`, of
            mappings from index to adjustment objects to apply at that index.
        """
        out = [None] * len(columns)
        for i, column in enumerate(columns):
            adjs = {}
            for asset in assets:
                adjs.update(self._get_adjustments_in_range(
                    asset, dts, column))
            out[i] = adjs
        return out

    def _make_adjustment(self, adjustment_type, front_close,
                         back_close, end_loc):
        adj_base = back_close - front_close
        if adjustment_type == 'mul':
            adj_value = 1.0 + adj_base / front_close
            adj_class = Float64Multiply
        elif adjustment_type == 'add':
            adj_value = adj_base
            adj_class = Float64Add
        return adj_class(0, end_loc, 0, 0, adj_value)

    def _get_adjustments_in_range(self, cf, dts, field):
        if field == 'volume' or field == 'sid':
            return {}
        if cf.adjustment is None:
            return {}
        rf = self._roll_finders[cf.roll_style]
        partitions = []

        rolls = rf.get_rolls(cf.root_symbol, dts[0], dts[-1], cf.offset)

        tc = self._trading_calendar

        adjs = {}

        for front, back in sliding_window(2, rolls):
            front_sid, roll_dt = front
            back_sid = back[0]
            dt = tc.previous_session_label(roll_dt)
            if self._frequency == 'minute':
                dt = tc.open_and_close_for_session(dt)[1]
                roll_dt = tc.open_and_close_for_session(roll_dt)[0]
            partitions.append((front_sid, back_sid, dt, roll_dt))
        for partition in partitions:
            front_sid, back_sid, dt, roll_dt = partition
            last_front_dt = self._bar_reader.get_last_traded_dt(
                self._asset_finder.retrieve_asset(front_sid), dt)
            last_back_dt = self._bar_reader.get_last_traded_dt(
                self._asset_finder.retrieve_asset(back_sid), dt)
            if isnull(last_front_dt) or isnull(last_back_dt):
                continue
            front_close = self._bar_reader.get_value(
                front_sid, last_front_dt, 'close')
            back_close = self._bar_reader.get_value(
                back_sid, last_back_dt, 'close')
            adj_loc = dts.searchsorted(roll_dt)
            end_loc = adj_loc - 1
            adj = self._make_adjustment(cf.adjustment, front_close,
                                        back_close, end_loc)
            try:
                adjs[adj_loc].append(adj)
            except KeyError:
                adjs[adj_loc] = [adj]
        return adjs


class SlidingWindow(object):
    """
    Wrapper around an AdjustedArrayWindow which supports monotonically
    increasing (by datetime) requests for a sized window of data.

    Parameters
    ----------
    window : AdjustedArrayWindow
       Window of pricing data with prefetched values beyond the current
       simulation dt.
    cal_start : int
       Index in the overall calendar at which the window starts.
    """

    def __init__(self, window, size, cal_start, offset):
        self.window = window
        self.cal_start = cal_start
        self.current = next(window)
        self.offset = offset
        self.most_recent_ix = self.cal_start + size

    def get(self, end_ix):
        """
        Returns
        -------
        out : A np.ndarray of the equity pricing up to end_ix after
              adjustments and rounding have been applied.
        """
        if self.most_recent_ix == end_ix:
            return self.current

        target = end_ix - self.cal_start - self.offset + 1
        self.current = self.window.seek(target)

        self.most_recent_ix = end_ix
        return self.current


class HistoryLoader(with_metaclass(ABCMeta)):
    """
    Loader for sliding history windows, with support for adjustments.

    Parameters
    ----------
    trading_calendar: TradingCalendar
        Contains the grouping logic needed to assign minutes to periods.
    reader : DailyBarReader, MinuteBarReader
        Reader for pricing bars.
    adjustment_reader : SQLiteAdjustmentReader
        Reader for adjustment data.
    """
    FIELDS = ('open', 'high', 'low', 'close', 'volume', 'sid')

    def __init__(self, trading_calendar, reader, equity_adjustment_reader,
                 asset_finder, roll_finders=None, sid_cache_size=1000,
                 prefetch_length=0):
        self.trading_calendar = trading_calendar
        self._asset_finder = asset_finder
        self._reader = reader
        self._adjustment_readers = {}
        if equity_adjustment_reader is not None:
            self._adjustment_readers[Equity] = \
                HistoryCompatibleUSEquityAdjustmentReader(
                    equity_adjustment_reader)
        if roll_finders:
            self._adjustment_readers[ContinuousFuture] =\
                ContinuousFutureAdjustmentReader(trading_calendar,
                                                 asset_finder,
                                                 reader,
                                                 roll_finders,
                                                 self._frequency)
        self._window_blocks = {
            field: ExpiringCache(LRU(sid_cache_size))
            for field in self.FIELDS
        }
        self._prefetch_length = prefetch_length

    @abstractproperty
    def _frequency(self):
        pass

    @abstractproperty
    def _calendar(self):
        pass

    @abstractmethod
    def _array(self, start, end, assets, field):
        pass

    def _decimal_places_for_asset(self, asset, reference_date):
        if isinstance(asset, Future) and asset.tick_size:
            return number_of_decimal_places(asset.tick_size)
        elif isinstance(asset, ContinuousFuture):
            # Tick size should be the same for all contracts of a continuous
            # future, so arbitrarily get the contract with next upcoming auto
            # close date.
            oc = self._asset_finder.get_ordered_contracts(asset.root_symbol)
            contract_sid = oc.contract_before_auto_close(reference_date.value)
            if contract_sid is not None:
                contract = self._asset_finder.retrieve_asset(contract_sid)
                if contract.tick_size:
                    return number_of_decimal_places(contract.tick_size)
        return DEFAULT_ASSET_PRICE_DECIMALS

    def _ensure_sliding_windows(self, assets, dts, field,
                                is_perspective_after):
        """
        Ensure that there is a Float64Multiply window for each asset that
        can provide data for the given parameters.
        If the corresponding window for the (assets, len(dts), field) does
        not exist, then create a new one.
        If a corresponding window does exist for (assets, len(dts), field),
        but can not provide data for the current dts range, then create a
        new one and replace the expired window.

        Parameters
        ----------
        assets : iterable of Assets
            The assets in the window
        dts : iterable of datetime64-like
            The datetimes for which to fetch data.
            Makes an assumption that all dts are present and contiguous,
            in the calendar.
        field : str
            The OHLCV field for which to retrieve data.
        is_perspective_after : bool
            see: `PricingHistoryLoader.history`

        Returns
        -------
        out : list of Float64Window with sufficient data so that each
        asset's window can provide `get` for the index corresponding with
        the last value in `dts`
        """
        end = dts[-1]
        size = len(dts)
        asset_windows = {}
        needed_assets = []
        cal = self._calendar

        assets = self._asset_finder.retrieve_all(assets)
        end_ix = find_in_sorted_index(cal, end)

        for asset in assets:
            try:
                window = self._window_blocks[field].get(
                    (asset, size, is_perspective_after), end)
            except KeyError:
                needed_assets.append(asset)
            else:
                if end_ix < window.most_recent_ix:
                    # Window needs reset. Requested end index occurs before
                    # the end index from the previous history call for this
                    # window. Grab new window instead of rewinding
                    # adjustments.
                    needed_assets.append(asset)
                else:
                    asset_windows[asset] = window

        if needed_assets:
            offset = 0
            start_ix = find_in_sorted_index(cal, dts[0])

            prefetch_end_ix = min(end_ix + self._prefetch_length,
                                  len(cal) - 1)
            prefetch_end = cal[prefetch_end_ix]
            prefetch_dts = cal[start_ix:prefetch_end_ix + 1]
            if is_perspective_after:
                adj_end_ix = min(prefetch_end_ix + 1, len(cal) - 1)
                adj_dts = cal[start_ix:adj_end_ix + 1]
            else:
                adj_dts = prefetch_dts
            prefetch_len = len(prefetch_dts)
            array = self._array(prefetch_dts, needed_assets, field)

            if field == 'sid':
                window_type = Int64Window
            else:
                window_type = Float64Window

            view_kwargs = {}
            if field == 'volume':
                array = array.astype(float64_dtype)

            for i, asset in enumerate(needed_assets):
                adj_reader = None
                try:
                    adj_reader = self._adjustment_readers[type(asset)]
                except KeyError:
                    adj_reader = None
                if adj_reader is not None:
                    adjs = adj_reader.load_adjustments(
                        [field], adj_dts, [asset])[0]
                else:
                    adjs = {}
                window = window_type(
                    array[:, i].reshape(prefetch_len, 1),
                    view_kwargs,
                    adjs,
                    offset,
                    size,
                    int(is_perspective_after),
                    self._decimal_places_for_asset(asset, dts[-1]),
                )
                sliding_window = SlidingWindow(window, size, start_ix, offset)
                asset_windows[asset] = sliding_window
                self._window_blocks[field].set(
                    (asset, size, is_perspective_after),
                    sliding_window,
                    prefetch_end)
        return [asset_windows[asset] for asset in assets]

    def history(self, assets, dts, field, is_perspective_after):
        """
        A window of pricing data with adjustments applied assuming that the
        end of the window is the day before the current simulation time.

        Parameters
        ----------
        assets : iterable of Assets
            The assets in the window.
        dts : iterable of datetime64-like
            The datetimes for which to fetch data.
            Makes an assumption that all dts are present and contiguous,
            in the calendar.
        field : str
            The OHLCV field for which to retrieve data.
        is_perspective_after : bool
            True, if the window is being viewed immediately after the last dt
            in the sliding window.
            False, if the window is viewed on the last dt.

            This flag is used for handling the case where the last dt in the
            requested window immediately precedes a corporate action, e.g.:

            - is_perspective_after is True

            When the viewpoint is after the last dt in the window, as when a
            daily history window is accessed from a simulation that uses a
            minute data frequency, the history call to this loader will not
            include the current simulation dt. At that point in time, the raw
            data for the last day in the window will require adjustment, so
            the most recent adjustment with respect to the simulation time is
            applied to the last dt in the requested window.

            An example equity which has a 0.5 split ratio dated for 05-27,
            with the dts for a history call of 5 bars with a '1d' frequency
            at 05-27 9:31. Simulation frequency is 'minute'.

            (In this case this function is called with 4 daily dts, and the
             calling function is responsible for stitching back on the
             'current' dt)

            |     |       |       |       | last dt | <-- viewer is here |
            |     | 05-23 | 05-24 | 05-25 | 05-26   | 05-27 9:31         |
            | raw | 10.10 | 10.20 | 10.30 | 10.40   |                    |
            | adj |  5.05 |  5.10 |  5.15 |  5.25   |                    |

            The adjustment is applied to the last dt, 05-26, and all
            previous dts.

            - is_perspective_after is False, daily

            When the viewpoint is the same point in time as the last dt in
            the window, as when a daily history window is accessed from a
            simulation that uses a daily data frequency, the history call
            will include the current dt. At that point in time, the raw data
            for the last day in the window will be post-adjustment, so no
            adjustment is applied to the last dt.

            An example equity which has a 0.5 split ratio dated for 05-27,
            with the dts for a history call of 5 bars with a '1d' frequency
            at 05-27 0:00. Simulation frequency is 'daily'.

            |     |       |       |       |       | <-- viewer is here |
            |     |       |       |       |       | last dt            |
            |     | 05-23 | 05-24 | 05-25 | 05-26 | 05-27              |
            | raw | 10.10 | 10.20 | 10.30 | 10.40 |  5.25              |
            | adj |  5.05 |  5.10 |  5.15 |  5.20 |  5.25              |

            Adjustments are applied 05-23 through 05-26 but not to the last
            dt, 05-27

        Returns
        -------
        out : np.ndarray with shape(len(days between start, end), len(assets))
        """
        block = self._ensure_sliding_windows(assets,
                                             dts,
                                             field,
                                             is_perspective_after)
        end_ix = self._calendar.searchsorted(dts[-1])

        return concatenate(
            [window.get(end_ix) for window in block],
            axis=1,
        )


class DailyHistoryLoader(HistoryLoader):

    @property
    def _frequency(self):
        return 'daily'

    @property
    def _calendar(self):
        return self._reader.sessions

    def _array(self, dts, assets, field):
        return self._reader.load_raw_arrays(
            [field],
            dts[0],
            dts[-1],
            assets,
        )[0]


class MinuteHistoryLoader(HistoryLoader):

    @property
    def _frequency(self):
        return 'minute'

    @lazyval
    def _calendar(self):
        mm = self.trading_calendar.all_minutes
        start = mm.searchsorted(self._reader.first_trading_day)
        end = mm.searchsorted(self._reader.last_available_dt, side='right')
        return mm[start:end]

    def _array(self, dts, assets, field):
        return self._reader.load_raw_arrays(
            [field],
            dts[0],
            dts[-1],
            assets,
        )[0]
37.647157
79
0.550171
from abc import ( ABCMeta, abstractmethod, abstractproperty, ) from numpy import concatenate from lru import LRU from pandas import isnull from pandas.tslib import normalize_date from toolz import sliding_window from six import with_metaclass from zipline.assets import Equity, Future from zipline.assets.continuous_futures import ContinuousFuture from zipline.lib._int64window import AdjustedArrayWindow as Int64Window from zipline.lib._float64window import AdjustedArrayWindow as Float64Window from zipline.lib.adjustment import Float64Multiply, Float64Add from zipline.utils.cache import ExpiringCache from zipline.utils.math_utils import number_of_decimal_places from zipline.utils.memoize import lazyval from zipline.utils.numpy_utils import float64_dtype from zipline.utils.pandas_utils import find_in_sorted_index DEFAULT_ASSET_PRICE_DECIMALS = 3 class HistoryCompatibleUSEquityAdjustmentReader(object): def __init__(self, adjustment_reader): self._adjustments_reader = adjustment_reader def load_adjustments(self, columns, dts, assets): out = [None] * len(columns) for i, column in enumerate(columns): adjs = {} for asset in assets: adjs.update(self._get_adjustments_in_range( asset, dts, column)) out[i] = adjs return out def _get_adjustments_in_range(self, asset, dts, field): sid = int(asset) start = normalize_date(dts[0]) end = normalize_date(dts[-1]) adjs = {} if field != 'volume': mergers = self._adjustments_reader.get_adjustments_for_sid( 'mergers', sid) for m in mergers: dt = m[0] if start < dt <= end: end_loc = dts.searchsorted(dt) adj_loc = end_loc mult = Float64Multiply(0, end_loc - 1, 0, 0, m[1]) try: adjs[adj_loc].append(mult) except KeyError: adjs[adj_loc] = [mult] divs = self._adjustments_reader.get_adjustments_for_sid( 'dividends', sid) for d in divs: dt = d[0] if start < dt <= end: end_loc = dts.searchsorted(dt) adj_loc = end_loc mult = Float64Multiply(0, end_loc - 1, 0, 0, d[1]) try: adjs[adj_loc].append(mult) except KeyError: adjs[adj_loc] = [mult] splits = self._adjustments_reader.get_adjustments_for_sid( 'splits', sid) for s in splits: dt = s[0] if start < dt <= end: if field == 'volume': ratio = 1.0 / s[1] else: ratio = s[1] end_loc = dts.searchsorted(dt) adj_loc = end_loc mult = Float64Multiply(0, end_loc - 1, 0, 0, ratio) try: adjs[adj_loc].append(mult) except KeyError: adjs[adj_loc] = [mult] return adjs class ContinuousFutureAdjustmentReader(object): def __init__(self, trading_calendar, asset_finder, bar_reader, roll_finders, frequency): self._trading_calendar = trading_calendar self._asset_finder = asset_finder self._bar_reader = bar_reader self._roll_finders = roll_finders self._frequency = frequency def load_adjustments(self, columns, dts, assets): out = [None] * len(columns) for i, column in enumerate(columns): adjs = {} for asset in assets: adjs.update(self._get_adjustments_in_range( asset, dts, column)) out[i] = adjs return out def _make_adjustment(self, adjustment_type, front_close, back_close, end_loc): adj_base = back_close - front_close if adjustment_type == 'mul': adj_value = 1.0 + adj_base / front_close adj_class = Float64Multiply elif adjustment_type == 'add': adj_value = adj_base adj_class = Float64Add return adj_class(0, end_loc, 0, 0, adj_value) def _get_adjustments_in_range(self, cf, dts, field): if field == 'volume' or field == 'sid': return {} if cf.adjustment is None: return {} rf = self._roll_finders[cf.roll_style] partitions = [] rolls = rf.get_rolls(cf.root_symbol, dts[0], dts[-1], cf.offset) tc = self._trading_calendar adjs = {} for front, back in 
sliding_window(2, rolls): front_sid, roll_dt = front back_sid = back[0] dt = tc.previous_session_label(roll_dt) if self._frequency == 'minute': dt = tc.open_and_close_for_session(dt)[1] roll_dt = tc.open_and_close_for_session(roll_dt)[0] partitions.append((front_sid, back_sid, dt, roll_dt)) for partition in partitions: front_sid, back_sid, dt, roll_dt = partition last_front_dt = self._bar_reader.get_last_traded_dt( self._asset_finder.retrieve_asset(front_sid), dt) last_back_dt = self._bar_reader.get_last_traded_dt( self._asset_finder.retrieve_asset(back_sid), dt) if isnull(last_front_dt) or isnull(last_back_dt): continue front_close = self._bar_reader.get_value( front_sid, last_front_dt, 'close') back_close = self._bar_reader.get_value( back_sid, last_back_dt, 'close') adj_loc = dts.searchsorted(roll_dt) end_loc = adj_loc - 1 adj = self._make_adjustment(cf.adjustment, front_close, back_close, end_loc) try: adjs[adj_loc].append(adj) except KeyError: adjs[adj_loc] = [adj] return adjs class SlidingWindow(object): def __init__(self, window, size, cal_start, offset): self.window = window self.cal_start = cal_start self.current = next(window) self.offset = offset self.most_recent_ix = self.cal_start + size def get(self, end_ix): if self.most_recent_ix == end_ix: return self.current target = end_ix - self.cal_start - self.offset + 1 self.current = self.window.seek(target) self.most_recent_ix = end_ix return self.current class HistoryLoader(with_metaclass(ABCMeta)): FIELDS = ('open', 'high', 'low', 'close', 'volume', 'sid') def __init__(self, trading_calendar, reader, equity_adjustment_reader, asset_finder, roll_finders=None, sid_cache_size=1000, prefetch_length=0): self.trading_calendar = trading_calendar self._asset_finder = asset_finder self._reader = reader self._adjustment_readers = {} if equity_adjustment_reader is not None: self._adjustment_readers[Equity] = \ HistoryCompatibleUSEquityAdjustmentReader( equity_adjustment_reader) if roll_finders: self._adjustment_readers[ContinuousFuture] =\ ContinuousFutureAdjustmentReader(trading_calendar, asset_finder, reader, roll_finders, self._frequency) self._window_blocks = { field: ExpiringCache(LRU(sid_cache_size)) for field in self.FIELDS } self._prefetch_length = prefetch_length @abstractproperty def _frequency(self): pass @abstractproperty def _calendar(self): pass @abstractmethod def _array(self, start, end, assets, field): pass def _decimal_places_for_asset(self, asset, reference_date): if isinstance(asset, Future) and asset.tick_size: return number_of_decimal_places(asset.tick_size) elif isinstance(asset, ContinuousFuture): oc = self._asset_finder.get_ordered_contracts(asset.root_symbol) contract_sid = oc.contract_before_auto_close(reference_date.value) if contract_sid is not None: contract = self._asset_finder.retrieve_asset(contract_sid) if contract.tick_size: return number_of_decimal_places(contract.tick_size) return DEFAULT_ASSET_PRICE_DECIMALS def _ensure_sliding_windows(self, assets, dts, field, is_perspective_after): end = dts[-1] size = len(dts) asset_windows = {} needed_assets = [] cal = self._calendar assets = self._asset_finder.retrieve_all(assets) end_ix = find_in_sorted_index(cal, end) for asset in assets: try: window = self._window_blocks[field].get( (asset, size, is_perspective_after), end) except KeyError: needed_assets.append(asset) else: if end_ix < window.most_recent_ix: needed_assets.append(asset) else: asset_windows[asset] = window if needed_assets: offset = 0 start_ix = find_in_sorted_index(cal, dts[0]) prefetch_end_ix = 
min(end_ix + self._prefetch_length, len(cal) - 1) prefetch_end = cal[prefetch_end_ix] prefetch_dts = cal[start_ix:prefetch_end_ix + 1] if is_perspective_after: adj_end_ix = min(prefetch_end_ix + 1, len(cal) - 1) adj_dts = cal[start_ix:adj_end_ix + 1] else: adj_dts = prefetch_dts prefetch_len = len(prefetch_dts) array = self._array(prefetch_dts, needed_assets, field) if field == 'sid': window_type = Int64Window else: window_type = Float64Window view_kwargs = {} if field == 'volume': array = array.astype(float64_dtype) for i, asset in enumerate(needed_assets): adj_reader = None try: adj_reader = self._adjustment_readers[type(asset)] except KeyError: adj_reader = None if adj_reader is not None: adjs = adj_reader.load_adjustments( [field], adj_dts, [asset])[0] else: adjs = {} window = window_type( array[:, i].reshape(prefetch_len, 1), view_kwargs, adjs, offset, size, int(is_perspective_after), self._decimal_places_for_asset(asset, dts[-1]), ) sliding_window = SlidingWindow(window, size, start_ix, offset) asset_windows[asset] = sliding_window self._window_blocks[field].set( (asset, size, is_perspective_after), sliding_window, prefetch_end) return [asset_windows[asset] for asset in assets] def history(self, assets, dts, field, is_perspective_after): block = self._ensure_sliding_windows(assets, dts, field, is_perspective_after) end_ix = self._calendar.searchsorted(dts[-1]) return concatenate( [window.get(end_ix) for window in block], axis=1, ) class DailyHistoryLoader(HistoryLoader): @property def _frequency(self): return 'daily' @property def _calendar(self): return self._reader.sessions def _array(self, dts, assets, field): return self._reader.load_raw_arrays( [field], dts[0], dts[-1], assets, )[0] class MinuteHistoryLoader(HistoryLoader): @property def _frequency(self): return 'minute' @lazyval def _calendar(self): mm = self.trading_calendar.all_minutes start = mm.searchsorted(self._reader.first_trading_day) end = mm.searchsorted(self._reader.last_available_dt, side='right') return mm[start:end] def _array(self, dts, assets, field): return self._reader.load_raw_arrays( [field], dts[0], dts[-1], assets, )[0]
true
true
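A minimal usage sketch for the DailyHistoryLoader defined in the record above. Every `my_*` name below is a hypothetical stand-in for a real zipline fixture (trading calendar, bar reader, adjustment reader, asset finder); only the constructor and `history` signatures come from the record itself.

loader = DailyHistoryLoader(
    trading_calendar=my_trading_calendar,
    reader=my_daily_bar_reader,
    equity_adjustment_reader=my_adjustment_reader,
    asset_finder=my_asset_finder,
)
# `history` returns one column per asset over the requested sessions.
sessions = my_daily_bar_reader.sessions[-30:]
closes = loader.history([my_equity], sessions, 'close',
                        is_perspective_after=False)  # shape (30, 1)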
f7002061c7f0f3fa019227cac800feb6c0e23672
1,398
py
Python
tescolabsapi.py
moreati/tescolabsAPI
bf4d8f863373032b1d76672793261a72c095e332
[ "Apache-2.0" ]
3
2016-02-09T23:01:42.000Z
2020-04-27T15:56:17.000Z
tescolabsapi.py
moreati/tescolabsapi
bf4d8f863373032b1d76672793261a72c095e332
[ "Apache-2.0" ]
null
null
null
tescolabsapi.py
moreati/tescolabsapi
bf4d8f863373032b1d76672793261a72c095e332
[ "Apache-2.0" ]
null
null
null
import requests

API_URL = 'https://secure.techfortesco.com/tescolabsapi/restservice.aspx'


class TescoLabsApi(object):
    def __init__(self, url, developerkey, applicationkey):
        self.url = url
        self.developerkey = developerkey
        self.applicationkey = applicationkey
        res = requests.get(self.url,
                           params={'command': 'login',
                                   'email': '',
                                   'password': '',
                                   'developerkey': self.developerkey,
                                   'applicationkey': self.applicationkey,
                                   })
        self.sessionkey = res.json()['SessionKey']

    def _command(self, command, **kwargs):
        params = kwargs
        params.update({'command': command, 'sessionkey': self.sessionkey})
        res = requests.get(self.url, params=params)
        return res

    def listproductcategories(self):
        return self._command('listproductcategories')

    def listproductsincategory(self, category):
        return self._command('listproductsincategory', category=category)

    def listproductoffers(self):
        return self._command('listproductoffers')

    def productsearch(self, searchtext, page=1, extendedinfo=False):
        return self._command('productsearch', searchtext=searchtext,
                             page=page, extendedinfo=extendedinfo)
36.789474
74
0.600858
import requests

API_URL = 'https://secure.techfortesco.com/tescolabsapi/restservice.aspx'


class TescoLabsApi(object):
    def __init__(self, url, developerkey, applicationkey):
        self.url = url
        self.developerkey = developerkey
        self.applicationkey = applicationkey
        res = requests.get(self.url,
                           params={'command': 'login',
                                   'email': '',
                                   'password': '',
                                   'developerkey': self.developerkey,
                                   'applicationkey': self.applicationkey,
                                   })
        self.sessionkey = res.json()['SessionKey']

    def _command(self, command, **kwargs):
        params = kwargs
        params.update({'command': command, 'sessionkey': self.sessionkey})
        res = requests.get(self.url, params=params)
        return res

    def listproductcategories(self):
        return self._command('listproductcategories')

    def listproductsincategory(self, category):
        return self._command('listproductsincategory', category=category)

    def listproductoffers(self):
        return self._command('listproductoffers')

    def productsearch(self, searchtext, page=1, extendedinfo=False):
        return self._command('productsearch', searchtext=searchtext,
                             page=page, extendedinfo=extendedinfo)
true
true
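A short usage sketch for the TescoLabsApi class in the record above; the developer and application keys are placeholders that a real Tesco Labs account would supply, and each method returns the raw requests.Response.

api = TescoLabsApi(API_URL, developerkey='DEV_KEY', applicationkey='APP_KEY')
res = api.productsearch('milk', page=1)
print(res.json())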
f70020b4615ef66843d3d64a999da443cca1e88c
418
py
Python
catalog/migrations/0003_item_tags.py
yeezy-na-izi/YlDjango
6fd0763183d76e4f7ca4a9686170d0665d7c04e9
[ "MIT" ]
6
2022-03-06T10:43:06.000Z
2022-03-24T13:00:12.000Z
catalog/migrations/0003_item_tags.py
yeezy-na-izi/YlDjango
6fd0763183d76e4f7ca4a9686170d0665d7c04e9
[ "MIT" ]
6
2022-03-09T13:22:41.000Z
2022-03-25T09:21:37.000Z
catalog/migrations/0003_item_tags.py
yeezy-na-izi/YlDjango
6fd0763183d76e4f7ca4a9686170d0665d7c04e9
[ "MIT" ]
null
null
null
# Generated by Django 3.2.12 on 2022-03-21 09:04

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('catalog', '0002_tag'),
    ]

    operations = [
        migrations.AddField(
            model_name='item',
            name='tags',
            field=models.ManyToManyField(related_name='items', to='catalog.Tag', verbose_name='Теги'),
        ),
    ]
22
102
0.598086
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('catalog', '0002_tag'),
    ]

    operations = [
        migrations.AddField(
            model_name='item',
            name='tags',
            field=models.ManyToManyField(related_name='items', to='catalog.Tag', verbose_name='Теги'),
        ),
    ]
true
true
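A sketch of the model declaration that would generate the AddField operation in the record above, assuming the Tag model introduced by migration 0002_tag; any other Item fields are omitted.

from django.db import models


class Item(models.Model):
    # Field added by 0003_item_tags; 'catalog.Tag' is the model from 0002_tag.
    tags = models.ManyToManyField('catalog.Tag', related_name='items',
                                  verbose_name='Теги')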
f70020de9b544bc4917fa95a981799188beb21b9
7,205
py
Python
airflow/task/task_runner/base_task_runner.py
ChaseKnowlden/airflow
6b71eac1997a7c0db3b8e3aed6b4e65d01871440
[ "Apache-2.0" ]
2
2021-07-30T16:57:37.000Z
2021-08-03T13:51:47.000Z
airflow/task/task_runner/base_task_runner.py
ChaseKnowlden/airflow
6b71eac1997a7c0db3b8e3aed6b4e65d01871440
[ "Apache-2.0" ]
null
null
null
airflow/task/task_runner/base_task_runner.py
ChaseKnowlden/airflow
6b71eac1997a7c0db3b8e3aed6b4e65d01871440
[ "Apache-2.0" ]
1
2020-10-01T08:48:37.000Z
2020-10-01T08:48:37.000Z
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Base task runner""" import os import subprocess import threading from pwd import getpwnam from tempfile import NamedTemporaryFile from typing import Optional, Union from airflow.configuration import conf from airflow.exceptions import AirflowConfigException from airflow.models.taskinstance import load_error_file from airflow.utils.configuration import tmp_configuration_copy from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname from airflow.utils.platform import getuser PYTHONPATH_VAR = 'PYTHONPATH' class BaseTaskRunner(LoggingMixin): """ Runs Airflow task instances by invoking the `airflow tasks run` command with raw mode enabled in a subprocess. :param local_task_job: The local task job associated with running the associated task instance. :type local_task_job: airflow.jobs.local_task_job.LocalTaskJob """ def __init__(self, local_task_job): # Pass task instance context into log handlers to setup the logger. super().__init__(local_task_job.task_instance) self._task_instance = local_task_job.task_instance popen_prepend = [] if self._task_instance.run_as_user: self.run_as_user = self._task_instance.run_as_user else: try: self.run_as_user = conf.get('core', 'default_impersonation') except AirflowConfigException: self.run_as_user = None # Add sudo commands to change user if we need to. Needed to handle SubDagOperator # case using a SequentialExecutor. self.log.debug("Planning to run as the %s user", self.run_as_user) if self.run_as_user and (self.run_as_user != getuser()): # We want to include any environment variables now, as we won't # want to have to specify them in the sudo call - they would show # up in `ps` that way! And run commands now, as the other user # might not be able to run the cmds to get credentials cfg_path = tmp_configuration_copy(chmod=0o600) # Give ownership of file to user; only they can read and write subprocess.call(['sudo', 'chown', self.run_as_user, cfg_path], close_fds=True) # propagate PYTHONPATH environment variable pythonpath_value = os.environ.get(PYTHONPATH_VAR, '') popen_prepend = ['sudo', '-E', '-H', '-u', self.run_as_user] if pythonpath_value: popen_prepend.append(f'{PYTHONPATH_VAR}={pythonpath_value}') else: # Always provide a copy of the configuration file settings. 
Since # we are running as the same user, and can pass through environment # variables then we don't need to include those in the config copy # - the runner can read/execute those values as it needs cfg_path = tmp_configuration_copy(chmod=0o600) self._error_file = NamedTemporaryFile(delete=True) if self.run_as_user: try: os.chown(self._error_file.name, getpwnam(self.run_as_user).pw_uid, -1) except KeyError: # No user `run_as_user` found pass self._cfg_path = cfg_path self._command = ( popen_prepend + self._task_instance.command_as_list( raw=True, pickle_id=local_task_job.pickle_id, mark_success=local_task_job.mark_success, job_id=local_task_job.id, pool=local_task_job.pool, cfg_path=cfg_path, ) + ["--error-file", self._error_file.name] ) self.process = None def deserialize_run_error(self) -> Optional[Union[str, Exception]]: """Return task runtime error if its written to provided error file.""" return load_error_file(self._error_file) def _read_task_logs(self, stream): while True: line = stream.readline() if isinstance(line, bytes): line = line.decode('utf-8') if not line: break self.log.info( 'Job %s: Subtask %s %s', self._task_instance.job_id, self._task_instance.task_id, line.rstrip('\n'), ) def run_command(self, run_with=None): """ Run the task command. :param run_with: list of tokens to run the task command with e.g. ``['bash', '-c']`` :type run_with: list :return: the process that was run :rtype: subprocess.Popen """ run_with = run_with or [] full_cmd = run_with + self._command self.log.info("Running on host: %s", get_hostname()) self.log.info('Running: %s', full_cmd) proc = subprocess.Popen( full_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, close_fds=True, env=os.environ.copy(), preexec_fn=os.setsid, ) # Start daemon thread to read subprocess logging output log_reader = threading.Thread( target=self._read_task_logs, args=(proc.stdout,), ) log_reader.daemon = True log_reader.start() return proc def start(self): """Start running the task instance in a subprocess.""" raise NotImplementedError() def return_code(self) -> Optional[int]: """ :return: The return code associated with running the task instance or None if the task is not yet done. :rtype: int """ raise NotImplementedError() def terminate(self) -> None: """Force kill the running task instance.""" raise NotImplementedError() def on_finish(self) -> None: """A callback that should be called when this is done running.""" if self._cfg_path and os.path.isfile(self._cfg_path): if self.run_as_user: subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True) else: os.remove(self._cfg_path) try: self._error_file.close() except FileNotFoundError: # The subprocess has deleted this file before we do # so we ignore pass
37.526042
92
0.632061
import os import subprocess import threading from pwd import getpwnam from tempfile import NamedTemporaryFile from typing import Optional, Union from airflow.configuration import conf from airflow.exceptions import AirflowConfigException from airflow.models.taskinstance import load_error_file from airflow.utils.configuration import tmp_configuration_copy from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname from airflow.utils.platform import getuser PYTHONPATH_VAR = 'PYTHONPATH' class BaseTaskRunner(LoggingMixin): def __init__(self, local_task_job): super().__init__(local_task_job.task_instance) self._task_instance = local_task_job.task_instance popen_prepend = [] if self._task_instance.run_as_user: self.run_as_user = self._task_instance.run_as_user else: try: self.run_as_user = conf.get('core', 'default_impersonation') except AirflowConfigException: self.run_as_user = None self.log.debug("Planning to run as the %s user", self.run_as_user) if self.run_as_user and (self.run_as_user != getuser()): # want to have to specify them in the sudo call - they would show # up in `ps` that way! And run commands now, as the other user # might not be able to run the cmds to get credentials cfg_path = tmp_configuration_copy(chmod=0o600) # Give ownership of file to user; only they can read and write subprocess.call(['sudo', 'chown', self.run_as_user, cfg_path], close_fds=True) # propagate PYTHONPATH environment variable pythonpath_value = os.environ.get(PYTHONPATH_VAR, '') popen_prepend = ['sudo', '-E', '-H', '-u', self.run_as_user] if pythonpath_value: popen_prepend.append(f'{PYTHONPATH_VAR}={pythonpath_value}') else: # Always provide a copy of the configuration file settings. Since # we are running as the same user, and can pass through environment # variables then we don't need to include those in the config copy cfg_path = tmp_configuration_copy(chmod=0o600) self._error_file = NamedTemporaryFile(delete=True) if self.run_as_user: try: os.chown(self._error_file.name, getpwnam(self.run_as_user).pw_uid, -1) except KeyError: pass self._cfg_path = cfg_path self._command = ( popen_prepend + self._task_instance.command_as_list( raw=True, pickle_id=local_task_job.pickle_id, mark_success=local_task_job.mark_success, job_id=local_task_job.id, pool=local_task_job.pool, cfg_path=cfg_path, ) + ["--error-file", self._error_file.name] ) self.process = None def deserialize_run_error(self) -> Optional[Union[str, Exception]]: return load_error_file(self._error_file) def _read_task_logs(self, stream): while True: line = stream.readline() if isinstance(line, bytes): line = line.decode('utf-8') if not line: break self.log.info( 'Job %s: Subtask %s %s', self._task_instance.job_id, self._task_instance.task_id, line.rstrip('\n'), ) def run_command(self, run_with=None): run_with = run_with or [] full_cmd = run_with + self._command self.log.info("Running on host: %s", get_hostname()) self.log.info('Running: %s', full_cmd) proc = subprocess.Popen( full_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, close_fds=True, env=os.environ.copy(), preexec_fn=os.setsid, ) log_reader = threading.Thread( target=self._read_task_logs, args=(proc.stdout,), ) log_reader.daemon = True log_reader.start() return proc def start(self): raise NotImplementedError() def return_code(self) -> Optional[int]: raise NotImplementedError() def terminate(self) -> None: raise NotImplementedError() def on_finish(self) -> None: if self._cfg_path and os.path.isfile(self._cfg_path): if 
self.run_as_user: subprocess.call(['sudo', 'rm', self._cfg_path], close_fds=True) else: os.remove(self._cfg_path) try: self._error_file.close() except FileNotFoundError: pass
true
true
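BaseTaskRunner in the record above leaves start, return_code, and terminate abstract. The subclass below is a minimal sketch that satisfies that interface; it is not Airflow's real StandardTaskRunner.

import os
import signal


class MinimalTaskRunner(BaseTaskRunner):
    def start(self):
        self.process = self.run_command()

    def return_code(self):
        return self.process.poll() if self.process else None

    def terminate(self):
        # run_command starts the child with preexec_fn=os.setsid,
        # so terminate the whole process group.
        if self.process and self.process.poll() is None:
            os.killpg(os.getpgid(self.process.pid), signal.SIGTERM)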
f700212351b1109f477cd1595558da0f8a296141
2,130
py
Python
string_2/end_other.py
nhutnamhcmus/coding-bat-solutions
5f780a4027a6c3523a72961db1bad547c997fdc6
[ "MIT" ]
1
2020-09-19T18:02:13.000Z
2020-09-19T18:02:13.000Z
string_2/end_other.py
nhutnamhcmus/coding-bat-solutions
5f780a4027a6c3523a72961db1bad547c997fdc6
[ "MIT" ]
null
null
null
string_2/end_other.py
nhutnamhcmus/coding-bat-solutions
5f780a4027a6c3523a72961db1bad547c997fdc6
[ "MIT" ]
null
null
null
# =======================================================================================================================================
# VNU-HCM, University of Science
# Department Computer Science, Faculty of Information Technology
# Authors: Nhut-Nam Le (Tich Phan Suy Rong)
# © 2020
import unittest

"""
Given two strings, return True if either of the strings appears at the very end
of the other string, ignoring upper/lower case differences (in other words, the
computation should not be "case sensitive").
Note: s.lower() returns the lowercase version of a string.

end_other('Hiabc', 'abc') → True
end_other('AbC', 'HiaBc') → True
end_other('abc', 'abXabc') → True
"""


def end_other(a, b):
    a = a.lower()
    b = b.lower()
    return (b[(len(b) - len(a)):] == a, a[(len(a) - len(b)):] == b)[len(a) >= len(b)]


class TestEndOther(unittest.TestCase):
    def test_case_00(self):
        self.assertEqual(end_other('Hiabc', 'abc'), True)

    def test_case_01(self):
        self.assertEqual(end_other('AbC', 'HiaBc'), True)

    def test_case_02(self):
        self.assertEqual(end_other('abc', 'abXabc'), True)

    def test_case_03(self):
        self.assertEqual(end_other('Hiabc', 'abcd'), False)

    def test_case_04(self):
        self.assertEqual(end_other('Hiabc', 'bc'), True)

    def test_case_05(self):
        self.assertEqual(end_other('Hiabcx', 'bc'), False)

    def test_case_06(self):
        self.assertEqual(end_other('abc', 'abc'), True)

    def test_case_07(self):
        self.assertEqual(end_other('xyz', '12xyz'), True)

    def test_case_08(self):
        self.assertEqual(end_other('yz', '12xz'), False)

    def test_case_09(self):
        self.assertEqual(end_other('Z', '12xz'), True)

    def test_case_10(self):
        self.assertEqual(end_other('12', '12'), True)

    def test_case_11(self):
        self.assertEqual(end_other('abcXYZ', 'abcDEF'), False)

    def test_case_12(self):
        self.assertEqual(end_other('ab', 'ab12'), False)

    def test_case_13(self):
        self.assertEqual(end_other('ab', '12ab'), True)


if __name__ == "__main__":
    unittest.main()
29.178082
263
0.606103
import unittest


def end_other(a, b):
    a = a.lower()
    b = b.lower()
    return (b[(len(b) - len(a)):] == a, a[(len(a) - len(b)):] == b)[len(a) >= len(b)]


class TestEndOther(unittest.TestCase):
    def test_case_00(self):
        self.assertEqual(end_other('Hiabc', 'abc'), True)

    def test_case_01(self):
        self.assertEqual(end_other('AbC', 'HiaBc'), True)

    def test_case_02(self):
        self.assertEqual(end_other('abc', 'abXabc'), True)

    def test_case_03(self):
        self.assertEqual(end_other('Hiabc', 'abcd'), False)

    def test_case_04(self):
        self.assertEqual(end_other('Hiabc', 'bc'), True)

    def test_case_05(self):
        self.assertEqual(end_other('Hiabcx', 'bc'), False)

    def test_case_06(self):
        self.assertEqual(end_other('abc', 'abc'), True)

    def test_case_07(self):
        self.assertEqual(end_other('xyz', '12xyz'), True)

    def test_case_08(self):
        self.assertEqual(end_other('yz', '12xz'), False)

    def test_case_09(self):
        self.assertEqual(end_other('Z', '12xz'), True)

    def test_case_10(self):
        self.assertEqual(end_other('12', '12'), True)

    def test_case_11(self):
        self.assertEqual(end_other('abcXYZ', 'abcDEF'), False)

    def test_case_12(self):
        self.assertEqual(end_other('ab', 'ab12'), False)

    def test_case_13(self):
        self.assertEqual(end_other('ab', '12ab'), True)


if __name__ == "__main__":
    unittest.main()
true
true
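The tuple-indexing expression in end_other uses the boolean len(a) >= len(b) to pick element 0 or 1 of a two-tuple, which evaluates both comparisons eagerly. An equivalent, more readable formulation (a new helper, not part of the record):

def end_other_endswith(a, b):
    a, b = a.lower(), b.lower()
    return a.endswith(b) if len(a) >= len(b) else b.endswith(a)


assert end_other_endswith('Hiabc', 'abc') is True
assert end_other_endswith('Hiabcx', 'bc') is False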
f70021b57f240a3d1691e8e411a6514c4b90cc8a
1,760
py
Python
pyglet/text/formats/__init__.py
bitcraft/pyglet
144257c365ca85528c6a4c5bed8141e683d7a9b6
[ "BSD-3-Clause" ]
15
2015-01-21T12:29:01.000Z
2018-12-09T09:17:33.000Z
pyglet/text/formats/__init__.py
bitcraft/pyglet
144257c365ca85528c6a4c5bed8141e683d7a9b6
[ "BSD-3-Clause" ]
null
null
null
pyglet/text/formats/__init__.py
bitcraft/pyglet
144257c365ca85528c6a4c5bed8141e683d7a9b6
[ "BSD-3-Clause" ]
9
2015-12-12T09:12:46.000Z
2021-12-26T13:29:14.000Z
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
#  * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#  * Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
#    distribution.
#  * Neither the name of pyglet nor the names of its
#    contributors may be used to endorse or promote products
#    derived from this software without specific prior written
#    permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------

"""Document formats.

:since: pyglet 1.1

"""
46.315789
78
0.701136
true
true
f70021d9c1eb3a4cd124e04b0ba9fd752601517e
793
py
Python
config.py
WILLGENIUS15/car_blog
343eb87ed560ac8dc4200b33933f14ab40121e29
[ "Unlicense" ]
null
null
null
config.py
WILLGENIUS15/car_blog
343eb87ed560ac8dc4200b33933f14ab40121e29
[ "Unlicense" ]
null
null
null
config.py
WILLGENIUS15/car_blog
343eb87ed560ac8dc4200b33933f14ab40121e29
[ "Unlicense" ]
null
null
null
import os
from dotenv import load_dotenv

load_dotenv()


class Config:
    SECRET_KEY = os.environ.get('SECRET_KEY')
    SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://fidel:fidel@localhost/blog'
    UPLOADED_PHOTOS_DEST = 'app/static/photos'
    QUOTES_URL = 'http://quotes.stormconsultancy.co.uk/random.json'

    MAIL_SERVER = 'smtp.googlemail.com'
    MAIL_PORT = 587
    MAIL_USE_TLS = True
    MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
    MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")


class ProdConfig(Config):
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')


class DevConfig(Config):
    # SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://fidel:fidel@localhost/blog'
    DEBUG = True


config_options = {
    'development': DevConfig,
    'production': ProdConfig
}
28.321429
81
0.737705
import os
from dotenv import load_dotenv

load_dotenv()


class Config:
    SECRET_KEY = os.environ.get('SECRET_KEY')
    SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://fidel:fidel@localhost/blog'
    UPLOADED_PHOTOS_DEST = 'app/static/photos'
    QUOTES_URL = 'http://quotes.stormconsultancy.co.uk/random.json'

    MAIL_SERVER = 'smtp.googlemail.com'
    MAIL_PORT = 587
    MAIL_USE_TLS = True
    MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
    MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")


class ProdConfig(Config):
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')


class DevConfig(Config):
    DEBUG = True


config_options = {
    'development': DevConfig,
    'production': ProdConfig
}
true
true
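A minimal sketch of how config_options from the record above is typically consumed by a Flask app factory; create_app and its import path are assumptions, not code from this repository.

from flask import Flask
from config import config_options


def create_app(config_name='development'):
    app = Flask(__name__)
    # Select DevConfig or ProdConfig by name.
    app.config.from_object(config_options[config_name])
    return app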
f7002200d716bdb40628ec755274e51d50f19425
1,328
bzl
Python
third_party/tensorflow/tensorflow_configure.bzl
waveflow-team/waveflow
b540a6c670e29071118ba773c26c2e2bd3839ecb
[ "Apache-2.0" ]
11
2018-03-20T16:06:35.000Z
2020-12-09T12:21:34.000Z
third_party/tensorflow/tensorflow_configure.bzl
kelu124/waveflow
b540a6c670e29071118ba773c26c2e2bd3839ecb
[ "Apache-2.0" ]
4
2018-04-16T13:09:17.000Z
2018-05-21T06:12:07.000Z
third_party/tensorflow/tensorflow_configure.bzl
kelu124/waveflow
b540a6c670e29071118ba773c26c2e2bd3839ecb
[ "Apache-2.0" ]
4
2018-11-28T20:49:18.000Z
2021-08-10T07:30:50.000Z
_TF_INCLUDE_PATH = "TF_INCLUDE_PATH"
_TF_LIB_PATH = "TF_LIB_PATH"

def _get_env_var_with_default(repository_ctx, env_var):
    """Returns environment variable value."""
    if env_var in repository_ctx.os.environ:
        value = repository_ctx.os.environ[env_var]
        return value
    else:
        fail("Environment variable '%s' was not set." % env_var)

def _get_tf_conf(repository_ctx):
    """Returns structure containing all required information about
    tensorflow configuration on host platform.
    """
    include_path = _get_env_var_with_default(repository_ctx, _TF_INCLUDE_PATH)
    lib_path = _get_env_var_with_default(repository_ctx, _TF_LIB_PATH)
    return struct(
        include_path = include_path,
        lib_path = lib_path
    )

def _tensorflow_autoconf_impl(repository_ctx):
    """Implementation of the tensorflow autoconf rule."""
    tf_conf = _get_tf_conf(repository_ctx)
    print("Using %s=%s" % (_TF_INCLUDE_PATH, tf_conf.include_path))
    print("Using %s=%s" % (_TF_LIB_PATH, tf_conf.lib_path))
    repository_ctx.symlink(tf_conf.include_path, 'include')
    repository_ctx.symlink(tf_conf.lib_path, 'lib')
    repository_ctx.template('BUILD', Label("//third_party/tensorflow:tensorflow.BUILD"))

tensorflow_configure = repository_rule(
    implementation = _tensorflow_autoconf_impl,
    environ = [
        _TF_INCLUDE_PATH,
        _TF_LIB_PATH
    ]
)
33.2
86
0.76506
_TF_INCLUDE_PATH = "TF_INCLUDE_PATH"
_TF_LIB_PATH = "TF_LIB_PATH"

def _get_env_var_with_default(repository_ctx, env_var):
    if env_var in repository_ctx.os.environ:
        value = repository_ctx.os.environ[env_var]
        return value
    else:
        fail("Environment variable '%s' was not set." % env_var)

def _get_tf_conf(repository_ctx):
    include_path = _get_env_var_with_default(repository_ctx, _TF_INCLUDE_PATH)
    lib_path = _get_env_var_with_default(repository_ctx, _TF_LIB_PATH)
    return struct(
        include_path = include_path,
        lib_path = lib_path
    )

def _tensorflow_autoconf_impl(repository_ctx):
    tf_conf = _get_tf_conf(repository_ctx)
    print("Using %s=%s" % (_TF_INCLUDE_PATH, tf_conf.include_path))
    print("Using %s=%s" % (_TF_LIB_PATH, tf_conf.lib_path))
    repository_ctx.symlink(tf_conf.include_path, 'include')
    repository_ctx.symlink(tf_conf.lib_path, 'lib')
    repository_ctx.template('BUILD', Label("//third_party/tensorflow:tensorflow.BUILD"))

tensorflow_configure = repository_rule(
    implementation = _tensorflow_autoconf_impl,
    environ = [
        _TF_INCLUDE_PATH,
        _TF_LIB_PATH
    ]
)
true
true
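A sketch of how tensorflow_configure would be invoked from a WORKSPACE file; the repository name is arbitrary, and the load path follows this repo's third_party/tensorflow layout.

load("//third_party/tensorflow:tensorflow_configure.bzl", "tensorflow_configure")

tensorflow_configure(name = "local_tensorflow")

# The rule fails unless both environment variables are set, e.g.:
#   TF_INCLUDE_PATH=/usr/include/tensorflow TF_LIB_PATH=/usr/lib bazel build //...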
f700222e7f53b12d78b2c42bfd61c27c28fd2f7e
326
py
Python
class3/exercises/yaml_ex5.py
ksannedhi/pyplus_course
fc3499f2dfef472dc49fe6caddf2e6e2be160f4b
[ "Apache-2.0" ]
39
2019-03-03T18:16:55.000Z
2022-02-17T17:05:18.000Z
class3/exercises/yaml_ex5.py
ksannedhi/pyplus_course
fc3499f2dfef472dc49fe6caddf2e6e2be160f4b
[ "Apache-2.0" ]
1
2020-06-17T22:39:28.000Z
2020-06-17T22:39:28.000Z
class3/exercises/yaml_ex5.py
ksannedhi/pyplus_course
fc3499f2dfef472dc49fe6caddf2e6e2be160f4b
[ "Apache-2.0" ]
77
2019-01-25T10:41:23.000Z
2022-03-14T21:35:59.000Z
import yaml
from os import path
from netmiko import ConnectHandler

home_dir = path.expanduser("~")
filename = path.join(home_dir, ".netmiko.yml")

with open(filename) as f:
    yaml_out = yaml.safe_load(f)

cisco3 = yaml_out["cisco3"]
net_connect = ConnectHandler(**cisco3)
print()
print(net_connect.find_prompt())
print()
18.111111
46
0.742331
import yaml
from os import path
from netmiko import ConnectHandler

home_dir = path.expanduser("~")
filename = path.join(home_dir, ".netmiko.yml")

with open(filename) as f:
    yaml_out = yaml.safe_load(f)

cisco3 = yaml_out["cisco3"]
net_connect = ConnectHandler(**cisco3)
print()
print(net_connect.find_prompt())
print()
true
true
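The script in the record above expects a ~/.netmiko.yml keyed by device name. The sketch below emits one plausible structure from Python; the host and credentials are placeholders, and the inner keys are standard netmiko ConnectHandler arguments.

import yaml

sample = {
    "cisco3": {
        "device_type": "cisco_ios",
        "host": "cisco3.example.com",
        "username": "admin",
        "password": "secret",
    }
}
print(yaml.safe_dump(sample))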
f7002370a443669d856313ff04fd39f77597e80e
30
py
Python
active_directory2/__init__.py
tjguk/active_directory2
0338ea9ea168fd37869689c108fe08f716408c95
[ "MIT" ]
2
2016-05-30T14:15:42.000Z
2021-05-15T03:26:22.000Z
active_directory2/__init__.py
tjguk/active_directory2
0338ea9ea168fd37869689c108fe08f716408c95
[ "MIT" ]
null
null
null
active_directory2/__init__.py
tjguk/active_directory2
0338ea9ea168fd37869689c108fe08f716408c95
[ "MIT" ]
null
null
null
# -*- coding: iso-8859-1 -*-
15
29
0.466667
true
true
f70023ec2bdd19efb694e6452ef3524c3321b6c3
683
py
Python
QcloudApi/modules/trade.py
snowxmas/tencentcloud-sdk-python
fb527dcfc6b52a210e79d581f85cb8cde1ea9c85
[ "Apache-2.0" ]
465
2018-04-27T09:54:59.000Z
2022-03-29T02:18:01.000Z
QcloudApi/modules/trade.py
snowxmas/tencentcloud-sdk-python
fb527dcfc6b52a210e79d581f85cb8cde1ea9c85
[ "Apache-2.0" ]
91
2018-04-27T09:48:11.000Z
2022-03-12T08:04:04.000Z
QcloudApi/modules/trade.py
snowxmas/tencentcloud-sdk-python
fb527dcfc6b52a210e79d581f85cb8cde1ea9c85
[ "Apache-2.0" ]
232
2018-05-02T08:02:46.000Z
2022-03-30T08:02:48.000Z
# Copyright 1999-2017 Tencent Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from QcloudApi.modules import base


class Trade(base.Base):
    requestHost = 'trade.api.qcloud.com'
34.15
74
0.759883
from QcloudApi.modules import base


class Trade(base.Base):
    requestHost = 'trade.api.qcloud.com'
true
true
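The record above only shows that Trade subclasses base.Base and overrides requestHost. As a hedged sketch, the legacy QcloudApi SDK is commonly driven through its QcloudApi entry point; the config keys and action name below are assumptions, not taken from this record.

from QcloudApi.qcloudapi import QcloudApi

config = {'Region': 'ap-guangzhou', 'secretId': 'YOUR_ID',
          'secretKey': 'YOUR_KEY', 'method': 'GET'}
service = QcloudApi('trade', config)  # requests are routed to trade.api.qcloud.com
print(service.call('DescribeDealsByCond', {}))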
f70024c3adadb6d6e062f9fabbda77d016fda559
9,099
py
Python
custom_components/ics_calendar/calendar.py
franc6/ics_calendar
8bfd05d9d06b80acabb075531f3b62f75668cd64
[ "Apache-2.0" ]
29
2019-08-21T08:46:34.000Z
2022-03-10T07:17:32.000Z
custom_components/ics_calendar/calendar.py
franc6/ics_calendar
8bfd05d9d06b80acabb075531f3b62f75668cd64
[ "Apache-2.0" ]
46
2019-08-21T11:12:20.000Z
2022-03-31T12:29:54.000Z
custom_components/ics_calendar/calendar.py
franc6/ics_calendar
8bfd05d9d06b80acabb075531f3b62f75668cd64
[ "Apache-2.0" ]
10
2019-08-21T08:47:52.000Z
2021-07-03T16:56:29.000Z
"""Support for ICS Calendar.""" import copy import logging from datetime import datetime, timedelta from urllib.error import ContentTooShortError, HTTPError, URLError from urllib.request import ( HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler, build_opener, install_opener, urlopen, ) import voluptuous as vol from homeassistant.components.calendar import ( ENTITY_ID_FORMAT, PLATFORM_SCHEMA, CalendarEventDevice, calculate_offset, is_offset_reached, ) from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import generate_entity_id from homeassistant.util import Throttle from .icalendarparser import ICalendarParser VERSION = "2.0.0" _LOGGER = logging.getLogger(__name__) CONF_DEVICE_ID = "device_id" CONF_CALENDARS = "calendars" CONF_CALENDAR = "calendar" CONF_INCLUDE_ALL_DAY = "includeAllDay" CONF_PARSER = "parser" OFFSET = "!!" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { # pylint: disable=no-value-for-parameter vol.Optional(CONF_CALENDARS, default=[]): vol.All( cv.ensure_list, vol.Schema( [ vol.Schema( { vol.Required(CONF_URL): vol.Url(), vol.Required(CONF_NAME): cv.string, vol.Optional( CONF_INCLUDE_ALL_DAY, default=False ): cv.boolean, vol.Optional(CONF_USERNAME, default=""): cv.string, vol.Optional(CONF_PASSWORD, default=""): cv.string, vol.Optional(CONF_PARSER, default="icalevents"): cv.string, } ) ] ), ) } ) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) # MIN_TIME_BETWEEN_DOWNLOADS is smaller than MIN_TIME_BETWEEN_UPDATES so that # it won't be skipped if an explicit update is called. Eventually, if these are # configurable, we'll let end users worry about if they mean to have it happen # that way. MIN_TIME_BETWEEN_DOWNLOADS = timedelta(minutes=10) def setup_platform(hass, config, add_entities, _=None): """Set up the ICS Calendar platform""" _LOGGER.debug("Setting up ics calendars") calendar_devices = [] for calendar in config.get(CONF_CALENDARS): device_data = { CONF_NAME: calendar.get(CONF_NAME), CONF_URL: calendar.get(CONF_URL), CONF_INCLUDE_ALL_DAY: calendar.get(CONF_INCLUDE_ALL_DAY), CONF_USERNAME: calendar.get(CONF_USERNAME), CONF_PASSWORD: calendar.get(CONF_PASSWORD), CONF_PARSER: calendar.get(CONF_PARSER), } device_id = "{}".format(device_data[CONF_NAME]) entity_id = generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) calendar_devices.append(ICSCalendarEventDevice(entity_id, device_data)) add_entities(calendar_devices) class ICSCalendarEventDevice(CalendarEventDevice): """A device for getting the next Task from an ICS Calendar""" def __init__(self, entity_id, device_data): _LOGGER.debug("Initializing calendar: %s", device_data[CONF_NAME]) self.data = ICSCalendarData(device_data) self.entity_id = entity_id self._event = None self._name = device_data[CONF_NAME] self._offset_reached = False self._last_call = None self._last_event_list = None @property def device_state_attributes(self): """Return the calendar entity's state attributes.""" return {"offset_reached": self._offset_reached} @property def event(self): """Returns the current event for the calendar entity or None""" return self._event @property def name(self): """Returns the name of the calendar entity""" return self._name async def async_get_events(self, hass, start_date, end_date): """Get all events in a specific time frame.""" if ( self._last_event_list is None or self._last_call is None or (datetime.now() - self._last_call) > MIN_TIME_BETWEEN_UPDATES ): self._last_call = 
datetime.now() self._last_event_list = await self.data.async_get_events( hass, start_date, end_date ) return self._last_event_list def update(self): """Update event data.""" self.data.update() event = copy.deepcopy(self.data.event) if event is None: self._event = event return event = calculate_offset(event, OFFSET) self._offset_reached = is_offset_reached(event) self._event = event class ICSCalendarData: """Calss to use the calendar ICS client object to get next event.""" def __init__(self, device_data): """Set up how we are going to connect to the ICS Calendar""" self.name = device_data[CONF_NAME] self.url = device_data[CONF_URL] self.include_all_day = device_data[CONF_INCLUDE_ALL_DAY] self.parser = ICalendarParser.get_instance(device_data[CONF_PARSER]) self.event = None self._calendar_data = None self._last_download = None if device_data[CONF_USERNAME] != "" and device_data[CONF_PASSWORD] != "": passman = HTTPPasswordMgrWithDefaultRealm() passman.add_password( None, self.url, device_data[CONF_USERNAME], device_data[CONF_PASSWORD] ) basic_auth_handler = HTTPBasicAuthHandler(passman) digest_auth_handler = HTTPDigestAuthHandler(passman) opener = build_opener(digest_auth_handler, basic_auth_handler) install_opener(opener) def _download_calendar(self): if ( self._calendar_data is None or self._last_download is None or (datetime.now() - self._last_download) > MIN_TIME_BETWEEN_DOWNLOADS ): self._last_download = datetime.now() self._calendar_data = None try: with urlopen(self.url) as conn: self._calendar_data = conn.read().decode().replace("\0", "") except HTTPError as http_error: _LOGGER.error(f"{self.name}: Failed to open url: {http_error.reason}") except ContentTooShortError as content_too_short_error: _LOGGER.error( f"{self.name}: Could not download calendar data: {content_too_short_error.reason}" ) except URLError as url_error: _LOGGER.error(f"{self.name}: Failed to open url: {url_error.reason}") except: _LOGGER.error(f"{self.name}: Failed to open url!") return async def async_get_events(self, hass, start_date, end_date): """Get all events in a specific time frame.""" event_list = [] await hass.async_add_job(self._download_calendar) try: events = self.parser.get_event_list( content=self._calendar_data, start=start_date, end=end_date, include_all_day=self.include_all_day, ) event_list = list(map(self.format_dates, events)) except: _LOGGER.error(f"{self.name}: Failed to parse ICS!") event_list = [] return event_list @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data.""" self._download_calendar() try: self.event = self.parser.get_current_event( content=self._calendar_data, include_all_day=self.include_all_day ) self.event["start"] = self.get_hass_date( self.event["start"], self.event["all_day"] ) self.event["end"] = self.get_hass_date( self.event["end"], self.event["all_day"] ) return True except: _LOGGER.error(f"{self.name}: Failed to parse ICS!") return False def format_dates(self, event): event["start"] = self.get_date_formatted(event["start"], event["all_day"]) event["end"] = self.get_date_formatted(event["end"], event["all_day"]) return event def get_date_formatted(self, dt, is_all_day): """Return the formatted date""" # Note that all day events should have a time of 0, and the timezone # must be local. 
if is_all_day: return dt.strftime("%Y-%m-%d") return dt.strftime("%Y-%m-%dT%H:%M:%S.%f%z") def get_hass_date(self, dt, is_all_day): """Return the wrapped and formatted date""" if is_all_day: return {"date": self.parser.get_date_formatted(dt, is_all_day)} return {"dateTime": self.parser.get_date_formatted(dt, is_all_day)}
36.396
102
0.621057
import copy import logging from datetime import datetime, timedelta from urllib.error import ContentTooShortError, HTTPError, URLError from urllib.request import ( HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler, HTTPDigestAuthHandler, build_opener, install_opener, urlopen, ) import voluptuous as vol from homeassistant.components.calendar import ( ENTITY_ID_FORMAT, PLATFORM_SCHEMA, CalendarEventDevice, calculate_offset, is_offset_reached, ) from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import generate_entity_id from homeassistant.util import Throttle from .icalendarparser import ICalendarParser VERSION = "2.0.0" _LOGGER = logging.getLogger(__name__) CONF_DEVICE_ID = "device_id" CONF_CALENDARS = "calendars" CONF_CALENDAR = "calendar" CONF_INCLUDE_ALL_DAY = "includeAllDay" CONF_PARSER = "parser" OFFSET = "!!" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Optional(CONF_CALENDARS, default=[]): vol.All( cv.ensure_list, vol.Schema( [ vol.Schema( { vol.Required(CONF_URL): vol.Url(), vol.Required(CONF_NAME): cv.string, vol.Optional( CONF_INCLUDE_ALL_DAY, default=False ): cv.boolean, vol.Optional(CONF_USERNAME, default=""): cv.string, vol.Optional(CONF_PASSWORD, default=""): cv.string, vol.Optional(CONF_PARSER, default="icalevents"): cv.string, } ) ] ), ) } ) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) # configurable, we'll let end users worry about if they mean to have it happen MIN_TIME_BETWEEN_DOWNLOADS = timedelta(minutes=10) def setup_platform(hass, config, add_entities, _=None): _LOGGER.debug("Setting up ics calendars") calendar_devices = [] for calendar in config.get(CONF_CALENDARS): device_data = { CONF_NAME: calendar.get(CONF_NAME), CONF_URL: calendar.get(CONF_URL), CONF_INCLUDE_ALL_DAY: calendar.get(CONF_INCLUDE_ALL_DAY), CONF_USERNAME: calendar.get(CONF_USERNAME), CONF_PASSWORD: calendar.get(CONF_PASSWORD), CONF_PARSER: calendar.get(CONF_PARSER), } device_id = "{}".format(device_data[CONF_NAME]) entity_id = generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass) calendar_devices.append(ICSCalendarEventDevice(entity_id, device_data)) add_entities(calendar_devices) class ICSCalendarEventDevice(CalendarEventDevice): def __init__(self, entity_id, device_data): _LOGGER.debug("Initializing calendar: %s", device_data[CONF_NAME]) self.data = ICSCalendarData(device_data) self.entity_id = entity_id self._event = None self._name = device_data[CONF_NAME] self._offset_reached = False self._last_call = None self._last_event_list = None @property def device_state_attributes(self): return {"offset_reached": self._offset_reached} @property def event(self): return self._event @property def name(self): return self._name async def async_get_events(self, hass, start_date, end_date): if ( self._last_event_list is None or self._last_call is None or (datetime.now() - self._last_call) > MIN_TIME_BETWEEN_UPDATES ): self._last_call = datetime.now() self._last_event_list = await self.data.async_get_events( hass, start_date, end_date ) return self._last_event_list def update(self): self.data.update() event = copy.deepcopy(self.data.event) if event is None: self._event = event return event = calculate_offset(event, OFFSET) self._offset_reached = is_offset_reached(event) self._event = event class ICSCalendarData: def __init__(self, device_data): self.name = device_data[CONF_NAME] self.url = device_data[CONF_URL] self.include_all_day = device_data[CONF_INCLUDE_ALL_DAY] self.parser = 
ICalendarParser.get_instance(device_data[CONF_PARSER]) self.event = None self._calendar_data = None self._last_download = None if device_data[CONF_USERNAME] != "" and device_data[CONF_PASSWORD] != "": passman = HTTPPasswordMgrWithDefaultRealm() passman.add_password( None, self.url, device_data[CONF_USERNAME], device_data[CONF_PASSWORD] ) basic_auth_handler = HTTPBasicAuthHandler(passman) digest_auth_handler = HTTPDigestAuthHandler(passman) opener = build_opener(digest_auth_handler, basic_auth_handler) install_opener(opener) def _download_calendar(self): if ( self._calendar_data is None or self._last_download is None or (datetime.now() - self._last_download) > MIN_TIME_BETWEEN_DOWNLOADS ): self._last_download = datetime.now() self._calendar_data = None try: with urlopen(self.url) as conn: self._calendar_data = conn.read().decode().replace("\0", "") except HTTPError as http_error: _LOGGER.error(f"{self.name}: Failed to open url: {http_error.reason}") except ContentTooShortError as content_too_short_error: _LOGGER.error( f"{self.name}: Could not download calendar data: {content_too_short_error.reason}" ) except URLError as url_error: _LOGGER.error(f"{self.name}: Failed to open url: {url_error.reason}") except: _LOGGER.error(f"{self.name}: Failed to open url!") return async def async_get_events(self, hass, start_date, end_date): event_list = [] await hass.async_add_job(self._download_calendar) try: events = self.parser.get_event_list( content=self._calendar_data, start=start_date, end=end_date, include_all_day=self.include_all_day, ) event_list = list(map(self.format_dates, events)) except: _LOGGER.error(f"{self.name}: Failed to parse ICS!") event_list = [] return event_list @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): self._download_calendar() try: self.event = self.parser.get_current_event( content=self._calendar_data, include_all_day=self.include_all_day ) self.event["start"] = self.get_hass_date( self.event["start"], self.event["all_day"] ) self.event["end"] = self.get_hass_date( self.event["end"], self.event["all_day"] ) return True except: _LOGGER.error(f"{self.name}: Failed to parse ICS!") return False def format_dates(self, event): event["start"] = self.get_date_formatted(event["start"], event["all_day"]) event["end"] = self.get_date_formatted(event["end"], event["all_day"]) return event def get_date_formatted(self, dt, is_all_day): if is_all_day: return dt.strftime("%Y-%m-%d") return dt.strftime("%Y-%m-%dT%H:%M:%S.%f%z") def get_hass_date(self, dt, is_all_day): if is_all_day: return {"date": self.parser.get_date_formatted(dt, is_all_day)} return {"dateTime": self.parser.get_date_formatted(dt, is_all_day)}
true
true
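The PLATFORM_SCHEMA in the record above implies a platform configuration of the following shape, shown here as a Python dict (in Home Assistant it would sit under the calendar: key of configuration.yaml). All values are placeholders.

sample_platform_config = {
    "platform": "ics_calendar",
    "calendars": [
        {
            "name": "Team events",
            "url": "https://example.com/calendar.ics",
            "includeAllDay": True,
            "parser": "icalevents",
        }
    ],
}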
f70024e5f14d8c48a9b1684bda03d5b19a8c5e49
16,642
py
Python
model_zoo/official/cv/ssd/src/dataset.py
taroxd/mindspore
9bb620ff2caaac7f1c53c4b104935f22352cb88f
[ "Apache-2.0" ]
null
null
null
model_zoo/official/cv/ssd/src/dataset.py
taroxd/mindspore
9bb620ff2caaac7f1c53c4b104935f22352cb88f
[ "Apache-2.0" ]
null
null
null
model_zoo/official/cv/ssd/src/dataset.py
taroxd/mindspore
9bb620ff2caaac7f1c53c4b104935f22352cb88f
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """SSD dataset""" from __future__ import division import os import json import xml.etree.ElementTree as et import numpy as np import cv2 import mindspore.dataset as de import mindspore.dataset.vision.c_transforms as C from mindspore.mindrecord import FileWriter from .config import config from .box_utils import jaccard_numpy, ssd_bboxes_encode def _rand(a=0., b=1.): """Generate random.""" return np.random.rand() * (b - a) + a def get_imageId_from_fileName(filename, id_iter): """Get imageID from fileName if fileName is int, else return id_iter.""" filename = os.path.splitext(filename)[0] if filename.isdigit(): return int(filename) return id_iter def random_sample_crop(image, boxes): """Random Crop the image and boxes""" height, width, _ = image.shape min_iou = np.random.choice([None, 0.1, 0.3, 0.5, 0.7, 0.9]) if min_iou is None: return image, boxes # max trails (50) for _ in range(50): image_t = image w = _rand(0.3, 1.0) * width h = _rand(0.3, 1.0) * height # aspect ratio constraint b/t .5 & 2 if h / w < 0.5 or h / w > 2: continue left = _rand() * (width - w) top = _rand() * (height - h) rect = np.array([int(top), int(left), int(top + h), int(left + w)]) overlap = jaccard_numpy(boxes, rect) # dropout some boxes drop_mask = overlap > 0 if not drop_mask.any(): continue if overlap[drop_mask].min() < min_iou and overlap[drop_mask].max() > (min_iou + 0.2): continue image_t = image_t[rect[0]:rect[2], rect[1]:rect[3], :] centers = (boxes[:, :2] + boxes[:, 2:4]) / 2.0 m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1]) m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1]) # mask in that both m1 and m2 are true mask = m1 * m2 * drop_mask # have any valid boxes? 
try again if not if not mask.any(): continue # take only matching gt boxes boxes_t = boxes[mask, :].copy() boxes_t[:, :2] = np.maximum(boxes_t[:, :2], rect[:2]) boxes_t[:, :2] -= rect[:2] boxes_t[:, 2:4] = np.minimum(boxes_t[:, 2:4], rect[2:4]) boxes_t[:, 2:4] -= rect[:2] return image_t, boxes_t return image, boxes def preprocess_fn(img_id, image, box, is_training): """Preprocess function for dataset.""" cv2.setNumThreads(2) def _infer_data(image, input_shape): img_h, img_w, _ = image.shape input_h, input_w = input_shape image = cv2.resize(image, (input_w, input_h)) # When the channels of image is 1 if len(image.shape) == 2: image = np.expand_dims(image, axis=-1) image = np.concatenate([image, image, image], axis=-1) return img_id, image, np.array((img_h, img_w), np.float32) def _data_aug(image, box, is_training, image_size=(300, 300)): """Data augmentation function.""" ih, iw, _ = image.shape w, h = image_size if not is_training: return _infer_data(image, image_size) # Random crop box = box.astype(np.float32) image, box = random_sample_crop(image, box) ih, iw, _ = image.shape # Resize image image = cv2.resize(image, (w, h)) # Flip image or not flip = _rand() < .5 if flip: image = cv2.flip(image, 1, dst=None) # When the channels of image is 1 if len(image.shape) == 2: image = np.expand_dims(image, axis=-1) image = np.concatenate([image, image, image], axis=-1) box[:, [0, 2]] = box[:, [0, 2]] / ih box[:, [1, 3]] = box[:, [1, 3]] / iw if flip: box[:, [1, 3]] = 1 - box[:, [3, 1]] box, label, num_match = ssd_bboxes_encode(box) return image, box, label, num_match return _data_aug(image, box, is_training, image_size=config.img_shape) def create_voc_label(is_training): """Get image path and annotation from VOC.""" voc_root = config.voc_root cls_map = {name: i for i, name in enumerate(config.classes)} sub_dir = 'train' if is_training else 'eval' voc_dir = os.path.join(voc_root, sub_dir) if not os.path.isdir(voc_dir): raise ValueError(f'Cannot find {sub_dir} dataset path.') image_dir = anno_dir = voc_dir if os.path.isdir(os.path.join(voc_dir, 'Images')): image_dir = os.path.join(voc_dir, 'Images') if os.path.isdir(os.path.join(voc_dir, 'Annotations')): anno_dir = os.path.join(voc_dir, 'Annotations') if not is_training: json_file = os.path.join(config.voc_root, config.voc_json) file_dir = os.path.split(json_file)[0] if not os.path.isdir(file_dir): os.makedirs(file_dir) json_dict = {"images": [], "type": "instances", "annotations": [], "categories": []} bnd_id = 1 image_files_dict = {} image_anno_dict = {} images = [] id_iter = 0 for anno_file in os.listdir(anno_dir): print(anno_file) if not anno_file.endswith('xml'): continue tree = et.parse(os.path.join(anno_dir, anno_file)) root_node = tree.getroot() file_name = root_node.find('filename').text img_id = get_imageId_from_fileName(file_name, id_iter) id_iter += 1 image_path = os.path.join(image_dir, file_name) print(image_path) if not os.path.isfile(image_path): print(f'Cannot find image {file_name} according to annotations.') continue labels = [] for obj in root_node.iter('object'): cls_name = obj.find('name').text if cls_name not in cls_map: print(f'Label "{cls_name}" not in "{config.classes}"') continue bnd_box = obj.find('bndbox') x_min = int(bnd_box.find('xmin').text) - 1 y_min = int(bnd_box.find('ymin').text) - 1 x_max = int(bnd_box.find('xmax').text) - 1 y_max = int(bnd_box.find('ymax').text) - 1 labels.append([y_min, x_min, y_max, x_max, cls_map[cls_name]]) if not is_training: o_width = abs(x_max - x_min) o_height = abs(y_max - y_min) ann = 
{'area': o_width * o_height, 'iscrowd': 0, 'image_id': \ img_id, 'bbox': [x_min, y_min, o_width, o_height], \ 'category_id': cls_map[cls_name], 'id': bnd_id, \ 'ignore': 0, \ 'segmentation': []} json_dict['annotations'].append(ann) bnd_id = bnd_id + 1 if labels: images.append(img_id) image_files_dict[img_id] = image_path image_anno_dict[img_id] = np.array(labels) if not is_training: size = root_node.find("size") width = int(size.find('width').text) height = int(size.find('height').text) image = {'file_name': file_name, 'height': height, 'width': width, 'id': img_id} json_dict['images'].append(image) if not is_training: for cls_name, cid in cls_map.items(): cat = {'supercategory': 'none', 'id': cid, 'name': cls_name} json_dict['categories'].append(cat) json_fp = open(json_file, 'w') json_str = json.dumps(json_dict) json_fp.write(json_str) json_fp.close() return images, image_files_dict, image_anno_dict def create_coco_label(is_training): """Get image path and annotation from COCO.""" from pycocotools.coco import COCO coco_root = config.coco_root data_type = config.val_data_type if is_training: data_type = config.train_data_type # Classes need to train or test. train_cls = config.classes train_cls_dict = {} for i, cls in enumerate(train_cls): train_cls_dict[cls] = i anno_json = os.path.join(coco_root, config.instances_set.format(data_type)) coco = COCO(anno_json) classs_dict = {} cat_ids = coco.loadCats(coco.getCatIds()) for cat in cat_ids: classs_dict[cat["id"]] = cat["name"] image_ids = coco.getImgIds() images = [] image_path_dict = {} image_anno_dict = {} for img_id in image_ids: image_info = coco.loadImgs(img_id) file_name = image_info[0]["file_name"] anno_ids = coco.getAnnIds(imgIds=img_id, iscrowd=None) anno = coco.loadAnns(anno_ids) image_path = os.path.join(coco_root, data_type, file_name) annos = [] iscrowd = False for label in anno: bbox = label["bbox"] class_name = classs_dict[label["category_id"]] iscrowd = iscrowd or label["iscrowd"] if class_name in train_cls: x_min, x_max = bbox[0], bbox[0] + bbox[2] y_min, y_max = bbox[1], bbox[1] + bbox[3] annos.append(list(map(round, [y_min, x_min, y_max, x_max])) + [train_cls_dict[class_name]]) if not is_training and iscrowd: continue if len(annos) >= 1: images.append(img_id) image_path_dict[img_id] = image_path image_anno_dict[img_id] = np.array(annos) return images, image_path_dict, image_anno_dict def anno_parser(annos_str): """Parse annotation from string to list.""" annos = [] for anno_str in annos_str: anno = list(map(int, anno_str.strip().split(','))) annos.append(anno) return annos def filter_valid_data(image_dir, anno_path): """Filter valid image file, which both in image_dir and anno_path.""" images = [] image_path_dict = {} image_anno_dict = {} if not os.path.isdir(image_dir): raise RuntimeError("Path given is not valid.") if not os.path.isfile(anno_path): raise RuntimeError("Annotation file is not valid.") with open(anno_path, "rb") as f: lines = f.readlines() for img_id, line in enumerate(lines): line_str = line.decode("utf-8").strip() line_split = str(line_str).split(' ') file_name = line_split[0] image_path = os.path.join(image_dir, file_name) if os.path.isfile(image_path): images.append(img_id) image_path_dict[img_id] = image_path image_anno_dict[img_id] = anno_parser(line_split[1:]) return images, image_path_dict, image_anno_dict def voc_data_to_mindrecord(mindrecord_dir, is_training, prefix="ssd.mindrecord", file_num=8): """Create MindRecord file by image_dir and anno_path.""" mindrecord_path = 
def create_coco_label(is_training):
    """Get image path and annotation from COCO."""
    from pycocotools.coco import COCO

    coco_root = config.coco_root
    data_type = config.val_data_type
    if is_training:
        data_type = config.train_data_type

    # Classes need to train or test.
    train_cls = config.classes
    train_cls_dict = {}
    for i, cls in enumerate(train_cls):
        train_cls_dict[cls] = i

    anno_json = os.path.join(coco_root, config.instances_set.format(data_type))

    coco = COCO(anno_json)
    classs_dict = {}
    cat_ids = coco.loadCats(coco.getCatIds())
    for cat in cat_ids:
        classs_dict[cat["id"]] = cat["name"]

    image_ids = coco.getImgIds()
    images = []
    image_path_dict = {}
    image_anno_dict = {}
    for img_id in image_ids:
        image_info = coco.loadImgs(img_id)
        file_name = image_info[0]["file_name"]
        anno_ids = coco.getAnnIds(imgIds=img_id, iscrowd=None)
        anno = coco.loadAnns(anno_ids)
        image_path = os.path.join(coco_root, data_type, file_name)
        annos = []
        iscrowd = False
        for label in anno:
            bbox = label["bbox"]
            class_name = classs_dict[label["category_id"]]
            iscrowd = iscrowd or label["iscrowd"]
            if class_name in train_cls:
                x_min, x_max = bbox[0], bbox[0] + bbox[2]
                y_min, y_max = bbox[1], bbox[1] + bbox[3]
                annos.append(list(map(round, [y_min, x_min, y_max, x_max])) + [train_cls_dict[class_name]])

        if not is_training and iscrowd:
            continue
        if len(annos) >= 1:
            images.append(img_id)
            image_path_dict[img_id] = image_path
            image_anno_dict[img_id] = np.array(annos)

    return images, image_path_dict, image_anno_dict


def anno_parser(annos_str):
    """Parse annotation from string to list."""
    annos = []
    for anno_str in annos_str:
        anno = list(map(int, anno_str.strip().split(',')))
        annos.append(anno)
    return annos


def filter_valid_data(image_dir, anno_path):
    """Filter valid image files, which are present in both image_dir and anno_path."""
    images = []
    image_path_dict = {}
    image_anno_dict = {}
    if not os.path.isdir(image_dir):
        raise RuntimeError("Path given is not valid.")
    if not os.path.isfile(anno_path):
        raise RuntimeError("Annotation file is not valid.")

    with open(anno_path, "rb") as f:
        lines = f.readlines()
    for img_id, line in enumerate(lines):
        line_str = line.decode("utf-8").strip()
        line_split = str(line_str).split(' ')
        file_name = line_split[0]
        image_path = os.path.join(image_dir, file_name)
        if os.path.isfile(image_path):
            images.append(img_id)
            image_path_dict[img_id] = image_path
            image_anno_dict[img_id] = anno_parser(line_split[1:])

    return images, image_path_dict, image_anno_dict
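For the non-VOC, non-COCO branch, anno_parser and filter_valid_data together imply a plain-text annotation format: one image per line, the file name followed by space-separated ymin,xmin,ymax,xmax,class groups. A hypothetical line and its parse (the file name and coordinates are made up):

# Hypothetical annotation line for the "other" dataset branch:
#   <file_name> <ymin,xmin,ymax,xmax,class> [<ymin,xmin,ymax,xmax,class> ...]
line = "000001.jpg 40,60,120,180,1 10,10,50,50,2"
line_split = line.split(' ')
print(line_split[0])                # 000001.jpg
print(anno_parser(line_split[1:]))  # [[40, 60, 120, 180, 1], [10, 10, 50, 50, 2]]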
def voc_data_to_mindrecord(mindrecord_dir, is_training, prefix="ssd.mindrecord", file_num=8):
    """Create MindRecord file by image_dir and anno_path."""
    mindrecord_path = os.path.join(mindrecord_dir, prefix)
    writer = FileWriter(mindrecord_path, file_num)
    images, image_path_dict, image_anno_dict = create_voc_label(is_training)

    ssd_json = {
        "img_id": {"type": "int32", "shape": [1]},
        "image": {"type": "bytes"},
        "annotation": {"type": "int32", "shape": [-1, 5]},
    }
    writer.add_schema(ssd_json, "ssd_json")

    for img_id in images:
        image_path = image_path_dict[img_id]
        with open(image_path, 'rb') as f:
            img = f.read()
        annos = np.array(image_anno_dict[img_id], dtype=np.int32)
        img_id = np.array([img_id], dtype=np.int32)
        row = {"img_id": img_id, "image": img, "annotation": annos}
        writer.write_raw_data([row])
    writer.commit()


def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="ssd.mindrecord", file_num=8):
    """Create MindRecord file."""
    mindrecord_dir = config.mindrecord_dir
    mindrecord_path = os.path.join(mindrecord_dir, prefix)
    writer = FileWriter(mindrecord_path, file_num)
    if dataset == "coco":
        images, image_path_dict, image_anno_dict = create_coco_label(is_training)
    else:
        images, image_path_dict, image_anno_dict = filter_valid_data(config.image_dir, config.anno_path)

    ssd_json = {
        "img_id": {"type": "int32", "shape": [1]},
        "image": {"type": "bytes"},
        "annotation": {"type": "int32", "shape": [-1, 5]},
    }
    writer.add_schema(ssd_json, "ssd_json")

    for img_id in images:
        image_path = image_path_dict[img_id]
        with open(image_path, 'rb') as f:
            img = f.read()
        annos = np.array(image_anno_dict[img_id], dtype=np.int32)
        img_id = np.array([img_id], dtype=np.int32)
        row = {"img_id": img_id, "image": img, "annotation": annos}
        writer.write_raw_data([row])
    writer.commit()


def create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=10, device_num=1, rank=0,
                       is_training=True, num_parallel_workers=4, use_multiprocessing=True):
    """Create SSD dataset with MindDataset."""
    ds = de.MindDataset(mindrecord_file, columns_list=["img_id", "image", "annotation"],
                        num_shards=device_num, shard_id=rank,
                        num_parallel_workers=num_parallel_workers, shuffle=is_training)
    decode = C.Decode()
    ds = ds.map(operations=decode, input_columns=["image"])
    change_swap_op = C.HWC2CHW()
    normalize_op = C.Normalize(mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
                               std=[0.229 * 255, 0.224 * 255, 0.225 * 255])
    color_adjust_op = C.RandomColorAdjust(brightness=0.4, contrast=0.4, saturation=0.4)
    compose_map_func = (lambda img_id, image, annotation: preprocess_fn(img_id, image, annotation, is_training))
    if is_training:
        output_columns = ["image", "box", "label", "num_match"]
        trans = [color_adjust_op, normalize_op, change_swap_op]
    else:
        output_columns = ["img_id", "image", "image_shape"]
        trans = [normalize_op, change_swap_op]
    ds = ds.map(operations=compose_map_func, input_columns=["img_id", "image", "annotation"],
                output_columns=output_columns, column_order=output_columns,
                python_multiprocessing=use_multiprocessing,
                num_parallel_workers=num_parallel_workers)
    ds = ds.map(operations=trans, input_columns=["image"],
                python_multiprocessing=use_multiprocessing,
                num_parallel_workers=num_parallel_workers)
    ds = ds.batch(batch_size, drop_remainder=True)
    ds = ds.repeat(repeat_num)
    return ds


def create_mindrecord(dataset="coco", prefix="ssd.mindrecord", is_training=True):
    print("Start create dataset!")

    # It will generate mindrecord file in config.mindrecord_dir,
    # and the file name is ssd.mindrecord0, 1, ... file_num.
    mindrecord_dir = config.mindrecord_dir
    mindrecord_file = os.path.join(mindrecord_dir, prefix + "0")
    if not os.path.exists(mindrecord_file):
        if not os.path.isdir(mindrecord_dir):
            os.makedirs(mindrecord_dir)
        if dataset == "coco":
            if os.path.isdir(config.coco_root):
                print("Create Mindrecord.")
                data_to_mindrecord_byte_image("coco", is_training, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("coco_root not exists.")
        elif dataset == "voc":
            if os.path.isdir(config.voc_root):
                print("Create Mindrecord.")
                voc_data_to_mindrecord(mindrecord_dir, is_training, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("voc_root not exists.")
        else:
            if os.path.isdir(config.image_dir) and os.path.exists(config.anno_path):
                print("Create Mindrecord.")
                data_to_mindrecord_byte_image("other", is_training, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("image_dir or anno_path not exists.")
    return mindrecord_file
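A minimal sketch of how the entry points above chain together, assuming MindSpore and pycocotools are installed and config.coco_root points at a prepared COCO layout; the batch shapes depend on config.img_shape and ssd_bboxes_encode, which live in sibling modules not shown here.

# Hypothetical driver; config values come from src/config.py, not shown in this file.
mindrecord_file = create_mindrecord(dataset="coco", prefix="ssd.mindrecord", is_training=True)
ds = create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=1, is_training=True)
for batch in ds.create_dict_iterator(output_numpy=True):
    # e.g. (32, 3, 300, 300) after Decode -> Normalize -> HWC2CHW with img_shape=(300, 300)
    print(batch["image"].shape)
    break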
36.575824
112
0.606057
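The three numeric fields above are the per-file statistics from the schema (avg_line_length, max_line_length, alphanum_fraction). The dump does not spell out their formulas; the following is a plausible reconstruction only, with the assumed definitions flagged in the comments.

def content_stats(content: str):
    # Assumed definitions; the dataset's exact formulas are not shown in this dump.
    lines = content.splitlines()
    avg_line_length = sum(len(line) for line in lines) / len(lines)
    max_line_length = max(len(line) for line in lines)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / len(content)
    return avg_line_length, max_line_length, alphanum_fraction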
from __future__ import division

import os
import json
import xml.etree.ElementTree as et
import numpy as np
import cv2

import mindspore.dataset as de
import mindspore.dataset.vision.c_transforms as C
from mindspore.mindrecord import FileWriter
from .config import config
from .box_utils import jaccard_numpy, ssd_bboxes_encode


def _rand(a=0., b=1.):
    return np.random.rand() * (b - a) + a


def get_imageId_from_fileName(filename, id_iter):
    filename = os.path.splitext(filename)[0]
    if filename.isdigit():
        return int(filename)
    return id_iter


def random_sample_crop(image, boxes):
    height, width, _ = image.shape
    min_iou = np.random.choice([None, 0.1, 0.3, 0.5, 0.7, 0.9])
    if min_iou is None:
        return image, boxes

    for _ in range(50):
        image_t = image
        w = _rand(0.3, 1.0) * width
        h = _rand(0.3, 1.0) * height
        if h / w < 0.5 or h / w > 2:
            continue
        left = _rand() * (width - w)
        top = _rand() * (height - h)
        rect = np.array([int(top), int(left), int(top + h), int(left + w)])
        overlap = jaccard_numpy(boxes, rect)
        drop_mask = overlap > 0
        if not drop_mask.any():
            continue
        if overlap[drop_mask].min() < min_iou and overlap[drop_mask].max() > (min_iou + 0.2):
            continue
        image_t = image_t[rect[0]:rect[2], rect[1]:rect[3], :]
        centers = (boxes[:, :2] + boxes[:, 2:4]) / 2.0
        m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
        m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])
        mask = m1 * m2 * drop_mask
        if not mask.any():
            continue
        boxes_t = boxes[mask, :].copy()
        boxes_t[:, :2] = np.maximum(boxes_t[:, :2], rect[:2])
        boxes_t[:, :2] -= rect[:2]
        boxes_t[:, 2:4] = np.minimum(boxes_t[:, 2:4], rect[2:4])
        boxes_t[:, 2:4] -= rect[:2]
        return image_t, boxes_t
    return image, boxes


def preprocess_fn(img_id, image, box, is_training):
    cv2.setNumThreads(2)

    def _infer_data(image, input_shape):
        img_h, img_w, _ = image.shape
        input_h, input_w = input_shape
        image = cv2.resize(image, (input_w, input_h))
        if len(image.shape) == 2:
            image = np.expand_dims(image, axis=-1)
            image = np.concatenate([image, image, image], axis=-1)
        return img_id, image, np.array((img_h, img_w), np.float32)

    def _data_aug(image, box, is_training, image_size=(300, 300)):
        ih, iw, _ = image.shape
        w, h = image_size
        if not is_training:
            return _infer_data(image, image_size)
        box = box.astype(np.float32)
        image, box = random_sample_crop(image, box)
        ih, iw, _ = image.shape
        image = cv2.resize(image, (w, h))
        flip = _rand() < .5
        if flip:
            image = cv2.flip(image, 1, dst=None)
        if len(image.shape) == 2:
            image = np.expand_dims(image, axis=-1)
            image = np.concatenate([image, image, image], axis=-1)
        box[:, [0, 2]] = box[:, [0, 2]] / ih
        box[:, [1, 3]] = box[:, [1, 3]] / iw
        if flip:
            box[:, [1, 3]] = 1 - box[:, [3, 1]]
        box, label, num_match = ssd_bboxes_encode(box)
        return image, box, label, num_match

    return _data_aug(image, box, is_training, image_size=config.img_shape)
"instances", "annotations": [], "categories": []} bnd_id = 1 image_files_dict = {} image_anno_dict = {} images = [] id_iter = 0 for anno_file in os.listdir(anno_dir): print(anno_file) if not anno_file.endswith('xml'): continue tree = et.parse(os.path.join(anno_dir, anno_file)) root_node = tree.getroot() file_name = root_node.find('filename').text img_id = get_imageId_from_fileName(file_name, id_iter) id_iter += 1 image_path = os.path.join(image_dir, file_name) print(image_path) if not os.path.isfile(image_path): print(f'Cannot find image {file_name} according to annotations.') continue labels = [] for obj in root_node.iter('object'): cls_name = obj.find('name').text if cls_name not in cls_map: print(f'Label "{cls_name}" not in "{config.classes}"') continue bnd_box = obj.find('bndbox') x_min = int(bnd_box.find('xmin').text) - 1 y_min = int(bnd_box.find('ymin').text) - 1 x_max = int(bnd_box.find('xmax').text) - 1 y_max = int(bnd_box.find('ymax').text) - 1 labels.append([y_min, x_min, y_max, x_max, cls_map[cls_name]]) if not is_training: o_width = abs(x_max - x_min) o_height = abs(y_max - y_min) ann = {'area': o_width * o_height, 'iscrowd': 0, 'image_id': \ img_id, 'bbox': [x_min, y_min, o_width, o_height], \ 'category_id': cls_map[cls_name], 'id': bnd_id, \ 'ignore': 0, \ 'segmentation': []} json_dict['annotations'].append(ann) bnd_id = bnd_id + 1 if labels: images.append(img_id) image_files_dict[img_id] = image_path image_anno_dict[img_id] = np.array(labels) if not is_training: size = root_node.find("size") width = int(size.find('width').text) height = int(size.find('height').text) image = {'file_name': file_name, 'height': height, 'width': width, 'id': img_id} json_dict['images'].append(image) if not is_training: for cls_name, cid in cls_map.items(): cat = {'supercategory': 'none', 'id': cid, 'name': cls_name} json_dict['categories'].append(cat) json_fp = open(json_file, 'w') json_str = json.dumps(json_dict) json_fp.write(json_str) json_fp.close() return images, image_files_dict, image_anno_dict def create_coco_label(is_training): from pycocotools.coco import COCO coco_root = config.coco_root data_type = config.val_data_type if is_training: data_type = config.train_data_type train_cls = config.classes train_cls_dict = {} for i, cls in enumerate(train_cls): train_cls_dict[cls] = i anno_json = os.path.join(coco_root, config.instances_set.format(data_type)) coco = COCO(anno_json) classs_dict = {} cat_ids = coco.loadCats(coco.getCatIds()) for cat in cat_ids: classs_dict[cat["id"]] = cat["name"] image_ids = coco.getImgIds() images = [] image_path_dict = {} image_anno_dict = {} for img_id in image_ids: image_info = coco.loadImgs(img_id) file_name = image_info[0]["file_name"] anno_ids = coco.getAnnIds(imgIds=img_id, iscrowd=None) anno = coco.loadAnns(anno_ids) image_path = os.path.join(coco_root, data_type, file_name) annos = [] iscrowd = False for label in anno: bbox = label["bbox"] class_name = classs_dict[label["category_id"]] iscrowd = iscrowd or label["iscrowd"] if class_name in train_cls: x_min, x_max = bbox[0], bbox[0] + bbox[2] y_min, y_max = bbox[1], bbox[1] + bbox[3] annos.append(list(map(round, [y_min, x_min, y_max, x_max])) + [train_cls_dict[class_name]]) if not is_training and iscrowd: continue if len(annos) >= 1: images.append(img_id) image_path_dict[img_id] = image_path image_anno_dict[img_id] = np.array(annos) return images, image_path_dict, image_anno_dict def anno_parser(annos_str): annos = [] for anno_str in annos_str: anno = list(map(int, anno_str.strip().split(','))) 
def anno_parser(annos_str):
    annos = []
    for anno_str in annos_str:
        anno = list(map(int, anno_str.strip().split(',')))
        annos.append(anno)
    return annos


def filter_valid_data(image_dir, anno_path):
    images = []
    image_path_dict = {}
    image_anno_dict = {}
    if not os.path.isdir(image_dir):
        raise RuntimeError("Path given is not valid.")
    if not os.path.isfile(anno_path):
        raise RuntimeError("Annotation file is not valid.")
    with open(anno_path, "rb") as f:
        lines = f.readlines()
    for img_id, line in enumerate(lines):
        line_str = line.decode("utf-8").strip()
        line_split = str(line_str).split(' ')
        file_name = line_split[0]
        image_path = os.path.join(image_dir, file_name)
        if os.path.isfile(image_path):
            images.append(img_id)
            image_path_dict[img_id] = image_path
            image_anno_dict[img_id] = anno_parser(line_split[1:])
    return images, image_path_dict, image_anno_dict


def voc_data_to_mindrecord(mindrecord_dir, is_training, prefix="ssd.mindrecord", file_num=8):
    mindrecord_path = os.path.join(mindrecord_dir, prefix)
    writer = FileWriter(mindrecord_path, file_num)
    images, image_path_dict, image_anno_dict = create_voc_label(is_training)
    ssd_json = {
        "img_id": {"type": "int32", "shape": [1]},
        "image": {"type": "bytes"},
        "annotation": {"type": "int32", "shape": [-1, 5]},
    }
    writer.add_schema(ssd_json, "ssd_json")
    for img_id in images:
        image_path = image_path_dict[img_id]
        with open(image_path, 'rb') as f:
            img = f.read()
        annos = np.array(image_anno_dict[img_id], dtype=np.int32)
        img_id = np.array([img_id], dtype=np.int32)
        row = {"img_id": img_id, "image": img, "annotation": annos}
        writer.write_raw_data([row])
    writer.commit()


def data_to_mindrecord_byte_image(dataset="coco", is_training=True, prefix="ssd.mindrecord", file_num=8):
    mindrecord_dir = config.mindrecord_dir
    mindrecord_path = os.path.join(mindrecord_dir, prefix)
    writer = FileWriter(mindrecord_path, file_num)
    if dataset == "coco":
        images, image_path_dict, image_anno_dict = create_coco_label(is_training)
    else:
        images, image_path_dict, image_anno_dict = filter_valid_data(config.image_dir, config.anno_path)
    ssd_json = {
        "img_id": {"type": "int32", "shape": [1]},
        "image": {"type": "bytes"},
        "annotation": {"type": "int32", "shape": [-1, 5]},
    }
    writer.add_schema(ssd_json, "ssd_json")
    for img_id in images:
        image_path = image_path_dict[img_id]
        with open(image_path, 'rb') as f:
            img = f.read()
        annos = np.array(image_anno_dict[img_id], dtype=np.int32)
        img_id = np.array([img_id], dtype=np.int32)
        row = {"img_id": img_id, "image": img, "annotation": annos}
        writer.write_raw_data([row])
    writer.commit()
def create_ssd_dataset(mindrecord_file, batch_size=32, repeat_num=10, device_num=1, rank=0,
                       is_training=True, num_parallel_workers=4, use_multiprocessing=True):
    ds = de.MindDataset(mindrecord_file, columns_list=["img_id", "image", "annotation"],
                        num_shards=device_num, shard_id=rank,
                        num_parallel_workers=num_parallel_workers, shuffle=is_training)
    decode = C.Decode()
    ds = ds.map(operations=decode, input_columns=["image"])
    change_swap_op = C.HWC2CHW()
    normalize_op = C.Normalize(mean=[0.485 * 255, 0.456 * 255, 0.406 * 255],
                               std=[0.229 * 255, 0.224 * 255, 0.225 * 255])
    color_adjust_op = C.RandomColorAdjust(brightness=0.4, contrast=0.4, saturation=0.4)
    compose_map_func = (lambda img_id, image, annotation: preprocess_fn(img_id, image, annotation, is_training))
    if is_training:
        output_columns = ["image", "box", "label", "num_match"]
        trans = [color_adjust_op, normalize_op, change_swap_op]
    else:
        output_columns = ["img_id", "image", "image_shape"]
        trans = [normalize_op, change_swap_op]
    ds = ds.map(operations=compose_map_func, input_columns=["img_id", "image", "annotation"],
                output_columns=output_columns, column_order=output_columns,
                python_multiprocessing=use_multiprocessing,
                num_parallel_workers=num_parallel_workers)
    ds = ds.map(operations=trans, input_columns=["image"],
                python_multiprocessing=use_multiprocessing,
                num_parallel_workers=num_parallel_workers)
    ds = ds.batch(batch_size, drop_remainder=True)
    ds = ds.repeat(repeat_num)
    return ds


def create_mindrecord(dataset="coco", prefix="ssd.mindrecord", is_training=True):
    print("Start create dataset!")
    mindrecord_dir = config.mindrecord_dir
    mindrecord_file = os.path.join(mindrecord_dir, prefix + "0")
    if not os.path.exists(mindrecord_file):
        if not os.path.isdir(mindrecord_dir):
            os.makedirs(mindrecord_dir)
        if dataset == "coco":
            if os.path.isdir(config.coco_root):
                print("Create Mindrecord.")
                data_to_mindrecord_byte_image("coco", is_training, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("coco_root not exists.")
        elif dataset == "voc":
            if os.path.isdir(config.voc_root):
                print("Create Mindrecord.")
                voc_data_to_mindrecord(mindrecord_dir, is_training, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("voc_root not exists.")
        else:
            if os.path.isdir(config.image_dir) and os.path.exists(config.anno_path):
                print("Create Mindrecord.")
                data_to_mindrecord_byte_image("other", is_training, prefix)
                print("Create Mindrecord Done, at {}".format(mindrecord_dir))
            else:
                print("image_dir or anno_path not exists.")
    return mindrecord_file
true
true
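The two trailing booleans (is_comment_constant_removed, is_sharp_comment_removed) mark how the content_no_comment field above was derived from content. The actual stripping tool is not part of this dump; the following is only a rough tokenize-based sketch of '#'-comment removal for Python sources.

import io
import tokenize

def strip_sharp_comments(source: str) -> str:
    # Sketch only: drops '#' COMMENT tokens and rebuilds the source.
    # Removing "comment constants" (e.g. bare docstring expressions), which
    # the other flag indicates, would need an extra AST-level pass.
    tokens = [tok for tok in tokenize.generate_tokens(io.StringIO(source).readline)
              if tok.type != tokenize.COMMENT]
    return tokenize.untokenize(tokens)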
f700254d4ce5eeb6f200af51a86f0d35375b2d2f
224141
py
Python
python/ccxt/binance.py
Cff01/ccxt
be2352a8b166c77a79ae40311975426f0ef4f984
[ "MIT" ]
1
2021-12-10T16:04:56.000Z
2021-12-10T16:04:56.000Z
python/ccxt/binance.py
Cff01/ccxt
be2352a8b166c77a79ae40311975426f0ef4f984
[ "MIT" ]
null
null
null
python/ccxt/binance.py
Cff01/ccxt
be2352a8b166c77a79ae40311975426f0ef4f984
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxt.base.exchange import Exchange import json from ccxt.base.errors import ExchangeError from ccxt.base.errors import AuthenticationError from ccxt.base.errors import PermissionDenied from ccxt.base.errors import AccountSuspended from ccxt.base.errors import ArgumentsRequired from ccxt.base.errors import BadRequest from ccxt.base.errors import BadSymbol from ccxt.base.errors import InsufficientFunds from ccxt.base.errors import InvalidOrder from ccxt.base.errors import OrderNotFound from ccxt.base.errors import OrderImmediatelyFillable from ccxt.base.errors import NotSupported from ccxt.base.errors import DDoSProtection from ccxt.base.errors import RateLimitExceeded from ccxt.base.errors import ExchangeNotAvailable from ccxt.base.errors import OnMaintenance from ccxt.base.errors import InvalidNonce from ccxt.base.decimal_to_precision import TRUNCATE from ccxt.base.precise import Precise class binance(Exchange): def describe(self): return self.deep_extend(super(binance, self).describe(), { 'id': 'binance', 'name': 'Binance', 'countries': ['JP', 'MT'], # Japan, Malta 'rateLimit': 50, 'certified': True, 'pro': True, # new metainfo interface 'has': { 'cancelAllOrders': True, 'cancelOrder': True, 'CORS': None, 'createOrder': True, 'fetchBalance': True, 'fetchBorrowRate': True, 'fetchBorrowRates': False, 'fetchBidsAsks': True, 'fetchClosedOrders': 'emulated', 'fetchCurrencies': True, 'fetchDepositAddress': True, 'fetchDeposits': True, 'fetchFundingFees': True, 'fetchFundingHistory': True, 'fetchFundingRate': True, 'fetchFundingRateHistory': True, 'fetchFundingRates': True, 'fetchIndexOHLCV': True, 'fetchIsolatedPositions': True, 'fetchMarkets': True, 'fetchMarkOHLCV': True, 'fetchMyTrades': True, 'fetchOHLCV': True, 'fetchOpenOrders': True, 'fetchOrder': True, 'fetchOrderBook': True, 'fetchOrders': True, 'fetchPositions': True, 'fetchPremiumIndexOHLCV': False, 'fetchStatus': True, 'fetchTicker': True, 'fetchTickers': True, 'fetchTime': True, 'fetchTrades': True, 'fetchTradingFee': True, 'fetchTradingFees': True, 'fetchTransactions': False, 'fetchTransfers': True, 'fetchWithdrawals': True, 'setLeverage': True, 'setMarginMode': True, 'setPositionMode': True, 'addMargin': True, 'reduceMargin': True, 'transfer': True, 'withdraw': True, }, 'timeframes': { '1m': '1m', '3m': '3m', '5m': '5m', '15m': '15m', '30m': '30m', '1h': '1h', '2h': '2h', '4h': '4h', '6h': '6h', '8h': '8h', '12h': '12h', '1d': '1d', '3d': '3d', '1w': '1w', '1M': '1M', }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg', 'test': { 'dapiPublic': 'https://testnet.binancefuture.com/dapi/v1', 'dapiPrivate': 'https://testnet.binancefuture.com/dapi/v1', 'fapiPublic': 'https://testnet.binancefuture.com/fapi/v1', 'fapiPrivate': 'https://testnet.binancefuture.com/fapi/v1', 'fapiPrivateV2': 'https://testnet.binancefuture.com/fapi/v2', 'public': 'https://testnet.binance.vision/api/v3', 'private': 'https://testnet.binance.vision/api/v3', 'v1': 'https://testnet.binance.vision/api/v1', }, 'api': { 'wapi': 'https://api.binance.com/wapi/v3', 'sapi': 'https://api.binance.com/sapi/v1', 'dapiPublic': 'https://dapi.binance.com/dapi/v1', 'dapiPrivate': 'https://dapi.binance.com/dapi/v1', 'dapiPrivateV2': 'https://dapi.binance.com/dapi/v2', 'dapiData': 'https://dapi.binance.com/futures/data', 
'fapiPublic': 'https://fapi.binance.com/fapi/v1', 'fapiPrivate': 'https://fapi.binance.com/fapi/v1', 'fapiData': 'https://fapi.binance.com/futures/data', 'fapiPrivateV2': 'https://fapi.binance.com/fapi/v2', 'public': 'https://api.binance.com/api/v3', 'private': 'https://api.binance.com/api/v3', 'v1': 'https://api.binance.com/api/v1', }, 'www': 'https://www.binance.com', # 'referral': { # 'url': 'https://www.binance.com/en/register?ref=BLEJC98C', # 'discount': 0.2, # }, 'doc': [ 'https://binance-docs.github.io/apidocs/spot/en', ], 'api_management': 'https://www.binance.com/en/usercenter/settings/api-management', 'fees': 'https://www.binance.com/en/fee/schedule', }, 'depth': 1, 'api': { # the API structure below will need 3-layer apidefs 'sapi': { 'get': { 'accountSnapshot': 1, 'system/status': 1, # these endpoints require self.apiKey 'margin/asset': 1, 'margin/pair': 1, 'margin/allAssets': 1, 'margin/allPairs': 1, 'margin/priceIndex': 1, # these endpoints require self.apiKey + self.secret 'asset/assetDividend': 1, 'asset/dribblet': 1, 'asset/transfer': 1, 'asset/assetDetail': 1, 'asset/tradeFee': 1, 'asset/get-funding-asset': 1, 'margin/loan': 1, 'margin/repay': 1, 'margin/account': 1, 'margin/transfer': 1, 'margin/interestHistory': 1, 'margin/forceLiquidationRec': 1, 'margin/order': 1, 'margin/openOrders': 1, 'margin/allOrders': 1, 'margin/myTrades': 1, 'margin/maxBorrowable': 5, 'margin/maxTransferable': 5, 'margin/isolated/transfer': 1, 'margin/isolated/account': 1, 'margin/isolated/pair': 1, 'margin/isolated/allPairs': 1, 'margin/isolated/accountLimit': 1, 'margin/interestRateHistory': 1, 'margin/orderList': 2, 'margin/allOrderList': 10, 'margin/openOrderList': 3, 'loan/income': 1, 'fiat/orders': 1, 'fiat/payments': 1, 'futures/transfer': 5, 'futures/loan/borrow/history': 1, 'futures/loan/repay/history': 1, 'futures/loan/wallet': 1, 'futures/loan/configs': 1, 'futures/loan/calcAdjustLevel': 1, 'futures/loan/calcMaxAdjustAmount': 1, 'futures/loan/adjustCollateral/history': 1, 'futures/loan/liquidationHistory': 1, # https://binance-docs.github.io/apidocs/spot/en/#withdraw-sapi 'capital/config/getall': 1, # get networks for withdrawing USDT ERC20 vs USDT Omni 'capital/deposit/address': 1, 'capital/deposit/hisrec': 1, 'capital/deposit/subAddress': 1, 'capital/deposit/subHisrec': 1, 'capital/withdraw/history': 1, 'account/status': 1, 'account/apiTradingStatus': 1, 'account/apiRestrictions/ipRestriction': 1, 'bnbBurn': 1, 'sub-account/assets': 1, 'sub-account/futures/account': 1, 'sub-account/futures/accountSummary': 1, 'sub-account/futures/positionRisk': 1, 'sub-account/futures/internalTransfer': 1, 'sub-account/list': 1, 'sub-account/margin/account': 1, 'sub-account/margin/accountSummary': 1, 'sub-account/spotSummary': 5, 'sub-account/status': 1, 'sub-account/sub/transfer/history': 1, 'sub-account/transfer/subUserHistory': 1, 'sub-account/universalTransfer': 1, # lending endpoints 'lending/daily/product/list': 1, 'lending/daily/userLeftQuota': 1, 'lending/daily/userRedemptionQuota': 1, 'lending/daily/token/position': 1, 'lending/union/account': 1, 'lending/union/purchaseRecord': 1, 'lending/union/redemptionRecord': 1, 'lending/union/interestHistory': 1, 'lending/project/list': 1, 'lending/project/position/list': 1, # mining endpoints 'mining/pub/algoList': 1, 'mining/pub/coinList': 1, 'mining/worker/detail': 5, 'mining/worker/list': 5, 'mining/payment/list': 5, 'mining/statistics/user/status': 5, 'mining/statistics/user/list': 5, # liquid swap endpoints 'bswap/pools': 1, 'bswap/liquidity': 
{'cost': 1, 'noPoolId': 10}, 'bswap/liquidityOps': 2, 'bswap/quote': 2, 'bswap/swap': 1, 'bswap/poolConfigure': 1, 'bswap/addLiquidityPreview': 1, 'bswap/removeLiquidityPreview': 1, # leveraged token endpoints 'blvt/tokenInfo': 1, 'blvt/subscribe/record': 1, 'blvt/redeem/record': 1, 'blvt/userLimit': 1, # broker api 'apiReferral/ifNewUser': 1, 'apiReferral/customization': 1, 'apiReferral/userCustomization': 1, 'apiReferral/rebate/recentRecord': 1, 'apiReferral/rebate/historicalRecord': 1, 'apiReferral/kickback/recentRecord': 1, 'apiReferral/kickback/historicalRecord': 1, # brokerage API 'broker/subAccountApi': 1, 'broker/subAccount': 1, 'broker/subAccountApi/commission/futures': 1, 'broker/subAccountApi/commission/coinFutures': 1, 'broker/info': 1, 'broker/transfer': 1, 'broker/transfer/futures': 1, 'broker/rebate/recentRecord': 1, 'broker/rebate/historicalRecord': 1, 'broker/subAccount/bnbBurn/status': 1, 'broker/subAccount/depositHist': 1, 'broker/subAccount/spotSummary': 1, 'broker/subAccount/marginSummary': 1, 'broker/subAccount/futuresSummary': 1, 'broker/rebate/futures/recentRecord': 1, 'broker/subAccountApi/ipRestriction': 1, 'broker/universalTransfer': 1, # v2 not supported yet # GET /sapi/v2/broker/subAccount/futuresSummary 'account/apiRestrictions': 1, # subaccounts 'managed-subaccount/asset': 1, # c2c / p2p 'c2c/orderMatch/listUserOrderHistory': 1, }, 'post': { 'asset/dust': 1, 'asset/transfer': 1, 'asset/get-funding-asset': 1, 'account/disableFastWithdrawSwitch': 1, 'account/enableFastWithdrawSwitch': 1, 'account/apiRestrictions/ipRestriction': 1, 'account/apiRestrictions/ipRestriction/ipList': 1, 'capital/withdraw/apply': 1, 'margin/transfer': 1, 'margin/loan': 1, 'margin/repay': 1, 'margin/order': 4, 'margin/order/oco': 1, 'margin/isolated/create': 1, 'margin/isolated/transfer': 1, 'margin/isolated/account': 1, 'bnbBurn': 1, 'sub-account/margin/transfer': 1, 'sub-account/margin/enable': 1, # 'sub-account/margin/enable': 1, 'sub-account/futures/enable': 1, 'sub-account/futures/transfer': 1, 'sub-account/futures/internalTransfer': 1, 'sub-account/transfer/subToSub': 1, 'sub-account/transfer/subToMaster': 1, 'sub-account/universalTransfer': 1, 'managed-subaccount/deposit': 1, 'managed-subaccount/withdraw': 1, 'userDataStream': 1, 'userDataStream/isolated': 1, 'futures/transfer': 1, 'futures/loan/borrow': 20, 'futures/loan/repay': 20, 'futures/loan/adjustCollateral': 20, # lending 'lending/customizedFixed/purchase': 1, 'lending/daily/purchase': 1, 'lending/daily/redeem': 1, # liquid swap endpoints 'bswap/liquidityAdd': 2, 'bswap/liquidityRemove': 2, 'bswap/swap': 2, # leveraged token endpoints 'blvt/subscribe': 1, 'blvt/redeem': 1, # brokerage API 'apiReferral/customization': 1, 'apiReferral/userCustomization': 1, 'apiReferral/rebate/historicalRecord': 1, 'apiReferral/kickback/historicalRecord': 1, 'broker/subAccount': 1, 'broker/subAccount/margin': 1, 'broker/subAccount/futures': 1, 'broker/subAccountApi': 1, 'broker/subAccountApi/permission': 1, 'broker/subAccountApi/commission': 1, 'broker/subAccountApi/commission/futures': 1, 'broker/subAccountApi/commission/coinFutures': 1, 'broker/transfer': 1, 'broker/transfer/futures': 1, 'broker/rebate/historicalRecord': 1, 'broker/subAccount/bnbBurn/spot': 1, 'broker/subAccount/bnbBurn/marginInterest': 1, 'broker/subAccount/blvt': 1, 'broker/subAccountApi/ipRestriction': 1, 'broker/subAccountApi/ipRestriction/ipList': 1, 'broker/universalTransfer': 1, 'broker/subAccountApi/permission/universalTransfer': 1, 
'broker/subAccountApi/permission/vanillaOptions': 1, }, 'put': { 'userDataStream': 1, 'userDataStream/isolated': 1, }, 'delete': { 'account/apiRestrictions/ipRestriction/ipList': 1, 'margin/openOrders': 1, 'margin/order': 1, 'margin/orderList': 1, 'margin/isolated/account': 1, 'userDataStream': 1, 'userDataStream/isolated': 1, # brokerage API 'broker/subAccountApi': 1, 'broker/subAccountApi/ipRestriction/ipList': 1, }, }, # deprecated 'wapi': { 'post': { 'withdraw': 1, 'sub-account/transfer': 1, }, 'get': { 'depositHistory': 1, 'withdrawHistory': 1, 'depositAddress': 1, 'accountStatus': 1, 'systemStatus': 1, 'apiTradingStatus': 1, 'userAssetDribbletLog': 1, 'tradeFee': 1, 'assetDetail': 1, 'sub-account/list': 1, 'sub-account/transfer/history': 1, 'sub-account/assets': 1, }, }, 'dapiPublic': { 'get': { 'ping': 1, 'time': 1, 'exchangeInfo': 1, 'depth': {'cost': 2, 'byLimit': [[50, 2], [100, 5], [500, 10], [1000, 20]]}, 'trades': 1, 'historicalTrades': 20, 'aggTrades': 20, 'premiumIndex': 10, 'fundingRate': 1, 'klines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'continuousKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'indexPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'markPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'ticker/24hr': {'cost': 1, 'noSymbol': 40}, 'ticker/price': {'cost': 1, 'noSymbol': 2}, 'ticker/bookTicker': {'cost': 1, 'noSymbol': 2}, 'openInterest': 1, }, }, 'dapiData': { 'get': { 'openInterestHist': 1, 'topLongShortAccountRatio': 1, 'topLongShortPositionRatio': 1, 'globalLongShortAccountRatio': 1, 'takerBuySellVol': 1, 'basis': 1, }, }, 'dapiPrivate': { 'get': { 'positionSide/dual': 30, 'order': 1, 'openOrder': 1, 'openOrders': {'cost': 1, 'noSymbol': 5}, 'allOrders': {'cost': 20, 'noSymbol': 40}, 'balance': 1, 'account': 5, 'positionMargin/history': 1, 'positionRisk': 1, 'userTrades': {'cost': 20, 'noSymbol': 40}, 'income': 20, 'leverageBracket': 1, 'forceOrders': {'cost': 20, 'noSymbol': 50}, 'adlQuantile': 5, }, 'post': { 'positionSide/dual': 1, 'order': 4, 'batchOrders': 5, 'countdownCancelAll': 10, 'leverage': 1, 'marginType': 1, 'positionMargin': 1, 'listenKey': 1, }, 'put': { 'listenKey': 1, }, 'delete': { 'order': 1, 'allOpenOrders': 1, 'batchOrders': 5, 'listenKey': 1, }, }, 'dapiPrivateV2': { 'get': { 'leverageBracket': 1, }, }, 'fapiPublic': { 'get': { 'ping': 1, 'time': 1, 'exchangeInfo': 1, 'depth': {'cost': 2, 'byLimit': [[50, 2], [100, 5], [500, 10], [1000, 20]]}, 'trades': 1, 'historicalTrades': 20, 'aggTrades': 20, 'klines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'continuousKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'markPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'indexPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]}, 'fundingRate': 1, 'premiumIndex': 1, 'ticker/24hr': {'cost': 1, 'noSymbol': 40}, 'ticker/price': {'cost': 1, 'noSymbol': 2}, 'ticker/bookTicker': {'cost': 1, 'noSymbol': 2}, 'openInterest': 1, 'indexInfo': 1, 'apiTradingStatus': {'cost': 1, 'noSymbol': 10}, 'lvtKlines': 1, }, }, 'fapiData': { 'get': { 'openInterestHist': 1, 'topLongShortAccountRatio': 1, 'topLongShortPositionRatio': 1, 'globalLongShortAccountRatio': 1, 'takerlongshortRatio': 1, }, }, 'fapiPrivate': { 'get': { 'forceOrders': {'cost': 20, 'noSymbol': 50}, 'allOrders': 5, 'openOrder': 1, 'openOrders': 1, 'order': 
1, 'account': 5, 'balance': 5, 'leverageBracket': 1, 'positionMargin/history': 1, 'positionRisk': 5, 'positionSide/dual': 30, 'userTrades': 5, 'income': 30, 'commissionRate': 20, 'apiTradingStatus': 1, 'multiAssetsMargin': 30, # broker endpoints 'apiReferral/ifNewUser': 1, 'apiReferral/customization': 1, 'apiReferral/userCustomization': 1, 'apiReferral/traderNum': 1, 'apiReferral/overview': 1, 'apiReferral/tradeVol': 1, 'apiReferral/rebateVol': 1, 'apiReferral/traderSummary': 1, 'adlQuantile': 5, }, 'post': { 'batchOrders': 5, 'positionSide/dual': 1, 'positionMargin': 1, 'marginType': 1, 'order': 4, 'leverage': 1, 'listenKey': 1, 'countdownCancelAll': 10, 'multiAssetsMargin': 1, # broker endpoints 'apiReferral/customization': 1, 'apiReferral/userCustomization': 1, }, 'put': { 'listenKey': 1, }, 'delete': { 'batchOrders': 1, 'order': 1, 'allOpenOrders': 1, 'listenKey': 1, }, }, 'fapiPrivateV2': { 'get': { 'account': 1, 'balance': 1, 'positionRisk': 1, }, }, 'public': { 'get': { 'ping': 1, 'time': 1, 'depth': {'cost': 1, 'byLimit': [[100, 1], [500, 5], [1000, 10], [5000, 50]]}, 'trades': 1, 'aggTrades': 1, 'historicalTrades': 5, 'klines': 1, 'ticker/24hr': {'cost': 1, 'noSymbol': 40}, 'ticker/price': {'cost': 1, 'noSymbol': 2}, 'ticker/bookTicker': {'cost': 1, 'noSymbol': 2}, 'exchangeInfo': 10, }, 'put': { 'userDataStream': 1, }, 'post': { 'userDataStream': 1, }, 'delete': { 'userDataStream': 1, }, }, 'private': { 'get': { 'allOrderList': 10, # oco 'openOrderList': 3, # oco 'orderList': 2, # oco 'order': 2, 'openOrders': {'cost': 3, 'noSymbol': 40}, 'allOrders': 10, 'account': 10, 'myTrades': 10, 'rateLimit/order': 20, }, 'post': { 'order/oco': 1, 'order': 4, 'order/test': 1, }, 'delete': { 'openOrders': 1, # added on 2020-04-25 for canceling all open orders per symbol 'orderList': 1, # oco 'order': 1, }, }, }, 'fees': { 'trading': { 'feeSide': 'get', 'tierBased': False, 'percentage': True, 'taker': self.parse_number('0.001'), 'maker': self.parse_number('0.001'), }, 'future': { 'trading': { 'feeSide': 'quote', 'tierBased': True, 'percentage': True, 'taker': self.parse_number('0.000400'), 'maker': self.parse_number('0.000200'), 'tiers': { 'taker': [ [self.parse_number('0'), self.parse_number('0.000400')], [self.parse_number('250'), self.parse_number('0.000400')], [self.parse_number('2500'), self.parse_number('0.000350')], [self.parse_number('7500'), self.parse_number('0.000320')], [self.parse_number('22500'), self.parse_number('0.000300')], [self.parse_number('50000'), self.parse_number('0.000270')], [self.parse_number('100000'), self.parse_number('0.000250')], [self.parse_number('200000'), self.parse_number('0.000220')], [self.parse_number('400000'), self.parse_number('0.000200')], [self.parse_number('750000'), self.parse_number('0.000170')], ], 'maker': [ [self.parse_number('0'), self.parse_number('0.000200')], [self.parse_number('250'), self.parse_number('0.000160')], [self.parse_number('2500'), self.parse_number('0.000140')], [self.parse_number('7500'), self.parse_number('0.000120')], [self.parse_number('22500'), self.parse_number('0.000100')], [self.parse_number('50000'), self.parse_number('0.000080')], [self.parse_number('100000'), self.parse_number('0.000060')], [self.parse_number('200000'), self.parse_number('0.000040')], [self.parse_number('400000'), self.parse_number('0.000020')], [self.parse_number('750000'), self.parse_number('0')], ], }, }, }, 'delivery': { 'trading': { 'feeSide': 'base', 'tierBased': True, 'percentage': True, 'taker': self.parse_number('0.000500'), 'maker': 
self.parse_number('0.000100'), 'tiers': { 'taker': [ [self.parse_number('0'), self.parse_number('0.000500')], [self.parse_number('250'), self.parse_number('0.000450')], [self.parse_number('2500'), self.parse_number('0.000400')], [self.parse_number('7500'), self.parse_number('0.000300')], [self.parse_number('22500'), self.parse_number('0.000250')], [self.parse_number('50000'), self.parse_number('0.000240')], [self.parse_number('100000'), self.parse_number('0.000240')], [self.parse_number('200000'), self.parse_number('0.000240')], [self.parse_number('400000'), self.parse_number('0.000240')], [self.parse_number('750000'), self.parse_number('0.000240')], ], 'maker': [ [self.parse_number('0'), self.parse_number('0.000100')], [self.parse_number('250'), self.parse_number('0.000080')], [self.parse_number('2500'), self.parse_number('0.000050')], [self.parse_number('7500'), self.parse_number('0.0000030')], [self.parse_number('22500'), self.parse_number('0')], [self.parse_number('50000'), self.parse_number('-0.000050')], [self.parse_number('100000'), self.parse_number('-0.000060')], [self.parse_number('200000'), self.parse_number('-0.000070')], [self.parse_number('400000'), self.parse_number('-0.000080')], [self.parse_number('750000'), self.parse_number('-0.000090')], ], }, }, }, }, 'commonCurrencies': { 'BCC': 'BCC', # kept for backward-compatibility https://github.com/ccxt/ccxt/issues/4848 'YOYO': 'YOYOW', }, # exchange-specific options 'options': { 'fetchCurrencies': True, # self is a private call and it requires API keys # 'fetchTradesMethod': 'publicGetAggTrades', # publicGetTrades, publicGetHistoricalTrades 'defaultTimeInForce': 'GTC', # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel 'defaultType': 'spot', # 'spot', 'future', 'margin', 'delivery' 'hasAlreadyAuthenticatedSuccessfully': False, 'warnOnFetchOpenOrdersWithoutSymbol': True, 'fetchPositions': 'positionRisk', # or 'account' 'recvWindow': 5 * 1000, # 5 sec, binance default 'timeDifference': 0, # the difference between system clock and Binance clock 'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation 'newOrderRespType': { 'market': 'FULL', # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills 'limit': 'FULL', # we change it from 'ACK' by default to 'FULL'(returns immediately if limit is not hit) }, 'quoteOrderQty': True, # whether market orders support amounts in quote currency 'broker': { 'spot': 'x-R4BD3S82', 'margin': 'x-R4BD3S82', 'future': 'x-xcKtGhcu', 'delivery': 'x-xcKtGhcu', }, 'accountsByType': { 'main': 'MAIN', 'spot': 'MAIN', 'funding': 'FUNDING', 'margin': 'MARGIN', 'future': 'UMFUTURE', 'delivery': 'CMFUTURE', 'mining': 'MINING', }, 'typesByAccount': { 'MAIN': 'spot', 'FUNDING': 'funding', 'MARGIN': 'margin', 'UMFUTURE': 'future', 'CMFUTURE': 'delivery', 'MINING': 'mining', }, 'networks': { 'ERC20': 'ETH', 'TRC20': 'TRX', 'BEP2': 'BNB', 'BEP20': 'BSC', 'OMNI': 'OMNI', 'EOS': 'EOS', 'SPL': 'SOL', }, 'reverseNetworks': { 'tronscan.org': 'TRC20', 'etherscan.io': 'ERC20', 'bscscan.com': 'BSC', 'explorer.binance.org': 'BEP2', 'bithomp.com': 'XRP', 'bloks.io': 'EOS', 'stellar.expert': 'XLM', 'blockchair.com/bitcoin': 'BTC', 'blockchair.com/bitcoin-cash': 'BCH', 'blockchair.com/ecash': 'XEC', 'explorer.litecoin.net': 'LTC', 'explorer.avax.network': 'AVAX', 'solscan.io': 'SOL', 'polkadot.subscan.io': 'DOT', 'dashboard.internetcomputer.org': 'ICP', 'explorer.chiliz.com': 'CHZ', 'cardanoscan.io': 'ADA', 'mainnet.theoan.com': 'AION', 'algoexplorer.io': 'ALGO', 
'explorer.ambrosus.com': 'AMB', 'viewblock.io/zilliqa': 'ZIL', 'viewblock.io/arweave': 'AR', 'explorer.ark.io': 'ARK', 'atomscan.com': 'ATOM', 'www.mintscan.io': 'CTK', 'explorer.bitcoindiamond.org': 'BCD', 'btgexplorer.com': 'BTG', 'bts.ai': 'BTS', 'explorer.celo.org': 'CELO', 'explorer.nervos.org': 'CKB', 'cerebro.cortexlabs.ai': 'CTXC', 'chainz.cryptoid.info': 'VIA', 'explorer.dcrdata.org': 'DCR', 'digiexplorer.info': 'DGB', 'dock.subscan.io': 'DOCK', 'dogechain.info': 'DOGE', 'explorer.elrond.com': 'EGLD', 'blockscout.com': 'ETC', 'explore-fetchhub.fetch.ai': 'FET', 'filfox.info': 'FIL', 'fio.bloks.io': 'FIO', 'explorer.firo.org': 'FIRO', 'neoscan.io': 'NEO', 'ftmscan.com': 'FTM', 'explorer.gochain.io': 'GO', 'block.gxb.io': 'GXS', 'hash-hash.info': 'HBAR', 'www.hiveblockexplorer.com': 'HIVE', 'explorer.helium.com': 'HNT', 'tracker.icon.foundation': 'ICX', 'www.iostabc.com': 'IOST', 'explorer.iota.org': 'IOTA', 'iotexscan.io': 'IOTX', 'irishub.iobscan.io': 'IRIS', 'kava.mintscan.io': 'KAVA', 'scope.klaytn.com': 'KLAY', 'kmdexplorer.io': 'KMD', 'kusama.subscan.io': 'KSM', 'explorer.lto.network': 'LTO', 'polygonscan.com': 'POLYGON', 'explorer.ont.io': 'ONT', 'minaexplorer.com': 'MINA', 'nanolooker.com': 'NANO', 'explorer.nebulas.io': 'NAS', 'explorer.nbs.plus': 'NBS', 'explorer.nebl.io': 'NEBL', 'nulscan.io': 'NULS', 'nxscan.com': 'NXS', 'explorer.harmony.one': 'ONE', 'explorer.poa.network': 'POA', 'qtum.info': 'QTUM', 'explorer.rsk.co': 'RSK', 'www.oasisscan.com': 'ROSE', 'ravencoin.network': 'RVN', 'sc.tokenview.com': 'SC', 'secretnodes.com': 'SCRT', 'explorer.skycoin.com': 'SKY', 'steemscan.com': 'STEEM', 'explorer.stacks.co': 'STX', 'www.thetascan.io': 'THETA', 'scan.tomochain.com': 'TOMO', 'explore.vechain.org': 'VET', 'explorer.vite.net': 'VITE', 'www.wanscan.org': 'WAN', 'wavesexplorer.com': 'WAVES', 'wax.eosx.io': 'WAXP', 'waltonchain.pro': 'WTC', 'chain.nem.ninja': 'XEM', 'verge-blockchain.info': 'XVG', 'explorer.yoyow.org': 'YOYOW', 'explorer.zcha.in': 'ZEC', 'explorer.zensystem.io': 'ZEN', }, 'impliedNetworks': { 'ETH': {'ERC20': 'ETH'}, 'TRX': {'TRC20': 'TRX'}, }, 'legalMoney': { 'MXN': True, 'UGX': True, 'SEK': True, 'CHF': True, 'VND': True, 'AED': True, 'DKK': True, 'KZT': True, 'HUF': True, 'PEN': True, 'PHP': True, 'USD': True, 'TRY': True, 'EUR': True, 'NGN': True, 'PLN': True, 'BRL': True, 'ZAR': True, 'KES': True, 'ARS': True, 'RUB': True, 'AUD': True, 'NOK': True, 'CZK': True, 'GBP': True, 'UAH': True, 'GHS': True, 'HKD': True, 'CAD': True, 'INR': True, 'JPY': True, 'NZD': True, }, }, # https://binance-docs.github.io/apidocs/spot/en/#error-codes-2 'exceptions': { 'exact': { 'System is under maintenance.': OnMaintenance, # {"code":1,"msg":"System is under maintenance."} 'System abnormality': ExchangeError, # {"code":-1000,"msg":"System abnormality"} 'You are not authorized to execute self request.': PermissionDenied, # {"msg":"You are not authorized to execute self request."} 'API key does not exist': AuthenticationError, 'Order would trigger immediately.': OrderImmediatelyFillable, 'Stop price would trigger immediately.': OrderImmediatelyFillable, # {"code":-2010,"msg":"Stop price would trigger immediately."} 'Order would immediately match and take.': OrderImmediatelyFillable, # {"code":-2010,"msg":"Order would immediately match and take."} 'Account has insufficient balance for requested action.': InsufficientFunds, 'Rest API trading is not enabled.': ExchangeNotAvailable, "You don't have permission.": PermissionDenied, # {"msg":"You don't have 
permission.","success":false} 'Market is closed.': ExchangeNotAvailable, # {"code":-1013,"msg":"Market is closed."} 'Too many requests. Please try again later.': DDoSProtection, # {"msg":"Too many requests. Please try again later.","success":false} '-1000': ExchangeNotAvailable, # {"code":-1000,"msg":"An unknown error occured while processing the request."} '-1001': ExchangeNotAvailable, # 'Internal error; unable to process your request. Please try again.' '-1002': AuthenticationError, # 'You are not authorized to execute self request.' '-1003': RateLimitExceeded, # {"code":-1003,"msg":"Too much request weight used, current limit is 1200 request weight per 1 MINUTE. Please use the websocket for live updates to avoid polling the API."} '-1013': InvalidOrder, # createOrder -> 'invalid quantity'/'invalid price'/MIN_NOTIONAL '-1015': RateLimitExceeded, # 'Too many new orders; current limit is %s orders per %s.' '-1016': ExchangeNotAvailable, # 'This service is no longer available.', '-1020': BadRequest, # 'This operation is not supported.' '-1021': InvalidNonce, # 'your time is ahead of server' '-1022': AuthenticationError, # {"code":-1022,"msg":"Signature for self request is not valid."} '-1100': BadRequest, # createOrder(symbol, 1, asdf) -> 'Illegal characters found in parameter 'price' '-1101': BadRequest, # Too many parameters; expected %s and received %s. '-1102': BadRequest, # Param %s or %s must be sent, but both were empty '-1103': BadRequest, # An unknown parameter was sent. '-1104': BadRequest, # Not all sent parameters were read, read 8 parameters but was sent 9 '-1105': BadRequest, # Parameter %s was empty. '-1106': BadRequest, # Parameter %s sent when not required. '-1111': BadRequest, # Precision is over the maximum defined for self asset. '-1112': InvalidOrder, # No orders on book for symbol. '-1114': BadRequest, # TimeInForce parameter sent when not required. '-1115': BadRequest, # Invalid timeInForce. '-1116': BadRequest, # Invalid orderType. '-1117': BadRequest, # Invalid side. '-1118': BadRequest, # New client order ID was empty. '-1119': BadRequest, # Original client order ID was empty. '-1120': BadRequest, # Invalid interval. '-1121': BadSymbol, # Invalid symbol. '-1125': AuthenticationError, # This listenKey does not exist. '-1127': BadRequest, # More than %s hours between startTime and endTime. '-1128': BadRequest, # {"code":-1128,"msg":"Combination of optional parameters invalid."} '-1130': BadRequest, # Data sent for paramter %s is not valid. '-1131': BadRequest, # recvWindow must be less than 60000 '-2008': AuthenticationError, # {"code":-2008,"msg":"Invalid Api-Key ID."} '-2010': ExchangeError, # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc... '-2011': OrderNotFound, # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER' '-2013': OrderNotFound, # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist' '-2014': AuthenticationError, # {"code":-2014, "msg": "API-key format invalid."} '-2015': AuthenticationError, # "Invalid API-key, IP, or permissions for action." '-2019': InsufficientFunds, # {"code":-2019,"msg":"Margin is insufficient."} '-3005': InsufficientFunds, # {"code":-3005,"msg":"Transferring out not allowed. Transfer out amount exceeds max amount."} '-3006': InsufficientFunds, # {"code":-3006,"msg":"Your borrow amount has exceed maximum borrow amount."} '-3008': InsufficientFunds, # {"code":-3008,"msg":"Borrow not allowed. 
Your borrow amount has exceed maximum borrow amount."} '-3010': ExchangeError, # {"code":-3010,"msg":"Repay not allowed. Repay amount exceeds borrow amount."} '-3015': ExchangeError, # {"code":-3015,"msg":"Repay amount exceeds borrow amount."} '-3022': AccountSuspended, # You account's trading is banned. '-4028': BadRequest, # {"code":-4028,"msg":"Leverage 100 is not valid"} '-3020': InsufficientFunds, # {"code":-3020,"msg":"Transfer out amount exceeds max amount."} '-3041': InsufficientFunds, # {"code":-3041,"msg":"Balance is not enough"} '-5013': InsufficientFunds, # Asset transfer failed: insufficient balance" '-11008': InsufficientFunds, # {"code":-11008,"msg":"Exceeding the account's maximum borrowable limit."} '-4051': InsufficientFunds, # {"code":-4051,"msg":"Isolated balance insufficient."} }, 'broad': { 'has no operation privilege': PermissionDenied, 'MAX_POSITION': InvalidOrder, # {"code":-2010,"msg":"Filter failure: MAX_POSITION"} }, }, }) def cost_to_precision(self, symbol, cost): return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['quote'], self.precisionMode, self.paddingMode) def currency_to_precision(self, currency, fee): # info is available in currencies only if the user has configured his api keys if self.safe_value(self.currencies[currency], 'precision') is not None: return self.decimal_to_precision(fee, TRUNCATE, self.currencies[currency]['precision'], self.precisionMode, self.paddingMode) else: return self.number_to_string(fee) def nonce(self): return self.milliseconds() - self.options['timeDifference'] def fetch_time(self, params={}): defaultType = self.safe_string_2(self.options, 'fetchTime', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') method = 'publicGetTime' if type == 'future': method = 'fapiPublicGetTime' elif type == 'delivery': method = 'dapiPublicGetTime' response = getattr(self, method)(query) return self.safe_integer(response, 'serverTime') def load_time_difference(self, params={}): serverTime = self.fetch_time(params) after = self.milliseconds() self.options['timeDifference'] = after - serverTime return self.options['timeDifference'] def fetch_currencies(self, params={}): fetchCurrenciesEnabled = self.safe_value(self.options, 'fetchCurrencies') if not fetchCurrenciesEnabled: return None # self endpoint requires authentication # while fetchCurrencies is a public API method by design # therefore we check the keys here # and fallback to generating the currencies from the markets if not self.check_required_credentials(False): return None # sandbox/testnet does not support sapi endpoints apiBackup = self.safe_string(self.urls, 'apiBackup') if apiBackup is not None: return None response = self.sapiGetCapitalConfigGetall(params) result = {} for i in range(0, len(response)): # # { # coin: 'LINK', # depositAllEnable: True, # withdrawAllEnable: True, # name: 'ChainLink', # free: '0.06168', # locked: '0', # freeze: '0', # withdrawing: '0', # ipoing: '0', # ipoable: '0', # storage: '0', # isLegalMoney: False, # trading: True, # networkList: [ # { # network: 'BNB', # coin: 'LINK', # withdrawIntegerMultiple: '0', # isDefault: False, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'Both a MEMO and an Address are required to successfully deposit your LINK BEP2 tokens to Binance.', # name: 'BEP2', # resetAddressStatus: False, # addressRegex: '^(bnb1)[0-9a-z]{38}$', # memoRegex: '^[0-9A-Za-z\\-_]{1,120}$', # withdrawFee: 
'0.002', # withdrawMin: '0.01', # withdrawMax: '9999999', # minConfirm: 1, # unLockConfirm: 0 # }, # { # network: 'BSC', # coin: 'LINK', # withdrawIntegerMultiple: '0.00000001', # isDefault: False, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: '', # name: 'BEP20(BSC)', # resetAddressStatus: False, # addressRegex: '^(0x)[0-9A-Fa-f]{40}$', # memoRegex: '', # withdrawFee: '0.005', # withdrawMin: '0.01', # withdrawMax: '9999999', # minConfirm: 15, # unLockConfirm: 0 # }, # { # network: 'ETH', # coin: 'LINK', # withdrawIntegerMultiple: '0.00000001', # isDefault: True, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # name: 'ERC20', # resetAddressStatus: False, # addressRegex: '^(0x)[0-9A-Fa-f]{40}$', # memoRegex: '', # withdrawFee: '0.34', # withdrawMin: '0.68', # withdrawMax: '0', # minConfirm: 12, # unLockConfirm: 0 # } # ] # } # entry = response[i] id = self.safe_string(entry, 'coin') name = self.safe_string(entry, 'name') code = self.safe_currency_code(id) precision = None isWithdrawEnabled = True isDepositEnabled = True networkList = self.safe_value(entry, 'networkList', []) fees = {} fee = None for j in range(0, len(networkList)): networkItem = networkList[j] network = self.safe_string(networkItem, 'network') # name = self.safe_string(networkItem, 'name') withdrawFee = self.safe_number(networkItem, 'withdrawFee') depositEnable = self.safe_value(networkItem, 'depositEnable') withdrawEnable = self.safe_value(networkItem, 'withdrawEnable') isDepositEnabled = isDepositEnabled or depositEnable isWithdrawEnabled = isWithdrawEnabled or withdrawEnable fees[network] = withdrawFee isDefault = self.safe_value(networkItem, 'isDefault') if isDefault or fee is None: fee = withdrawFee trading = self.safe_value(entry, 'trading') active = (isWithdrawEnabled and isDepositEnabled and trading) result[code] = { 'id': id, 'name': name, 'code': code, 'precision': precision, 'info': entry, 'active': active, 'networks': networkList, 'fee': fee, 'fees': fees, 'limits': self.limits, } return result def fetch_markets(self, params={}): defaultType = self.safe_string_2(self.options, 'fetchMarkets', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') if (type != 'spot') and (type != 'future') and (type != 'margin') and (type != 'delivery'): raise ExchangeError(self.id + " does not support '" + type + "' type, set exchange.options['defaultType'] to 'spot', 'margin', 'delivery' or 'future'") # eslint-disable-line quotes method = 'publicGetExchangeInfo' if type == 'future': method = 'fapiPublicGetExchangeInfo' elif type == 'delivery': method = 'dapiPublicGetExchangeInfo' response = getattr(self, method)(query) # # spot / margin # # { # "timezone":"UTC", # "serverTime":1575416692969, # "rateLimits":[ # {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":1200}, # {"rateLimitType":"ORDERS","interval":"SECOND","intervalNum":10,"limit":100}, # {"rateLimitType":"ORDERS","interval":"DAY","intervalNum":1,"limit":200000} # ], # "exchangeFilters":[], # "symbols":[ # { # "symbol":"ETHBTC", # "status":"TRADING", # "baseAsset":"ETH", # "baseAssetPrecision":8, # "quoteAsset":"BTC", # "quotePrecision":8, # "baseCommissionPrecision":8, # "quoteCommissionPrecision":8, # "orderTypes":["LIMIT","LIMIT_MAKER","MARKET","STOP_LOSS_LIMIT","TAKE_PROFIT_LIMIT"], # "icebergAllowed":true, # "ocoAllowed":true, # "quoteOrderQtyMarketAllowed":true, # "isSpotTradingAllowed":true, 
# "isMarginTradingAllowed":true, # "filters":[ # {"filterType":"PRICE_FILTER","minPrice":"0.00000100","maxPrice":"100000.00000000","tickSize":"0.00000100"}, # {"filterType":"PERCENT_PRICE","multiplierUp":"5","multiplierDown":"0.2","avgPriceMins":5}, # {"filterType":"LOT_SIZE","minQty":"0.00100000","maxQty":"100000.00000000","stepSize":"0.00100000"}, # {"filterType":"MIN_NOTIONAL","minNotional":"0.00010000","applyToMarket":true,"avgPriceMins":5}, # {"filterType":"ICEBERG_PARTS","limit":10}, # {"filterType":"MARKET_LOT_SIZE","minQty":"0.00000000","maxQty":"63100.00000000","stepSize":"0.00000000"}, # {"filterType":"MAX_NUM_ALGO_ORDERS","maxNumAlgoOrders":5} # ] # }, # ], # } # # futures/usdt-margined(fapi) # # { # "timezone":"UTC", # "serverTime":1575417244353, # "rateLimits":[ # {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":1200}, # {"rateLimitType":"ORDERS","interval":"MINUTE","intervalNum":1,"limit":1200} # ], # "exchangeFilters":[], # "symbols":[ # { # "symbol":"BTCUSDT", # "status":"TRADING", # "maintMarginPercent":"2.5000", # "requiredMarginPercent":"5.0000", # "baseAsset":"BTC", # "quoteAsset":"USDT", # "pricePrecision":2, # "quantityPrecision":3, # "baseAssetPrecision":8, # "quotePrecision":8, # "filters":[ # {"minPrice":"0.01","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.01"}, # {"stepSize":"0.001","filterType":"LOT_SIZE","maxQty":"1000","minQty":"0.001"}, # {"stepSize":"0.001","filterType":"MARKET_LOT_SIZE","maxQty":"1000","minQty":"0.001"}, # {"limit":200,"filterType":"MAX_NUM_ORDERS"}, # {"multiplierDown":"0.8500","multiplierUp":"1.1500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"} # ], # "orderTypes":["LIMIT","MARKET","STOP"], # "timeInForce":["GTC","IOC","FOK","GTX"] # } # ] # } # # delivery/coin-margined(dapi) # # { # "timezone": "UTC", # "serverTime": 1597667052958, # "rateLimits": [ # {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":6000}, # {"rateLimitType":"ORDERS","interval":"MINUTE","intervalNum":1,"limit":6000} # ], # "exchangeFilters": [], # "symbols": [ # { # "symbol": "BTCUSD_200925", # "pair": "BTCUSD", # "contractType": "CURRENT_QUARTER", # "deliveryDate": 1601020800000, # "onboardDate": 1590739200000, # "contractStatus": "TRADING", # "contractSize": 100, # "marginAsset": "BTC", # "maintMarginPercent": "2.5000", # "requiredMarginPercent": "5.0000", # "baseAsset": "BTC", # "quoteAsset": "USD", # "pricePrecision": 1, # "quantityPrecision": 0, # "baseAssetPrecision": 8, # "quotePrecision": 8, # "equalQtyPrecision": 4, # "filters": [ # {"minPrice":"0.1","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.1"}, # {"stepSize":"1","filterType":"LOT_SIZE","maxQty":"100000","minQty":"1"}, # {"stepSize":"0","filterType":"MARKET_LOT_SIZE","maxQty":"100000","minQty":"1"}, # {"limit":200,"filterType":"MAX_NUM_ORDERS"}, # {"multiplierDown":"0.9500","multiplierUp":"1.0500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"} # ], # "orderTypes": ["LIMIT","MARKET","STOP","STOP_MARKET","TAKE_PROFIT","TAKE_PROFIT_MARKET","TRAILING_STOP_MARKET"], # "timeInForce": ["GTC","IOC","FOK","GTX"] # }, # { # "symbol": "BTCUSD_PERP", # "pair": "BTCUSD", # "contractType": "PERPETUAL", # "deliveryDate": 4133404800000, # "onboardDate": 1596006000000, # "contractStatus": "TRADING", # "contractSize": 100, # "marginAsset": "BTC", # "maintMarginPercent": "2.5000", # "requiredMarginPercent": "5.0000", # "baseAsset": "BTC", # "quoteAsset": "USD", # "pricePrecision": 1, # "quantityPrecision": 0, # 
"baseAssetPrecision": 8, # "quotePrecision": 8, # "equalQtyPrecision": 4, # "filters": [ # {"minPrice":"0.1","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.1"}, # {"stepSize":"1","filterType":"LOT_SIZE","maxQty":"100000","minQty":"1"}, # {"stepSize":"1","filterType":"MARKET_LOT_SIZE","maxQty":"100000","minQty":"1"}, # {"limit":200,"filterType":"MAX_NUM_ORDERS"}, # {"multiplierDown":"0.8500","multiplierUp":"1.1500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"} # ], # "orderTypes": ["LIMIT","MARKET","STOP","STOP_MARKET","TAKE_PROFIT","TAKE_PROFIT_MARKET","TRAILING_STOP_MARKET"], # "timeInForce": ["GTC","IOC","FOK","GTX"] # } # ] # } # if self.options['adjustForTimeDifference']: self.load_time_difference() markets = self.safe_value(response, 'symbols', []) result = [] for i in range(0, len(markets)): market = markets[i] spot = (type == 'spot') future = (type == 'future') delivery = (type == 'delivery') id = self.safe_string(market, 'symbol') lowercaseId = self.safe_string_lower(market, 'symbol') baseId = self.safe_string(market, 'baseAsset') quoteId = self.safe_string(market, 'quoteAsset') base = self.safe_currency_code(baseId) quote = self.safe_currency_code(quoteId) contractType = self.safe_string(market, 'contractType') idSymbol = (future or delivery) and (contractType != 'PERPETUAL') symbol = None expiry = None if idSymbol: symbol = id expiry = self.safe_integer(market, 'deliveryDate') else: symbol = base + '/' + quote filters = self.safe_value(market, 'filters', []) filtersByType = self.index_by(filters, 'filterType') precision = { 'base': self.safe_integer(market, 'baseAssetPrecision'), 'quote': self.safe_integer(market, 'quotePrecision'), 'amount': self.safe_integer(market, 'quantityPrecision'), 'price': self.safe_integer(market, 'pricePrecision'), } status = self.safe_string_2(market, 'status', 'contractStatus') active = (status == 'TRADING') margin = self.safe_value(market, 'isMarginTradingAllowed', False) contractSize = None fees = self.fees if future or delivery: contractSize = self.safe_string(market, 'contractSize', '1') fees = self.fees[type] maker = fees['trading']['maker'] taker = fees['trading']['taker'] settleId = self.safe_string(market, 'marginAsset') settle = self.safe_currency_code(settleId) entry = { 'id': id, 'lowercaseId': lowercaseId, 'symbol': symbol, 'base': base, 'quote': quote, 'baseId': baseId, 'quoteId': quoteId, 'info': market, 'spot': spot, 'type': type, 'margin': margin, 'future': future, 'delivery': delivery, 'linear': future, 'inverse': delivery, 'expiry': expiry, 'expiryDatetime': self.iso8601(expiry), 'settleId': settleId, 'settle': settle, 'active': active, 'precision': precision, 'contractSize': contractSize, 'maker': maker, 'taker': taker, 'limits': { 'amount': { 'min': None, 'max': None, }, 'price': { 'min': None, 'max': None, }, 'cost': { 'min': None, 'max': None, }, }, } if 'PRICE_FILTER' in filtersByType: filter = self.safe_value(filtersByType, 'PRICE_FILTER', {}) tickSize = self.safe_string(filter, 'tickSize') entry['precision']['price'] = self.precision_from_string(tickSize) # PRICE_FILTER reports zero values for maxPrice # since they updated filter types in November 2018 # https://github.com/ccxt/ccxt/issues/4286 # therefore limits['price']['max'] doesn't have any meaningful value except None entry['limits']['price'] = { 'min': self.safe_number(filter, 'minPrice'), 'max': self.safe_number(filter, 'maxPrice'), } entry['precision']['price'] = self.precision_from_string(filter['tickSize']) if 'LOT_SIZE' in filtersByType: 
    def fetch_balance(self, params={}):
        self.load_markets()
        defaultType = self.safe_string_2(self.options, 'fetchBalance', 'defaultType', 'spot')
        type = self.safe_string(params, 'type', defaultType)
        method = 'privateGetAccount'
        if type == 'future':
            options = self.safe_value(self.options, type, {})
            fetchBalanceOptions = self.safe_value(options, 'fetchBalance', {})
            method = self.safe_string(fetchBalanceOptions, 'method', 'fapiPrivateV2GetAccount')
        elif type == 'delivery':
            options = self.safe_value(self.options, type, {})
            fetchBalanceOptions = self.safe_value(options, 'fetchBalance', {})
            method = self.safe_string(fetchBalanceOptions, 'method', 'dapiPrivateGetAccount')
        elif type == 'margin':
            method = 'sapiGetMarginAccount'
        elif type == 'savings':
            method = 'sapiGetLendingUnionAccount'
        elif type == 'funding':
            method = 'sapiPostAssetGetFundingAsset'
        query = self.omit(params, 'type')
        response = getattr(self, method)(query)
        #
        # spot
        #
        #     {
        #         makerCommission: 10,
        #         takerCommission: 10,
        #         buyerCommission: 0,
        #         sellerCommission: 0,
        #         canTrade: True,
        #         canWithdraw: True,
        #         canDeposit: True,
        #         updateTime: 1575357359602,
        #         accountType: "MARGIN",
        #         balances: [
        #             {asset: "BTC", free: "0.00219821", locked: "0.00000000"},
        #         ]
        #     }
        #
        # margin
        #
        #     {
        #         "borrowEnabled":true,
        #         "marginLevel":"999.00000000",
        #         "totalAssetOfBtc":"0.00000000",
        #         "totalLiabilityOfBtc":"0.00000000",
        #         "totalNetAssetOfBtc":"0.00000000",
        #         "tradeEnabled":true,
        #         "transferEnabled":true,
        #         "userAssets":[
        #             {"asset":"MATIC","borrowed":"0.00000000","free":"0.00000000","interest":"0.00000000","locked":"0.00000000","netAsset":"0.00000000"},
        #             {"asset":"VET","borrowed":"0.00000000","free":"0.00000000","interest":"0.00000000","locked":"0.00000000","netAsset":"0.00000000"},
        #             {"asset":"USDT","borrowed":"0.00000000","free":"0.00000000","interest":"0.00000000","locked":"0.00000000","netAsset":"0.00000000"}
        #         ],
        #     }
        #
        # futures(fapi)
        #
        # fapiPrivateGetAccount
        #
        #     {
        #         "feeTier":0,
        #         "canTrade":true,
        #         "canDeposit":true,
        #         "canWithdraw":true,
        #         "updateTime":0,
        #         "totalInitialMargin":"0.00000000",
        #         "totalMaintMargin":"0.00000000",
        #         "totalWalletBalance":"4.54000000",
        #         "totalUnrealizedProfit":"0.00000000",
        #         "totalMarginBalance":"4.54000000",
        #         "totalPositionInitialMargin":"0.00000000",
        #         "totalOpenOrderInitialMargin":"0.00000000",
        #         "maxWithdrawAmount":"4.54000000",
        #         "assets":[
        #             {
        #                 "asset":"USDT",
        #                 "walletBalance":"4.54000000",
        #                 "unrealizedProfit":"0.00000000",
        #                 "marginBalance":"4.54000000",
        #                 "maintMargin":"0.00000000",
        #                 "initialMargin":"0.00000000",
        #                 "positionInitialMargin":"0.00000000",
        #                 "openOrderInitialMargin":"0.00000000",
        #                 "maxWithdrawAmount":"4.54000000"
        #             }
        #         ],
        #         "positions":[
        #             {
        #                 "symbol":"BTCUSDT",
        #                 "initialMargin":"0.00000",
        #                 "maintMargin":"0.00000",
        #                 "unrealizedProfit":"0.00000000",
"positionInitialMargin":"0.00000", # "openOrderInitialMargin":"0.00000" # } # ] # } # # fapiPrivateV2GetAccount # # { # "feeTier":0, # "canTrade":true, # "canDeposit":true, # "canWithdraw":true, # "updateTime":0, # "totalInitialMargin":"0.00000000", # "totalMaintMargin":"0.00000000", # "totalWalletBalance":"0.00000000", # "totalUnrealizedProfit":"0.00000000", # "totalMarginBalance":"0.00000000", # "totalPositionInitialMargin":"0.00000000", # "totalOpenOrderInitialMargin":"0.00000000", # "totalCrossWalletBalance":"0.00000000", # "totalCrossUnPnl":"0.00000000", # "availableBalance":"0.00000000", # "maxWithdrawAmount":"0.00000000", # "assets":[ # { # "asset":"BNB", # "walletBalance":"0.01000000", # "unrealizedProfit":"0.00000000", # "marginBalance":"0.01000000", # "maintMargin":"0.00000000", # "initialMargin":"0.00000000", # "positionInitialMargin":"0.00000000", # "openOrderInitialMargin":"0.00000000", # "maxWithdrawAmount":"0.01000000", # "crossWalletBalance":"0.01000000", # "crossUnPnl":"0.00000000", # "availableBalance":"0.01000000" # } # ], # "positions":[ # { # "symbol":"BTCUSDT", # "initialMargin":"0", # "maintMargin":"0", # "unrealizedProfit":"0.00000000", # "positionInitialMargin":"0", # "openOrderInitialMargin":"0", # "leverage":"20", # "isolated":false, # "entryPrice":"0.00000", # "maxNotional":"5000000", # "positionSide":"BOTH" # }, # ] # } # # fapiPrivateV2GetBalance # # [ # { # "accountAlias":"FzFzXquXXqoC", # "asset":"BNB", # "balance":"0.01000000", # "crossWalletBalance":"0.01000000", # "crossUnPnl":"0.00000000", # "availableBalance":"0.01000000", # "maxWithdrawAmount":"0.01000000" # } # ] # # savings # # { # "totalAmountInBTC": "0.3172", # "totalAmountInUSDT": "10000", # "totalFixedAmountInBTC": "0.3172", # "totalFixedAmountInUSDT": "10000", # "totalFlexibleInBTC": "0", # "totalFlexibleInUSDT": "0", # "positionAmountVos": [ # { # "asset": "USDT", # "amount": "10000", # "amountInBTC": "0.3172", # "amountInUSDT": "10000" # }, # { # "asset": "BUSD", # "amount": "0", # "amountInBTC": "0", # "amountInUSDT": "0" # } # ] # } # # binance pay # # [ # { # "asset": "BUSD", # "free": "1129.83", # "locked": "0", # "freeze": "0", # "withdrawing": "0" # } # ] # result = { 'info': response, } timestamp = None if (type == 'spot') or (type == 'margin'): timestamp = self.safe_integer(response, 'updateTime') balances = self.safe_value_2(response, 'balances', 'userAssets', []) for i in range(0, len(balances)): balance = balances[i] currencyId = self.safe_string(balance, 'asset') code = self.safe_currency_code(currencyId) account = self.account() account['free'] = self.safe_string(balance, 'free') account['used'] = self.safe_string(balance, 'locked') result[code] = account elif type == 'savings': positionAmountVos = self.safe_value(response, 'positionAmountVos') for i in range(0, len(positionAmountVos)): entry = positionAmountVos[i] currencyId = self.safe_string(entry, 'asset') code = self.safe_currency_code(currencyId) account = self.account() usedAndTotal = self.safe_string(entry, 'amount') account['total'] = usedAndTotal account['used'] = usedAndTotal result[code] = account elif type == 'funding': for i in range(0, len(response)): entry = response[i] account = self.account() currencyId = self.safe_string(entry, 'asset') code = self.safe_currency_code(currencyId) account['free'] = self.safe_string(entry, 'free') frozen = self.safe_string(entry, 'freeze') withdrawing = self.safe_string(entry, 'withdrawing') locked = self.safe_string(entry, 'locked') account['used'] = Precise.string_add(frozen, 
Precise.string_add(locked, withdrawing))
                result[code] = account
        else:
            balances = response
            if not isinstance(response, list):
                balances = self.safe_value(response, 'assets', [])
            for i in range(0, len(balances)):
                balance = balances[i]
                currencyId = self.safe_string(balance, 'asset')
                code = self.safe_currency_code(currencyId)
                account = self.account()
                account['free'] = self.safe_string(balance, 'availableBalance')
                account['used'] = self.safe_string(balance, 'initialMargin')
                account['total'] = self.safe_string_2(balance, 'marginBalance', 'balance')
                result[code] = account
        result['timestamp'] = timestamp
        result['datetime'] = self.iso8601(timestamp)
        return self.parse_balance(result)

    def fetch_order_book(self, symbol, limit=None, params={}):
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        if limit is not None:
            request['limit'] = limit  # default 100, max 5000, see https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#order-book
        method = 'publicGetDepth'
        if market['linear']:
            method = 'fapiPublicGetDepth'
        elif market['inverse']:
            method = 'dapiPublicGetDepth'
        response = getattr(self, method)(self.extend(request, params))
        #
        # future
        #
        #     {
        #         "lastUpdateId":333598053905,
        #         "E":1618631511986,
        #         "T":1618631511964,
        #         "bids":[
        #             ["2493.56","20.189"],
        #             ["2493.54","1.000"],
        #             ["2493.51","0.005"],
        #             (several hundred additional price levels omitted from the sample for brevity)
"],["2491.13","0.383"],["2491.12","125.959"],["2491.10","0.870"],["2491.08","10.518"],["2491.05","54.743"],["2491.01","7.980"],["2490.96","3.916"],["2490.95","0.135"],["2490.91","0.140"],["2490.89","8.424"],["2490.88","5.930"],["2490.84","1.208"],["2490.83","2.005"],["2490.82","5.517"],["2490.81","73.707"],["2490.80","1.042"],["2490.79","9.626"],["2490.72","3.916"],["2490.70","0.148"],["2490.69","0.403"],["2490.68","0.012"],["2490.67","21.887"],["2490.66","0.008"],["2490.64","11.500"],["2490.61","0.005"],["2490.58","68.175"],["2490.55","0.218"],["2490.54","14.132"],["2490.53","5.157"],["2490.50","0.018"],["2490.49","9.216"],["2490.48","3.979"],["2490.47","1.884"],["2490.44","0.003"],["2490.36","14.132"],["2490.35","2.008"],["2490.34","0.200"],["2490.33","0.015"],["2490.30","0.065"],["2490.29","5.500"],["2490.28","24.203"],["2490.26","4.373"],["2490.25","0.026"],["2490.24","4.000"],["2490.23","177.628"],["2490.22","14.132"],["2490.21","0.181"],["2490.20","0.645"],["2490.19","9.024"],["2490.18","0.108"],["2490.17","0.085"],["2490.16","0.077"],["2490.14","0.275"],["2490.10","0.080"],["2490.07","0.015"],["2490.04","6.056"],["2490.00","6.796"],["2489.98","0.005"],["2489.97","0.258"],["2489.96","10.084"],["2489.95","1.202"],["2489.91","10.121"],["2489.90","10.084"],["2489.88","0.040"],["2489.87","0.004"],["2489.85","0.003"],["2489.76","3.916"],["2489.73","10.084"],["2489.71","0.272"],["2489.70","12.834"],["2489.67","0.403"],["2489.66","0.362"],["2489.64","0.738"],["2489.63","193.236"],["2489.62","14.152"],["2489.61","0.157"],["2489.59","4.011"],["2489.57","0.015"],["2489.55","0.046"],["2489.52","3.921"],["2489.51","0.005"],["2489.45","80.000"],["2489.44","0.649"],["2489.43","10.088"],["2489.39","0.009"],["2489.37","14.132"],["2489.35","72.262"],["2489.34","10.084"],["2489.33","14.136"],["2489.32","23.953"],["2489.30","0.065"],["2489.28","8.136"],["2489.24","8.022"],["2489.19","14.132"],["2489.18","0.085"],["2489.17","0.108"],["2489.14","10.084"],["2489.13","3.142"],["2489.12","77.827"],["2489.11","10.084"],["2489.10","0.080"],["2489.09","50.024"],["2489.04","3.916"],["2489.03","0.008"],["2489.01","10.084"],["2488.99","0.135"],["2488.98","0.187"],["2488.96","0.324"],["2488.92","0.064"],["2488.85","16.056"],["2488.83","14.132"],["2488.80","3.916"],["2488.79","10.084"],["2488.77","4.414"],["2488.76","0.005"],["2488.75","13.685"],["2488.73","0.020"],["2488.69","0.157"],["2488.60","80.000"],["2488.58","10.164"],["2488.57","0.004"],["2488.56","3.933"],["2488.54","3.311"],["2488.51","12.814"],["2488.50","80.099"],["2488.48","0.684"],["2488.44","0.024"],["2488.42","68.180"],["2488.39","4.412"],["2488.38","26.138"],["2488.34","44.134"],["2488.32","8.014"],["2488.30","0.065"],["2488.29","0.009"],["2488.27","4.513"],["2488.26","4.222"],["2488.25","80.000"],["2488.23","0.007"],["2488.22","0.281"],["2488.19","0.100"],["2488.18","80.100"],["2488.17","80.000"],["2488.16","8.197"],["2488.15","79.184"],["2488.13","0.025"],["2488.11","0.050"],["2488.10","0.080"],["2488.08","3.919"],["2488.04","40.103"],["2488.03","0.120"],["2488.02","0.008"],["2488.01","0.140"],["2488.00","0.406"],["2487.99","0.384"],["2487.98","0.060"],["2487.96","8.010"],["2487.94","0.246"],["2487.93","0.020"],["2487.91","0.136"],["2487.87","0.403"],["2487.84","17.910"],["2487.81","0.005"],["2487.80","0.073"],["2487.74","36.000"],["2487.73","3.225"],["2487.72","0.018"],["2487.71","0.319"],["2487.70","0.006"],["2487.66","0.003"],["2487.64","0.003"],["2487.63","0.008"],["2487.62","0.040"],["2487.60","3.916"],["2487.54","0.805"],["2487.52","0.022"]
,["2487.51","0.003"],["2487.50","0.051"],["2487.49","6.081"],["2487.47","80.015"],["2487.46","4.735"],["2487.45","30.000"],["2487.41","0.096"],["2487.40","0.078"],["2487.39","0.103"],["2487.37","2.279"],["2487.36","8.152"],["2487.35","2.145"],["2487.32","12.816"],["2487.31","10.023"],["2487.30","0.157"],["2487.27","0.005"],["2487.26","4.010"],["2487.25","0.008"],["2487.24","0.003"],["2487.23","0.014"],["2487.20","0.085"],["2487.17","0.011"],["2487.14","3.217"],["2487.12","3.916"],["2487.11","0.300"],["2487.10","0.088"],["2487.08","10.097"],["2487.07","1.467"],["2487.04","0.600"],["2487.01","18.363"],["2487.00","0.292"],["2486.99","0.014"],["2486.98","0.144"],["2486.97","0.443"],["2486.92","0.005"],["2486.91","0.016"],["2486.89","3.364"],["2486.88","4.166"],["2486.84","24.306"],["2486.83","0.181"],["2486.81","0.015"],["2486.80","0.082"],["2486.79","0.007"],["2486.76","0.011"],["2486.74","0.050"],["2486.73","0.782"],["2486.72","0.004"],["2486.69","0.003"],["2486.68","8.018"],["2486.66","10.004"],["2486.65","40.391"],["2486.64","3.916"],["2486.61","0.489"],["2486.60","0.196"],["2486.57","0.396"],["2486.55","4.015"],["2486.51","3.000"],["2486.50","0.003"],["2486.48","0.005"],["2486.47","0.010"],["2486.45","4.011"],["2486.44","0.602"],["2486.43","0.566"],["2486.42","3.140"],["2486.40","3.958"],["2486.39","0.003"],["2486.34","0.010"],["2486.31","6.281"],["2486.27","0.005"],["2486.26","0.004"],["2486.23","10.088"],["2486.22","0.015"],["2486.17","0.030"],["2486.16","3.916"],["2486.15","0.020"],["2486.13","13.130"],["2486.12","82.414"],["2486.11","0.244"],["2486.10","0.132"],["2486.08","0.720"],["2486.06","0.385"],["2486.01","0.004"],["2486.00","2.359"],["2485.99","154.159"],["2485.98","20.054"],["2485.96","1.000"],["2485.95","0.190"],["2485.92","4.463"],["2485.90","1.557"],["2485.87","0.402"],["2485.85","0.114"],["2485.81","0.900"],["2485.76","4.700"],["2485.75","0.300"],["2485.74","0.196"],["2485.73","4.010"],["2485.72","0.323"],["2485.70","0.263"],["2485.69","0.261"],["2485.68","3.688"],["2485.67","0.005"],["2485.64","1.216"],["2485.63","0.005"],["2485.62","0.015"],["2485.61","0.033"],["2485.60","0.004"],["2485.58","2.012"],["2485.56","0.020"],["2485.54","0.699"],["2485.52","0.003"],["2485.51","1.830"],["2485.48","5.964"],["2485.47","0.015"],["2485.44","7.251"],["2485.43","0.006"],["2485.42","0.644"],["2485.40","8.026"],["2485.38","0.489"],["2485.36","0.014"],["2485.35","0.005"],["2485.31","1.507"],["2485.30","2.107"],["2485.29","0.039"],["2485.28","0.642"],["2485.26","1.990"],["2485.25","4.996"],["2485.23","0.003"],["2485.22","0.277"],["2485.21","0.121"],["2485.20","3.952"],["2485.18","0.006"],["2485.17","0.043"],["2485.15","4.008"],["2485.14","4.434"],["2485.13","1.003"],["2485.05","0.204"],["2485.04","0.254"],["2485.02","5.000"],["2485.01","0.050"],["2485.00","80.821"],["2484.96","3.941"],["2484.95","10.023"],["2484.94","13.935"],["2484.92","0.059"],["2484.90","150.000"],["2484.89","0.004"],["2484.88","150.127"],["2484.87","0.004"],["2484.85","0.100"],["2484.83","0.006"],["2484.82","0.030"],["2484.81","1.246"],["2484.80","0.003"],["2484.79","0.045"],["2484.77","0.003"],["2484.74","0.036"],["2484.72","3.919"],["2484.70","0.134"],["2484.68","1.111"],["2484.66","76.955"],["2484.60","2.580"],["2484.59","31.432"],["2484.58","1.468"],["2484.55","1.153"],["2484.54","0.265"],["2484.53","20.024"],["2484.51","1.047"],["2484.50","0.818"],["2484.49","0.022"],["2484.48","3.887"],["2484.46","0.048"],["2484.45","0.224"],["2484.44","0.174"],["2484.43","223.079"],["2484.42","0.014"],["2484.41","1.115"],["2484.
39","26.090"],["2484.38","0.066"],["2484.37","0.121"],["2484.34","0.255"],["2484.33","23.968"],["2484.29","0.085"],["2484.27","1.128"],["2484.26","1.456"],["2484.24","3.916"],["2484.23","28.126"],["2484.22","1.329"],["2484.19","2.015"],["2484.18","0.263"],["2484.15","15.489"],["2484.14","1.135"],["2484.13","0.572"],["2484.12","8.032"],["2484.11","0.021"],["2484.09","0.059"],["2484.08","0.038"],["2484.07","0.147"],["2484.05","24.156"],["2484.04","0.008"],["2484.01","1.184"],["2484.00","4.641"],["2483.99","0.006"],["2483.97","0.294"],["2483.96","0.424"],["2483.94","3.660"],["2483.93","2.067"],["2483.92","0.008"],["2483.89","0.141"],["2483.88","1.089"], # ["2483.87","110.000"],["2483.85","4.018"],["2483.81","150.077"],["2483.80","0.003"],["2483.77","0.020"] # ], # "asks":[ # ["2493.57","0.877"], # ["2493.62","0.063"], # ["2493.71","12.054"], # ] # } timestamp = self.safe_integer(response, 'T') orderbook = self.parse_order_book(response, symbol, timestamp) orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId') return orderbook def parse_ticker(self, ticker, market=None): # # { # symbol: 'ETHBTC', # priceChange: '0.00068700', # priceChangePercent: '2.075', # weightedAvgPrice: '0.03342681', # prevClosePrice: '0.03310300', # lastPrice: '0.03378900', # lastQty: '0.07700000', # bidPrice: '0.03378900', # bidQty: '7.16800000', # askPrice: '0.03379000', # askQty: '24.00000000', # openPrice: '0.03310200', # highPrice: '0.03388900', # lowPrice: '0.03306900', # volume: '205478.41000000', # quoteVolume: '6868.48826294', # openTime: 1601469986932, # closeTime: 1601556386932, # firstId: 196098772, # lastId: 196186315, # count: 87544 # } # # coinm # { # baseVolume: '214549.95171161', # closeTime: '1621965286847', # count: '1283779', # firstId: '152560106', # highPrice: '39938.3', # lastId: '153843955', # lastPrice: '37993.4', # lastQty: '1', # lowPrice: '36457.2', # openPrice: '37783.4', # openTime: '1621878840000', # pair: 'BTCUSD', # priceChange: '210.0', # priceChangePercent: '0.556', # symbol: 'BTCUSD_PERP', # volume: '81990451', # weightedAvgPrice: '38215.08713747' # } # timestamp = self.safe_integer(ticker, 'closeTime') marketId = self.safe_string(ticker, 'symbol') symbol = self.safe_symbol(marketId, market) last = self.safe_number(ticker, 'lastPrice') isCoinm = ('baseVolume' in ticker) baseVolume = None quoteVolume = None if isCoinm: baseVolume = self.safe_number(ticker, 'baseVolume') quoteVolume = self.safe_number(ticker, 'volume') else: baseVolume = self.safe_number(ticker, 'volume') quoteVolume = self.safe_number(ticker, 'quoteVolume') return self.safe_ticker({ 'symbol': symbol, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'high': self.safe_number(ticker, 'highPrice'), 'low': self.safe_number(ticker, 'lowPrice'), 'bid': self.safe_number(ticker, 'bidPrice'), 'bidVolume': self.safe_number(ticker, 'bidQty'), 'ask': self.safe_number(ticker, 'askPrice'), 'askVolume': self.safe_number(ticker, 'askQty'), 'vwap': self.safe_number(ticker, 'weightedAvgPrice'), 'open': self.safe_number(ticker, 'openPrice'), 'close': last, 'last': last, 'previousClose': self.safe_number(ticker, 'prevClosePrice'), # previous day close 'change': self.safe_number(ticker, 'priceChange'), 'percentage': self.safe_number(ticker, 'priceChangePercent'), 'average': None, 'baseVolume': baseVolume, 'quoteVolume': quoteVolume, 'info': ticker, }, market) def fetch_status(self, params={}): response = self.sapiGetSystemStatus(params) status = self.safe_string(response, 'status') if status is not None: status = 'ok' 
    def fetch_status(self, params={}):
        response = self.sapiGetSystemStatus(params)
        status = self.safe_string(response, 'status')
        if status is not None:
            status = 'ok' if (status == '0') else 'maintenance'
            self.status = self.extend(self.status, {
                'status': status,
                'updated': self.milliseconds(),
            })
        return self.status

    def fetch_ticker(self, symbol, params={}):
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        method = 'publicGetTicker24hr'
        if market['linear']:
            method = 'fapiPublicGetTicker24hr'
        elif market['inverse']:
            method = 'dapiPublicGetTicker24hr'
        response = getattr(self, method)(self.extend(request, params))
        if isinstance(response, list):
            firstTicker = self.safe_value(response, 0, {})
            return self.parse_ticker(firstTicker, market)
        return self.parse_ticker(response, market)

    def fetch_bids_asks(self, symbols=None, params={}):
        self.load_markets()
        defaultType = self.safe_string_2(self.options, 'fetchBidsAsks', 'defaultType', 'spot')
        type = self.safe_string(params, 'type', defaultType)
        query = self.omit(params, 'type')
        method = None
        if type == 'future':
            method = 'fapiPublicGetTickerBookTicker'
        elif type == 'delivery':
            method = 'dapiPublicGetTickerBookTicker'
        else:
            method = 'publicGetTickerBookTicker'
        response = getattr(self, method)(query)
        return self.parse_tickers(response, symbols)

    def fetch_tickers(self, symbols=None, params={}):
        self.load_markets()
        defaultType = self.safe_string_2(self.options, 'fetchTickers', 'defaultType', 'spot')
        type = self.safe_string(params, 'type', defaultType)
        query = self.omit(params, 'type')
        defaultMethod = None
        if type == 'future':
            defaultMethod = 'fapiPublicGetTicker24hr'
        elif type == 'delivery':
            defaultMethod = 'dapiPublicGetTicker24hr'
        else:
            defaultMethod = 'publicGetTicker24hr'
        method = self.safe_string(self.options, 'fetchTickersMethod', defaultMethod)
        response = getattr(self, method)(query)
        return self.parse_tickers(response, symbols)

    def parse_ohlcv(self, ohlcv, market=None):
        # when api method = publicGetKlines or fapiPublicGetKlines or dapiPublicGetKlines
        #     [
        #         1591478520000,  # open time
        #         "0.02501300",   # open
        #         "0.02501800",   # high
        #         "0.02500000",   # low
        #         "0.02500000",   # close
        #         "22.19000000",  # volume
        #         1591478579999,  # close time
        #         "0.55490906",   # quote asset volume
        #         40,             # number of trades
        #         "10.92900000",  # taker buy base asset volume
        #         "0.27336462",   # taker buy quote asset volume
        #         "0"             # ignore
        #     ]
        #
        # when api method = fapiPublicGetMarkPriceKlines or fapiPublicGetIndexPriceKlines
        #     [
        #         [
        #         1591256460000,     # Open time
        #         "9653.29201333",   # Open
        #         "9654.56401333",   # High
        #         "9653.07367333",   # Low
        #         "9653.07367333",   # Close(or latest price)
        #         "0",               # Ignore
        #         1591256519999,     # Close time
        #         "0",               # Ignore
        #         60,                # Number of basic data
        #         "0",               # Ignore
        #         "0",               # Ignore
        #         "0"                # Ignore
        #         ]
        #     ]
        #
        return [
            self.safe_integer(ohlcv, 0),
            self.safe_number(ohlcv, 1),
            self.safe_number(ohlcv, 2),
            self.safe_number(ohlcv, 3),
            self.safe_number(ohlcv, 4),
            self.safe_number(ohlcv, 5),
        ]
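    # Worked illustration(derived from the first sample row above): parse_ohlcv
    # keeps only the first six fields and coerces them to numbers, i.e.
    #
    #     parse_ohlcv([1591478520000, "0.02501300", "0.02501800", "0.02500000", "0.02500000", "22.19000000", ...])
    #         # -> [1591478520000, 0.025013, 0.025018, 0.025, 0.025, 22.19]
    #         # -> [timestamp, open, high, low, close, volume]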
    def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        self.load_markets()
        market = self.market(symbol)
        # binance docs say that the default limit is 500, max 1500 for futures, max 1000 for spot markets
        # in reality, a time range wider than 500 candles won't work right
        defaultLimit = 500
        maxLimit = 1500
        price = self.safe_string(params, 'price')
        params = self.omit(params, 'price')
        limit = defaultLimit if (limit is None) else min(limit, maxLimit)
        request = {
            'interval': self.timeframes[timeframe],
            'limit': limit,
        }
        if price == 'index':
            request['pair'] = market['id']  # Index price takes self argument instead of symbol
        else:
            request['symbol'] = market['id']
        # duration = self.parse_timeframe(timeframe)
        if since is not None:
            request['startTime'] = since
            #
            # It didn't work before without the endTime
            # https://github.com/ccxt/ccxt/issues/8454
            #
            #     if since > 0:
            #         endTime = self.sum(since, limit * duration * 1000 - 1)
            #         now = self.milliseconds()
            #         request['endTime'] = min(now, endTime)
            #
        method = 'publicGetKlines'
        if price == 'mark':
            if market['inverse']:
                method = 'dapiPublicGetMarkPriceKlines'
            else:
                method = 'fapiPublicGetMarkPriceKlines'
        elif price == 'index':
            if market['inverse']:
                method = 'dapiPublicGetIndexPriceKlines'
            else:
                method = 'fapiPublicGetIndexPriceKlines'
        elif market['linear']:
            method = 'fapiPublicGetKlines'
        elif market['inverse']:
            method = 'dapiPublicGetKlines'
        response = getattr(self, method)(self.extend(request, params))
        #
        #     [
        #         [1591478520000,"0.02501300","0.02501800","0.02500000","0.02500000","22.19000000",1591478579999,"0.55490906",40,"10.92900000","0.27336462","0"],
        #         [1591478580000,"0.02499600","0.02500900","0.02499400","0.02500300","21.34700000",1591478639999,"0.53370468",24,"7.53800000","0.18850725","0"],
        #         [1591478640000,"0.02500800","0.02501100","0.02500300","0.02500800","154.14200000",1591478699999,"3.85405839",97,"5.32300000","0.13312641","0"],
        #     ]
        #
        return self.parse_ohlcvs(response, market, timeframe, since, limit)

    def fetch_mark_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        request = {
            'price': 'mark',
        }
        return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))

    def fetch_index_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        request = {
            'price': 'index',
        }
        return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))

    def parse_trade(self, trade, market=None):
        if 'isDustTrade' in trade:
            return self.parse_dust_trade(trade, market)
        #
        # aggregate trades
        # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#compressedaggregate-trades-list
        #
        #     {
        #         "a": 26129,          # Aggregate tradeId
        #         "p": "0.01633102",   # Price
        #         "q": "4.70443515",   # Quantity
        #         "f": 27781,          # First tradeId
        #         "l": 27781,          # Last tradeId
        #         "T": 1498793709153,  # Timestamp
        #         "m": True,           # Was the buyer the maker?
        #         "M": True            # Was the trade the best price match?
# } # # recent public trades and old public trades # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#recent-trades-list # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#old-trade-lookup-market_data # # { # "id": 28457, # "price": "4.00000100", # "qty": "12.00000000", # "time": 1499865549590, # "isBuyerMaker": True, # "isBestMatch": True # } # # private trades # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#account-trade-list-user_data # # { # "symbol": "BNBBTC", # "id": 28457, # "orderId": 100234, # "price": "4.00000100", # "qty": "12.00000000", # "commission": "10.10000000", # "commissionAsset": "BNB", # "time": 1499865549590, # "isBuyer": True, # "isMaker": False, # "isBestMatch": True # } # # futures trades # https://binance-docs.github.io/apidocs/futures/en/#account-trade-list-user_data # # { # "accountId": 20, # "buyer": False, # "commission": "-0.07819010", # "commissionAsset": "USDT", # "counterPartyId": 653, # "id": 698759, # "maker": False, # "orderId": 25851813, # "price": "7819.01", # "qty": "0.002", # "quoteQty": "0.01563", # "realizedPnl": "-0.91539999", # "side": "SELL", # "symbol": "BTCUSDT", # "time": 1569514978020 # } # { # "symbol": "BTCUSDT", # "id": 477128891, # "orderId": 13809777875, # "side": "SELL", # "price": "38479.55", # "qty": "0.001", # "realizedPnl": "-0.00009534", # "marginAsset": "USDT", # "quoteQty": "38.47955", # "commission": "-0.00076959", # "commissionAsset": "USDT", # "time": 1612733566708, # "positionSide": "BOTH", # "maker": True, # "buyer": False # } # # {respType: FULL} # # { # "price": "4000.00000000", # "qty": "1.00000000", # "commission": "4.00000000", # "commissionAsset": "USDT", # "tradeId": "1234", # } # timestamp = self.safe_integer_2(trade, 'T', 'time') price = self.safe_string_2(trade, 'p', 'price') amount = self.safe_string_2(trade, 'q', 'qty') cost = self.safe_string_2(trade, 'quoteQty', 'baseQty') # inverse futures marketId = self.safe_string(trade, 'symbol') symbol = self.safe_symbol(marketId, market) id = self.safe_string_2(trade, 't', 'a') id = self.safe_string_2(trade, 'id', 'tradeId', id) side = None orderId = self.safe_string(trade, 'orderId') if 'm' in trade: side = 'sell' if trade['m'] else 'buy' # self is reversed intentionally elif 'isBuyerMaker' in trade: side = 'sell' if trade['isBuyerMaker'] else 'buy' elif 'side' in trade: side = self.safe_string_lower(trade, 'side') else: if 'isBuyer' in trade: side = 'buy' if trade['isBuyer'] else 'sell' # self is a True side fee = None if 'commission' in trade: fee = { 'cost': self.safe_string(trade, 'commission'), 'currency': self.safe_currency_code(self.safe_string(trade, 'commissionAsset')), } takerOrMaker = None if 'isMaker' in trade: takerOrMaker = 'maker' if trade['isMaker'] else 'taker' if 'maker' in trade: takerOrMaker = 'maker' if trade['maker'] else 'taker' return self.safe_trade({ 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': id, 'order': orderId, 'type': None, 'side': side, 'takerOrMaker': takerOrMaker, 'price': price, 'amount': amount, 'cost': cost, 'fee': fee, }, market) def fetch_trades(self, symbol, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], # 'fromId': 123, # ID to get aggregate trades from INCLUSIVE. # 'startTime': 456, # Timestamp in ms to get aggregate trades from INCLUSIVE. 
# 'endTime': 789, # Timestamp in ms to get aggregate trades until INCLUSIVE. # 'limit': 500, # default = 500, maximum = 1000 } defaultType = self.safe_string_2(self.options, 'fetchTrades', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') defaultMethod = None if type == 'future': defaultMethod = 'fapiPublicGetAggTrades' elif type == 'delivery': defaultMethod = 'dapiPublicGetAggTrades' else: defaultMethod = 'publicGetAggTrades' method = self.safe_string(self.options, 'fetchTradesMethod', defaultMethod) if method == 'publicGetAggTrades': if since is not None: request['startTime'] = since # https://github.com/ccxt/ccxt/issues/6400 # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#compressedaggregate-trades-list request['endTime'] = self.sum(since, 3600000) if type == 'future': method = 'fapiPublicGetAggTrades' elif type == 'delivery': method = 'dapiPublicGetAggTrades' elif method == 'publicGetHistoricalTrades': if type == 'future': method = 'fapiPublicGetHistoricalTrades' elif type == 'delivery': method = 'dapiPublicGetHistoricalTrades' if limit is not None: request['limit'] = limit # default = 500, maximum = 1000 # # Caveats: # - default limit(500) applies only if no other parameters set, trades up # to the maximum limit may be returned to satisfy other parameters # - if both limit and time window is set and time window contains more # trades than the limit then the last trades from the window are returned # - 'tradeId' accepted and returned by self method is "aggregate" trade id # which is different from actual trade id # - setting both fromId and time window results in error response = getattr(self, method)(self.extend(request, query)) # # aggregate trades # # [ # { # "a": 26129, # Aggregate tradeId # "p": "0.01633102", # Price # "q": "4.70443515", # Quantity # "f": 27781, # First tradeId # "l": 27781, # Last tradeId # "T": 1498793709153, # Timestamp # "m": True, # Was the buyer the maker? # "M": True # Was the trade the best price match? 
# } # ] # # recent public trades and historical public trades # # [ # { # "id": 28457, # "price": "4.00000100", # "qty": "12.00000000", # "time": 1499865549590, # "isBuyerMaker": True, # "isBestMatch": True # } # ] # return self.parse_trades(response, market, since, limit) def parse_order_status(self, status): statuses = { 'NEW': 'open', 'PARTIALLY_FILLED': 'open', 'FILLED': 'closed', 'CANCELED': 'canceled', 'PENDING_CANCEL': 'canceling', # currently unused 'REJECTED': 'rejected', 'EXPIRED': 'expired', } return self.safe_string(statuses, status, status) def parse_order(self, order, market=None): # # spot # # { # "symbol": "LTCBTC", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "0.0", # "cummulativeQuoteQty": "0.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "icebergQty": "0.0", # "time": 1499827319559, # "updateTime": 1499827319559, # "isWorking": True # } # # futures # # { # "symbol": "BTCUSDT", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "1.0", # "cumQuote": "10.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "updateTime": 1499827319559 # } # # createOrder with {"newOrderRespType": "FULL"} # # { # "symbol": "BTCUSDT", # "orderId": 5403233939, # "orderListId": -1, # "clientOrderId": "x-R4BD3S825e669e75b6c14f69a2c43e", # "transactTime": 1617151923742, # "price": "0.00000000", # "origQty": "0.00050000", # "executedQty": "0.00050000", # "cummulativeQuoteQty": "29.47081500", # "status": "FILLED", # "timeInForce": "GTC", # "type": "MARKET", # "side": "BUY", # "fills": [ # { # "price": "58941.63000000", # "qty": "0.00050000", # "commission": "0.00007050", # "commissionAsset": "BNB", # "tradeId": 737466631 # } # ] # } # # delivery # # { # "orderId": "18742727411", # "symbol": "ETHUSD_PERP", # "pair": "ETHUSD", # "status": "FILLED", # "clientOrderId": "x-xcKtGhcu3e2d1503fdd543b3b02419", # "price": "0", # "avgPrice": "4522.14", # "origQty": "1", # "executedQty": "1", # "cumBase": "0.00221134", # "timeInForce": "GTC", # "type": "MARKET", # "reduceOnly": False, # "closePosition": False, # "side": "SELL", # "positionSide": "BOTH", # "stopPrice": "0", # "workingType": "CONTRACT_PRICE", # "priceProtect": False, # "origType": "MARKET", # "time": "1636061952660", # "updateTime": "1636061952660" # } # status = self.parse_order_status(self.safe_string(order, 'status')) marketId = self.safe_string(order, 'symbol') symbol = self.safe_symbol(marketId, market) filled = self.safe_string(order, 'executedQty', '0') timestamp = None lastTradeTimestamp = None if 'time' in order: timestamp = self.safe_integer(order, 'time') elif 'transactTime' in order: timestamp = self.safe_integer(order, 'transactTime') elif 'updateTime' in order: if status == 'open': if Precise.string_gt(filled, '0'): lastTradeTimestamp = self.safe_integer(order, 'updateTime') else: timestamp = self.safe_integer(order, 'updateTime') average = self.safe_string(order, 'avgPrice') price = self.safe_string(order, 'price') amount = self.safe_string(order, 'origQty') # - Spot/Margin market: cummulativeQuoteQty # - Futures market: cumQuote. # Note self is not the actual cost, since Binance futures uses leverage to calculate margins. 
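        # Worked illustration(assumed numbers, not from any live response): a linear
        # futures order filled for 0.002 BTC at 7819.01 USDT would report
        # cumQuote = 0.002 * 7819.01 = 15.63802 USDT, while at 20x leverage only
        # ~0.78 USDT of margin is actually locked, which is why cumQuote is parsed
        # as 'cost' below but should not be read as margin spent.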
cost = self.safe_string_2(order, 'cummulativeQuoteQty', 'cumQuote') cost = self.safe_string(order, 'cumBase', cost) id = self.safe_string(order, 'orderId') type = self.safe_string_lower(order, 'type') side = self.safe_string_lower(order, 'side') fills = self.safe_value(order, 'fills', []) clientOrderId = self.safe_string(order, 'clientOrderId') timeInForce = self.safe_string(order, 'timeInForce') postOnly = (type == 'limit_maker') or (timeInForce == 'GTX') if type == 'limit_maker': type = 'limit' stopPriceString = self.safe_string(order, 'stopPrice') stopPrice = self.parse_number(self.omit_zero(stopPriceString)) return self.safe_order2({ 'info': order, 'id': id, 'clientOrderId': clientOrderId, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'lastTradeTimestamp': lastTradeTimestamp, 'symbol': symbol, 'type': type, 'timeInForce': timeInForce, 'postOnly': postOnly, 'side': side, 'price': price, 'stopPrice': stopPrice, 'amount': amount, 'cost': cost, 'average': average, 'filled': filled, 'remaining': None, 'status': status, 'fee': None, 'trades': fills, }, market) def create_reduce_only_order(self, symbol, type, side, amount, price=None, params={}): request = { 'reduceOnly': True, } return self.create_order(symbol, type, side, amount, price, self.extend(request, params)) def create_order(self, symbol, type, side, amount, price=None, params={}): self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'createOrder', 'defaultType', 'spot') orderType = self.safe_string(params, 'type', defaultType) clientOrderId = self.safe_string_2(params, 'newClientOrderId', 'clientOrderId') postOnly = self.safe_value(params, 'postOnly', False) params = self.omit(params, ['type', 'newClientOrderId', 'clientOrderId', 'postOnly']) reduceOnly = self.safe_value(params, 'reduceOnly') if reduceOnly is not None: if (orderType != 'future') and (orderType != 'delivery'): raise InvalidOrder(self.id + ' createOrder() does not support reduceOnly for ' + orderType + ' orders, reduceOnly orders are supported for futures and perpetuals only') method = 'privatePostOrder' if orderType == 'future': method = 'fapiPrivatePostOrder' elif orderType == 'delivery': method = 'dapiPrivatePostOrder' elif orderType == 'margin': method = 'sapiPostMarginOrder' # the next 5 lines are added to support for testing orders if market['spot']: test = self.safe_value(params, 'test', False) if test: method += 'Test' params = self.omit(params, 'test') # only supported for spot/margin api(all margin markets are spot markets) if postOnly: type = 'LIMIT_MAKER' uppercaseType = type.upper() validOrderTypes = self.safe_value(market['info'], 'orderTypes') if not self.in_array(uppercaseType, validOrderTypes): raise InvalidOrder(self.id + ' ' + type + ' is not a valid order type in market ' + symbol) request = { 'symbol': market['id'], 'type': uppercaseType, 'side': side.upper(), } if clientOrderId is None: broker = self.safe_value(self.options, 'broker') if broker is not None: brokerId = self.safe_string(broker, orderType) if brokerId is not None: request['newClientOrderId'] = brokerId + self.uuid22() else: request['newClientOrderId'] = clientOrderId if (orderType == 'spot') or (orderType == 'margin'): request['newOrderRespType'] = self.safe_value(self.options['newOrderRespType'], type, 'RESULT') # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills else: # delivery and future request['newOrderRespType'] = 'RESULT' # "ACK", "RESULT", default "ACK" # additional required fields depending 
on the order type timeInForceIsRequired = False priceIsRequired = False stopPriceIsRequired = False quantityIsRequired = False # # spot/margin # # LIMIT timeInForce, quantity, price # MARKET quantity or quoteOrderQty # STOP_LOSS quantity, stopPrice # STOP_LOSS_LIMIT timeInForce, quantity, price, stopPrice # TAKE_PROFIT quantity, stopPrice # TAKE_PROFIT_LIMIT timeInForce, quantity, price, stopPrice # LIMIT_MAKER quantity, price # # futures # # LIMIT timeInForce, quantity, price # MARKET quantity # STOP/TAKE_PROFIT quantity, price, stopPrice # STOP_MARKET stopPrice # TAKE_PROFIT_MARKET stopPrice # TRAILING_STOP_MARKET callbackRate # if uppercaseType == 'MARKET': quoteOrderQty = self.safe_value(self.options, 'quoteOrderQty', False) if quoteOrderQty: quoteOrderQty = self.safe_number(params, 'quoteOrderQty') precision = market['precision']['price'] if quoteOrderQty is not None: request['quoteOrderQty'] = self.decimal_to_precision(quoteOrderQty, TRUNCATE, precision, self.precisionMode) params = self.omit(params, 'quoteOrderQty') elif price is not None: request['quoteOrderQty'] = self.decimal_to_precision(amount * price, TRUNCATE, precision, self.precisionMode) else: quantityIsRequired = True else: quantityIsRequired = True elif uppercaseType == 'LIMIT': priceIsRequired = True timeInForceIsRequired = True quantityIsRequired = True elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'): stopPriceIsRequired = True quantityIsRequired = True if market['linear'] or market['inverse']: priceIsRequired = True elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'): quantityIsRequired = True stopPriceIsRequired = True priceIsRequired = True timeInForceIsRequired = True elif uppercaseType == 'LIMIT_MAKER': priceIsRequired = True quantityIsRequired = True elif uppercaseType == 'STOP': quantityIsRequired = True stopPriceIsRequired = True priceIsRequired = True elif (uppercaseType == 'STOP_MARKET') or (uppercaseType == 'TAKE_PROFIT_MARKET'): closePosition = self.safe_value(params, 'closePosition') if closePosition is None: quantityIsRequired = True stopPriceIsRequired = True elif uppercaseType == 'TRAILING_STOP_MARKET': quantityIsRequired = True callbackRate = self.safe_number(params, 'callbackRate') if callbackRate is None: raise InvalidOrder(self.id + ' createOrder() requires a callbackRate extra param for a ' + type + ' order') if quantityIsRequired: request['quantity'] = self.amount_to_precision(symbol, amount) if priceIsRequired: if price is None: raise InvalidOrder(self.id + ' createOrder() requires a price argument for a ' + type + ' order') request['price'] = self.price_to_precision(symbol, price) if timeInForceIsRequired: request['timeInForce'] = self.options['defaultTimeInForce'] # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel if stopPriceIsRequired: stopPrice = self.safe_number(params, 'stopPrice') if stopPrice is None: raise InvalidOrder(self.id + ' createOrder() requires a stopPrice extra param for a ' + type + ' order') else: params = self.omit(params, 'stopPrice') request['stopPrice'] = self.price_to_precision(symbol, stopPrice) response = getattr(self, method)(self.extend(request, params)) return self.parse_order(response, market) def fetch_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchOrder', 'defaultType', 'spot') type = 
self.safe_string(params, 'type', defaultType) method = 'privateGetOrder' if type == 'future': method = 'fapiPrivateGetOrder' elif type == 'delivery': method = 'dapiPrivateGetOrder' elif type == 'margin': method = 'sapiGetMarginOrder' request = { 'symbol': market['id'], } clientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId') if clientOrderId is not None: request['origClientOrderId'] = clientOrderId else: request['orderId'] = id query = self.omit(params, ['type', 'clientOrderId', 'origClientOrderId']) response = getattr(self, method)(self.extend(request, query)) return self.parse_order(response, market) def fetch_orders(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) method = 'privateGetAllOrders' if type == 'future': method = 'fapiPrivateGetAllOrders' elif type == 'delivery': method = 'dapiPrivateGetAllOrders' elif type == 'margin': method = 'sapiGetMarginAllOrders' request = { 'symbol': market['id'], } if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit query = self.omit(params, 'type') response = getattr(self, method)(self.extend(request, query)) # # spot # # [ # { # "symbol": "LTCBTC", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "0.0", # "cummulativeQuoteQty": "0.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "icebergQty": "0.0", # "time": 1499827319559, # "updateTime": 1499827319559, # "isWorking": True # } # ] # # futures # # [ # { # "symbol": "BTCUSDT", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "1.0", # "cumQuote": "10.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "updateTime": 1499827319559 # } # ] # return self.parse_orders(response, market, since, limit) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None query = None type = None request = {} if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') elif self.options['warnOnFetchOpenOrdersWithoutSymbol']: symbols = self.symbols numSymbols = len(symbols) fetchOpenOrdersRateLimit = int(numSymbols / 2) raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call self method frequently to avoid ban. 
Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress self warning message.') else: defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') method = 'privateGetOpenOrders' if type == 'future': method = 'fapiPrivateGetOpenOrders' elif type == 'delivery': method = 'dapiPrivateGetOpenOrders' elif type == 'margin': method = 'sapiGetMarginOpenOrders' response = getattr(self, method)(self.extend(request, query)) return self.parse_orders(response, market, since, limit) def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}): orders = self.fetch_orders(symbol, since, limit, params) return self.filter_by(orders, 'status', 'closed') def cancel_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) # https://github.com/ccxt/ccxt/issues/6507 origClientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId') request = { 'symbol': market['id'], # 'orderId': id, # 'origClientOrderId': id, } if origClientOrderId is None: request['orderId'] = id else: request['origClientOrderId'] = origClientOrderId method = 'privateDeleteOrder' if type == 'future': method = 'fapiPrivateDeleteOrder' elif type == 'delivery': method = 'dapiPrivateDeleteOrder' elif type == 'margin': method = 'sapiDeleteMarginOrder' query = self.omit(params, ['type', 'origClientOrderId', 'clientOrderId']) response = getattr(self, method)(self.extend(request, query)) return self.parse_order(response, market) def cancel_all_orders(self, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } defaultType = self.safe_string_2(self.options, 'cancelAllOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') method = 'privateDeleteOpenOrders' if type == 'margin': method = 'sapiDeleteMarginOpenOrders' elif type == 'future': method = 'fapiPrivateDeleteAllOpenOrders' elif type == 'delivery': method = 'dapiPrivateDeleteAllOpenOrders' response = getattr(self, method)(self.extend(request, query)) if isinstance(response, list): return self.parse_orders(response, market) else: return response def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchMyTrades', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') method = None if type == 'spot': method = 'privateGetMyTrades' elif type == 'margin': method = 'sapiGetMarginMyTrades' elif type == 'future': method = 'fapiPrivateGetUserTrades' elif type == 'delivery': method = 'dapiPrivateGetUserTrades' request = { 'symbol': market['id'], } if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit response = getattr(self, method)(self.extend(request, params)) # # spot trade # # [ # { # "symbol": "BNBBTC", # "id": 28457, # 
"orderId": 100234, # "price": "4.00000100", # "qty": "12.00000000", # "commission": "10.10000000", # "commissionAsset": "BNB", # "time": 1499865549590, # "isBuyer": True, # "isMaker": False, # "isBestMatch": True, # } # ] # # futures trade # # [ # { # "accountId": 20, # "buyer": False, # "commission": "-0.07819010", # "commissionAsset": "USDT", # "counterPartyId": 653, # "id": 698759, # "maker": False, # "orderId": 25851813, # "price": "7819.01", # "qty": "0.002", # "quoteQty": "0.01563", # "realizedPnl": "-0.91539999", # "side": "SELL", # "symbol": "BTCUSDT", # "time": 1569514978020 # } # ] # return self.parse_trades(response, market, since, limit) def fetch_my_dust_trades(self, symbol=None, since=None, limit=None, params={}): # # Binance provides an opportunity to trade insignificant(i.e. non-tradable and non-withdrawable) # token leftovers(of any asset) into `BNB` coin which in turn can be used to pay trading fees with it. # The corresponding trades history is called the `Dust Log` and can be requested via the following end-point: # https://github.com/binance-exchange/binance-official-api-docs/blob/master/wapi-api.md#dustlog-user_data # self.load_markets() request = {} if since is not None: request['startTime'] = since request['endTime'] = self.sum(since, 7776000000) response = self.sapiGetAssetDribblet(self.extend(request, params)) # { # "total": "4", # "userAssetDribblets": [ # { # "operateTime": "1627575731000", # "totalServiceChargeAmount": "0.00001453", # "totalTransferedAmount": "0.00072693", # "transId": "70899815863", # "userAssetDribbletDetails": [ # { # "fromAsset": "LTC", # "amount": "0.000006", # "transferedAmount": "0.00000267", # "serviceChargeAmount": "0.00000005", # "operateTime": "1627575731000", # "transId": "70899815863" # }, # { # "fromAsset": "GBP", # "amount": "0.15949157", # "transferedAmount": "0.00072426", # "serviceChargeAmount": "0.00001448", # "operateTime": "1627575731000", # "transId": "70899815863" # } # ] # }, # ] # } results = self.safe_value(response, 'userAssetDribblets', []) rows = self.safe_integer(response, 'total', 0) data = [] for i in range(0, rows): logs = self.safe_value(results[i], 'userAssetDribbletDetails', []) for j in range(0, len(logs)): logs[j]['isDustTrade'] = True data.append(logs[j]) trades = self.parse_trades(data, None, since, limit) return self.filter_by_since_limit(trades, since, limit) def parse_dust_trade(self, trade, market=None): # # { # "fromAsset": "USDT", # "amount": "0.009669", # "transferedAmount": "0.00002992", # "serviceChargeAmount": "0.00000059", # "operateTime": "1628076010000", # "transId": "71416578712", # "isDustTrade": True # } # orderId = self.safe_string(trade, 'transId') timestamp = self.safe_integer(trade, 'operateTime') currencyId = self.safe_string(trade, 'fromAsset') tradedCurrency = self.safe_currency_code(currencyId) bnb = self.currency('BNB') earnedCurrency = bnb['code'] applicantSymbol = earnedCurrency + '/' + tradedCurrency tradedCurrencyIsQuote = False if applicantSymbol in self.markets: tradedCurrencyIsQuote = True feeCostString = self.safe_string(trade, 'serviceChargeAmount') fee = { 'currency': earnedCurrency, 'cost': self.parse_number(feeCostString), } symbol = None amountString = None costString = None side = None if tradedCurrencyIsQuote: symbol = applicantSymbol amountString = self.safe_string(trade, 'transferedAmount') costString = self.safe_string(trade, 'amount') side = 'buy' else: symbol = tradedCurrency + '/' + earnedCurrency amountString = self.safe_string(trade, 'amount') costString = 
    def parse_dust_trade(self, trade, market=None):
        #
        #     {
        #         "fromAsset": "USDT",
        #         "amount": "0.009669",
        #         "transferedAmount": "0.00002992",
        #         "serviceChargeAmount": "0.00000059",
        #         "operateTime": "1628076010000",
        #         "transId": "71416578712",
        #         "isDustTrade": True
        #     }
        #
        orderId = self.safe_string(trade, 'transId')
        timestamp = self.safe_integer(trade, 'operateTime')
        currencyId = self.safe_string(trade, 'fromAsset')
        tradedCurrency = self.safe_currency_code(currencyId)
        bnb = self.currency('BNB')
        earnedCurrency = bnb['code']
        applicantSymbol = earnedCurrency + '/' + tradedCurrency
        tradedCurrencyIsQuote = False
        if applicantSymbol in self.markets:
            tradedCurrencyIsQuote = True
        feeCostString = self.safe_string(trade, 'serviceChargeAmount')
        fee = {
            'currency': earnedCurrency,
            'cost': self.parse_number(feeCostString),
        }
        symbol = None
        amountString = None
        costString = None
        side = None
        if tradedCurrencyIsQuote:
            symbol = applicantSymbol
            amountString = self.safe_string(trade, 'transferedAmount')
            costString = self.safe_string(trade, 'amount')
            side = 'buy'
        else:
            symbol = tradedCurrency + '/' + earnedCurrency
            amountString = self.safe_string(trade, 'amount')
            costString = self.safe_string(trade, 'transferedAmount')
            side = 'sell'
        priceString = None
        if costString is not None:
            if amountString:
                priceString = Precise.string_div(costString, amountString)
        id = None
        amount = self.parse_number(amountString)
        price = self.parse_number(priceString)
        cost = self.parse_number(costString)
        type = None
        takerOrMaker = None
        return {
            'id': id,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'order': orderId,
            'type': type,
            'takerOrMaker': takerOrMaker,
            'side': side,
            'amount': amount,
            'price': price,
            'cost': cost,
            'fee': fee,
            'info': trade,
        }

    def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        self.load_markets()
        currency = None
        response = None
        request = {}
        legalMoney = self.safe_value(self.options, 'legalMoney', {})
        if code in legalMoney:
            if code is not None:
                currency = self.currency(code)
            request['transactionType'] = 0
            if since is not None:
                request['beginTime'] = since
            raw = self.sapiGetFiatOrders(self.extend(request, params))
            response = self.safe_value(raw, 'data')
            #
            #     {
            #         "code": "000000",
            #         "message": "success",
            #         "data": [
            #             {
            #                 "orderNo": "25ced37075c1470ba8939d0df2316e23",
            #                 "fiatCurrency": "EUR",
            #                 "indicatedAmount": "15.00",
            #                 "amount": "15.00",
            #                 "totalFee": "0.00",
            #                 "method": "card",
            #                 "status": "Failed",
            #                 "createTime": 1627501026000,
            #                 "updateTime": 1627501027000
            #             }
            #         ],
            #         "total": 1,
            #         "success": True
            #     }
            #
        else:
            if code is not
None: currency = self.currency(code) request['coin'] = currency['id'] if since is not None: request['startTime'] = since # max 3 months range https://github.com/ccxt/ccxt/issues/6495 request['endTime'] = self.sum(since, 7776000000) if limit is not None: request['limit'] = limit response = self.sapiGetCapitalWithdrawHistory(self.extend(request, params)) # [ # { # "id": "69e53ad305124b96b43668ceab158a18", # "amount": "28.75", # "transactionFee": "0.25", # "coin": "XRP", # "status": 6, # "address": "r3T75fuLjX51mmfb5Sk1kMNuhBgBPJsjza", # "addressTag": "101286922", # "txId": "19A5B24ED0B697E4F0E9CD09FCB007170A605BC93C9280B9E6379C5E6EF0F65A", # "applyTime": "2021-04-15 12:09:16", # "network": "XRP", # "transferType": 0 # }, # { # "id": "9a67628b16ba4988ae20d329333f16bc", # "amount": "20", # "transactionFee": "20", # "coin": "USDT", # "status": 6, # "address": "0x0AB991497116f7F5532a4c2f4f7B1784488628e1", # "txId": "0x77fbf2cf2c85b552f0fd31fd2e56dc95c08adae031d96f3717d8b17e1aea3e46", # "applyTime": "2021-04-15 12:06:53", # "network": "ETH", # "transferType": 0 # }, # { # "id": "a7cdc0afbfa44a48bd225c9ece958fe2", # "amount": "51", # "transactionFee": "1", # "coin": "USDT", # "status": 6, # "address": "TYDmtuWL8bsyjvcauUTerpfYyVhFtBjqyo", # "txId": "168a75112bce6ceb4823c66726ad47620ad332e69fe92d9cb8ceb76023f9a028", # "applyTime": "2021-04-13 12:46:59", # "network": "TRX", # "transferType": 0 # } # ] return self.parse_transactions(response, currency, since, limit) def parse_transaction_status_by_type(self, status, type=None): statusesByType = { 'deposit': { '0': 'pending', '1': 'ok', # Fiat # Processing, Failed, Successful, Finished, Refunding, Refunded, Refund Failed, Order Partial credit Stopped 'Processing': 'pending', 'Failed': 'failed', 'Successful': 'ok', 'Refunding': 'canceled', 'Refunded': 'canceled', 'Refund Failed': 'failed', }, 'withdrawal': { '0': 'pending', # Email Sent '1': 'canceled', # Cancelled(different from 1 = ok in deposits) '2': 'pending', # Awaiting Approval '3': 'failed', # Rejected '4': 'pending', # Processing '5': 'failed', # Failure '6': 'ok', # Completed # Fiat # Processing, Failed, Successful, Finished, Refunding, Refunded, Refund Failed, Order Partial credit Stopped 'Processing': 'pending', 'Failed': 'failed', 'Successful': 'ok', 'Refunding': 'canceled', 'Refunded': 'canceled', 'Refund Failed': 'failed', }, } statuses = self.safe_value(statusesByType, type, {}) return self.safe_string(statuses, status, status) def parse_transaction(self, transaction, currency=None): # # fetchDeposits # # { # "amount": "4500", # "coin": "USDT", # "network": "BSC", # "status": 1, # "address": "0xc9c923c87347ca0f3451d6d308ce84f691b9f501", # "addressTag": "", # "txId": "Internal transfer 51376627901", # "insertTime": 1618394381000, # "transferType": 1, # "confirmTimes": "1/15" # } # # fetchWithdrawals # # { # "id": "69e53ad305124b96b43668ceab158a18", # "amount": "28.75", # "transactionFee": "0.25", # "coin": "XRP", # "status": 6, # "address": "r3T75fuLjX51mmfb5Sk1kMNuhBgBPJsjza", # "addressTag": "101286922", # "txId": "19A5B24ED0B697E4F0E9CD09FCB007170A605BC93C9280B9E6379C5E6EF0F65A", # "applyTime": "2021-04-15 12:09:16", # "network": "XRP", # "transferType": 0 # } # # fiat transaction # withdraw # { # "orderNo": "CJW684897551397171200", # "fiatCurrency": "GBP", # "indicatedAmount": "29.99", # "amount": "28.49", # "totalFee": "1.50", # "method": "bank transfer", # "status": "Successful", # "createTime": 1614898701000, # "updateTime": 1614898820000 # } # # deposit # { # "orderNo": 
"25ced37075c1470ba8939d0df2316e23", # "fiatCurrency": "EUR", # "indicatedAmount": "15.00", # "amount": "15.00", # "totalFee": "0.00", # "method": "card", # "status": "Failed", # "createTime": "1627501026000", # "updateTime": "1627501027000" # } # id = self.safe_string_2(transaction, 'id', 'orderNo') address = self.safe_string(transaction, 'address') tag = self.safe_string(transaction, 'addressTag') # set but unused if tag is not None: if len(tag) < 1: tag = None txid = self.safe_string(transaction, 'txId') if (txid is not None) and (txid.find('Internal transfer ') >= 0): txid = txid[18:] currencyId = self.safe_string_2(transaction, 'coin', 'fiatCurrency') code = self.safe_currency_code(currencyId, currency) timestamp = None insertTime = self.safe_integer_2(transaction, 'insertTime', 'createTime') applyTime = self.parse8601(self.safe_string(transaction, 'applyTime')) type = self.safe_string(transaction, 'type') if type is None: if (insertTime is not None) and (applyTime is None): type = 'deposit' timestamp = insertTime elif (insertTime is None) and (applyTime is not None): type = 'withdrawal' timestamp = applyTime status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type) amount = self.safe_number(transaction, 'amount') feeCost = self.safe_number_2(transaction, 'transactionFee', 'totalFee') fee = None if feeCost is not None: fee = {'currency': code, 'cost': feeCost} updated = self.safe_integer_2(transaction, 'successTime', 'updateTime') internal = self.safe_integer(transaction, 'transferType', False) internal = True if internal else False return { 'info': transaction, 'id': id, 'txid': txid, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'address': address, 'addressTo': address, 'addressFrom': None, 'tag': tag, 'tagTo': tag, 'tagFrom': None, 'type': type, 'amount': amount, 'currency': code, 'status': status, 'updated': updated, 'internal': internal, 'fee': fee, } def parse_transfer_status(self, status): statuses = { 'CONFIRMED': 'ok', } return self.safe_string(statuses, status, status) def parse_transfer(self, transfer, currency=None): # # transfer # # { # "tranId":13526853623 # } # # fetchTransfers # # { # timestamp: 1614640878000, # asset: 'USDT', # amount: '25', # type: 'MAIN_UMFUTURE', # status: 'CONFIRMED', # tranId: 43000126248 # } # id = self.safe_string(transfer, 'tranId') currencyId = self.safe_string(transfer, 'asset') code = self.safe_currency_code(currencyId, currency) amount = self.safe_number(transfer, 'amount') type = self.safe_string(transfer, 'type') fromAccount = None toAccount = None typesByAccount = self.safe_value(self.options, 'typesByAccount', {}) if type is not None: parts = type.split('_') fromAccount = self.safe_value(parts, 0) toAccount = self.safe_value(parts, 1) fromAccount = self.safe_string(typesByAccount, fromAccount, fromAccount) toAccount = self.safe_string(typesByAccount, toAccount, toAccount) timestamp = self.safe_integer(transfer, 'timestamp') status = self.parse_transfer_status(self.safe_string(transfer, 'status')) return { 'info': transfer, 'id': id, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'currency': code, 'amount': amount, 'fromAccount': fromAccount, 'toAccount': toAccount, 'status': status, } def parse_income(self, income, market=None): # # { # "symbol": "ETHUSDT", # "incomeType": "FUNDING_FEE", # "income": "0.00134317", # "asset": "USDT", # "time": "1621584000000", # "info": "FUNDING_FEE", # "tranId": "4480321991774044580", # "tradeId": "" # } # marketId = 
self.safe_string(income, 'symbol') symbol = self.safe_symbol(marketId, market) amount = self.safe_number(income, 'income') currencyId = self.safe_string(income, 'asset') code = self.safe_currency_code(currencyId) id = self.safe_string(income, 'tranId') timestamp = self.safe_integer(income, 'time') return { 'info': income, 'symbol': symbol, 'code': code, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'id': id, 'amount': amount, } def parse_incomes(self, incomes, market=None, since=None, limit=None): result = [] for i in range(0, len(incomes)): entry = incomes[i] parsed = self.parse_income(entry, market) result.append(parsed) sorted = self.sort_by(result, 'timestamp') return self.filter_by_since_limit(sorted, since, limit) def transfer(self, code, amount, fromAccount, toAccount, params={}): self.load_markets() currency = self.currency(code) type = self.safe_string(params, 'type') if type is None: accountsByType = self.safe_value(self.options, 'accountsByType', {}) fromAccount = fromAccount.lower() toAccount = toAccount.lower() fromId = self.safe_string(accountsByType, fromAccount) toId = self.safe_string(accountsByType, toAccount) if fromId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' fromAccount must be one of ' + ', '.join(keys)) if toId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' toAccount must be one of ' + ', '.join(keys)) type = fromId + '_' + toId request = { 'asset': currency['id'], 'amount': self.currency_to_precision(code, amount), 'type': type, } response = self.sapiPostAssetTransfer(self.extend(request, params)) # # { # "tranId":13526853623 # } # transfer = self.parse_transfer(response, currency) return self.extend(transfer, { 'amount': amount, 'currency': code, 'fromAccount': fromAccount, 'toAccount': toAccount, }) def fetch_transfers(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None if code is not None: currency = self.currency(code) defaultType = self.safe_string_2(self.options, 'fetchTransfers', 'defaultType', 'spot') fromAccount = self.safe_string(params, 'fromAccount', defaultType) defaultTo = 'spot' if (fromAccount == 'future') else 'future' toAccount = self.safe_string(params, 'toAccount', defaultTo) type = self.safe_string(params, 'type') accountsByType = self.safe_value(self.options, 'accountsByType', {}) fromId = self.safe_string(accountsByType, fromAccount) toId = self.safe_string(accountsByType, toAccount) if type is None: if fromId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' fromAccount parameter must be one of ' + ', '.join(keys)) if toId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' toAccount parameter must be one of ' + ', '.join(keys)) type = fromId + '_' + toId request = { 'type': type, } if since is not None: request['startTime'] = since if limit is not None: request['size'] = limit response = self.sapiGetAssetTransfer(self.extend(request, params)) # # { # total: 3, # rows: [ # { # timestamp: 1614640878000, # asset: 'USDT', # amount: '25', # type: 'MAIN_UMFUTURE', # status: 'CONFIRMED', # tranId: 43000126248 # }, # ] # } # rows = self.safe_value(response, 'rows', []) return self.parse_transfers(rows, currency, since, limit) def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'coin': currency['id'], # 'network': 'ETH', # 'BSC', 'XMR', you can get network and isDefault in networkList in the response of 
sapiGetCapitalConfigDetail } networks = self.safe_value(self.options, 'networks', {}) network = self.safe_string_upper(params, 'network') # self line allows the user to specify either ERC20 or ETH network = self.safe_string(networks, network, network) # handle ERC20>ETH alias if network is not None: request['network'] = network params = self.omit(params, 'network') # has support for the 'network' parameter # https://binance-docs.github.io/apidocs/spot/en/#deposit-address-supporting-network-user_data response = self.sapiGetCapitalDepositAddress(self.extend(request, params)) # # { # currency: 'XRP', # address: 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh', # tag: '108618262', # info: { # coin: 'XRP', # address: 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh', # tag: '108618262', # url: 'https://bithomp.com/explorer/rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh' # } # } # address = self.safe_string(response, 'address') url = self.safe_string(response, 'url') impliedNetwork = None if url is not None: reverseNetworks = self.safe_value(self.options, 'reverseNetworks', {}) parts = url.split('/') topLevel = self.safe_string(parts, 2) if (topLevel == 'blockchair.com') or (topLevel == 'viewblock.io'): subLevel = self.safe_string(parts, 3) if subLevel is not None: topLevel = topLevel + '/' + subLevel impliedNetwork = self.safe_string(reverseNetworks, topLevel) impliedNetworks = self.safe_value(self.options, 'impliedNetworks', { 'ETH': {'ERC20': 'ETH'}, 'TRX': {'TRC20': 'TRX'}, }) if code in impliedNetworks: conversion = self.safe_value(impliedNetworks, code, {}) impliedNetwork = self.safe_string(conversion, impliedNetwork, impliedNetwork) tag = self.safe_string(response, 'tag', '') if len(tag) == 0: tag = None self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'network': impliedNetwork, 'info': response, } def fetch_funding_fees(self, codes=None, params={}): self.load_markets() response = self.sapiGetCapitalConfigGetall(params) # # [ # { # coin: 'BAT', # depositAllEnable: True, # withdrawAllEnable: True, # name: 'Basic Attention Token', # free: '0', # locked: '0', # freeze: '0', # withdrawing: '0', # ipoing: '0', # ipoable: '0', # storage: '0', # isLegalMoney: False, # trading: True, # networkList: [ # { # network: 'BNB', # coin: 'BAT', # withdrawIntegerMultiple: '0.00000001', # isDefault: False, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'The name of self asset is Basic Attention Token(BAT). Both a MEMO and an Address are required to successfully deposit your BEP2 tokens to Binance.', # name: 'BEP2', # resetAddressStatus: False, # addressRegex: '^(bnb1)[0-9a-z]{38}$', # memoRegex: '^[0-9A-Za-z\\-_]{1,120}$', # withdrawFee: '0.27', # withdrawMin: '0.54', # withdrawMax: '10000000000', # minConfirm: '1', # unLockConfirm: '0' # }, # { # network: 'BSC', # coin: 'BAT', # withdrawIntegerMultiple: '0.00000001', # isDefault: False, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'The name of self asset is Basic Attention Token. 
Please ensure you are depositing Basic Attention Token(BAT) tokens under the contract address ending in 9766e.', # name: 'BEP20(BSC)', # resetAddressStatus: False, # addressRegex: '^(0x)[0-9A-Fa-f]{40}$', # memoRegex: '', # withdrawFee: '0.27', # withdrawMin: '0.54', # withdrawMax: '10000000000', # minConfirm: '15', # unLockConfirm: '0' # }, # { # network: 'ETH', # coin: 'BAT', # withdrawIntegerMultiple: '0.00000001', # isDefault: True, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'The name of self asset is Basic Attention Token. Please ensure you are depositing Basic Attention Token(BAT) tokens under the contract address ending in 887ef.', # name: 'ERC20', # resetAddressStatus: False, # addressRegex: '^(0x)[0-9A-Fa-f]{40}$', # memoRegex: '', # withdrawFee: '27', # withdrawMin: '54', # withdrawMax: '10000000000', # minConfirm: '12', # unLockConfirm: '0' # } # ] # } # ] # withdrawFees = {} for i in range(0, len(response)): entry = response[i] currencyId = self.safe_string(entry, 'coin') code = self.safe_currency_code(currencyId) networkList = self.safe_value(entry, 'networkList') withdrawFees[code] = {} for j in range(0, len(networkList)): networkEntry = networkList[j] networkId = self.safe_string(networkEntry, 'network') networkCode = self.safe_currency_code(networkId) fee = self.safe_number(networkEntry, 'withdrawFee') withdrawFees[code][networkCode] = fee return { 'withdraw': withdrawFees, 'deposit': {}, 'info': response, } def withdraw(self, code, amount, address, tag=None, params={}): tag, params = self.handle_withdraw_tag_and_params(tag, params) self.check_address(address) self.load_markets() currency = self.currency(code) request = { 'coin': currency['id'], 'address': address, 'amount': amount, # https://binance-docs.github.io/apidocs/spot/en/#withdraw-sapi # issue sapiGetCapitalConfigGetall() to get networks for withdrawing USDT ERC20 vs USDT Omni # 'network': 'ETH', # 'BTC', 'TRX', etc, optional } if tag is not None: request['addressTag'] = tag networks = self.safe_value(self.options, 'networks', {}) network = self.safe_string_upper(params, 'network') # self line allows the user to specify either ERC20 or ETH network = self.safe_string(networks, network, network) # handle ERC20>ETH alias if network is not None: request['network'] = network params = self.omit(params, 'network') response = self.sapiPostCapitalWithdrawApply(self.extend(request, params)) # {id: '9a67628b16ba4988ae20d329333f16bc'} return { 'info': response, 'id': self.safe_string(response, 'id'), } def parse_trading_fee(self, fee, market=None): # # { # "symbol": "ADABNB", # "makerCommission": 0.001, # "takerCommission": 0.001 # } # marketId = self.safe_string(fee, 'symbol') symbol = self.safe_symbol(marketId) return { 'info': fee, 'symbol': symbol, 'maker': self.safe_number(fee, 'makerCommission'), 'taker': self.safe_number(fee, 'takerCommission'), } def fetch_trading_fee(self, symbol, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } response = self.sapiGetAssetTradeFee(self.extend(request, params)) # # [ # { # "symbol": "BTCUSDT", # "makerCommission": "0.001", # "takerCommission": "0.001" # } # ] # first = self.safe_value(response, 0, {}) return self.parse_trading_fee(first) def fetch_trading_fees(self, params={}): self.load_markets() method = None defaultType = self.safe_string_2(self.options, 'fetchFundingRates', 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) query = 
self.omit(params, 'type') if (type == 'spot') or (type == 'margin'): method = 'sapiGetAssetTradeFee' elif type == 'future': method = 'fapiPrivateGetAccount' elif type == 'delivery': method = 'dapiPrivateGetAccount' response = getattr(self, method)(query) # # sapi / spot # # [ # { # "symbol": "ZRXBNB", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # { # "symbol": "ZRXBTC", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # ] # # fapi / future / linear # # { # "feeTier": 0, # account commission tier # "canTrade": True, # if can trade # "canDeposit": True, # if can transfer in asset # "canWithdraw": True, # if can transfer out asset # "updateTime": 0, # "totalInitialMargin": "0.00000000", # total initial margin required with current mark price(useless with isolated positions), only for USDT asset # "totalMaintMargin": "0.00000000", # total maintenance margin required, only for USDT asset # "totalWalletBalance": "23.72469206", # total wallet balance, only for USDT asset # "totalUnrealizedProfit": "0.00000000", # total unrealized profit, only for USDT asset # "totalMarginBalance": "23.72469206", # total margin balance, only for USDT asset # "totalPositionInitialMargin": "0.00000000", # initial margin required for positions with current mark price, only for USDT asset # "totalOpenOrderInitialMargin": "0.00000000", # initial margin required for open orders with current mark price, only for USDT asset # "totalCrossWalletBalance": "23.72469206", # crossed wallet balance, only for USDT asset # "totalCrossUnPnl": "0.00000000", # unrealized profit of crossed positions, only for USDT asset # "availableBalance": "23.72469206", # available balance, only for USDT asset # "maxWithdrawAmount": "23.72469206" # maximum amount for transfer out, only for USDT asset # ...
# } # # dapi / delivery / inverse # # { # "canDeposit": True, # "canTrade": True, # "canWithdraw": True, # "feeTier": 2, # "updateTime": 0 # } # if (type == 'spot') or (type == 'margin'): # # [ # { # "symbol": "ZRXBNB", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # { # "symbol": "ZRXBTC", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # ] # result = {} for i in range(0, len(response)): fee = self.parse_trading_fee(response[i]) symbol = fee['symbol'] result[symbol] = fee return result elif type == 'future': # # { # "feeTier": 0, # account commission tier # "canTrade": True, # if can trade # "canDeposit": True, # if can transfer in asset # "canWithdraw": True, # if can transfer out asset # "updateTime": 0, # "totalInitialMargin": "0.00000000", # total initial margin required with current mark price(useless with isolated positions), only for USDT asset # "totalMaintMargin": "0.00000000", # total maintenance margin required, only for USDT asset # "totalWalletBalance": "23.72469206", # total wallet balance, only for USDT asset # "totalUnrealizedProfit": "0.00000000", # total unrealized profit, only for USDT asset # "totalMarginBalance": "23.72469206", # total margin balance, only for USDT asset # "totalPositionInitialMargin": "0.00000000", # initial margin required for positions with current mark price, only for USDT asset # "totalOpenOrderInitialMargin": "0.00000000", # initial margin required for open orders with current mark price, only for USDT asset # "totalCrossWalletBalance": "23.72469206", # crossed wallet balance, only for USDT asset # "totalCrossUnPnl": "0.00000000", # unrealized profit of crossed positions, only for USDT asset # "availableBalance": "23.72469206", # available balance, only for USDT asset # "maxWithdrawAmount": "23.72469206" # maximum amount for transfer out, only for USDT asset # ...
# } # symbols = list(self.markets.keys()) result = {} feeTier = self.safe_integer(response, 'feeTier') feeTiers = self.fees[type]['trading']['tiers'] maker = feeTiers['maker'][feeTier][1] taker = feeTiers['taker'][feeTier][1] for i in range(0, len(symbols)): symbol = symbols[i] result[symbol] = { 'info': { 'feeTier': feeTier, }, 'symbol': symbol, 'maker': maker, 'taker': taker, } return result elif type == 'delivery': # # { # "canDeposit": True, # "canTrade": True, # "canWithdraw": True, # "feeTier": 2, # "updateTime": 0 # } # symbols = list(self.markets.keys()) result = {} feeTier = self.safe_integer(response, 'feeTier') feeTiers = self.fees[type]['trading']['tiers'] maker = feeTiers['maker'][feeTier][1] taker = feeTiers['taker'][feeTier][1] for i in range(0, len(symbols)): symbol = symbols[i] result[symbol] = { 'info': { 'feeTier': feeTier, }, 'symbol': symbol, 'maker': maker, 'taker': taker, } return result def futures_transfer(self, code, amount, type, params={}): if (type < 1) or (type > 4): raise ArgumentsRequired(self.id + ' type must be between 1 and 4') self.load_markets() currency = self.currency(code) request = { 'asset': currency['id'], 'amount': amount, 'type': type, } response = self.sapiPostFuturesTransfer(self.extend(request, params)) # # { # "tranId": 100000001 # } # return self.parse_transfer(response, currency) def fetch_funding_rate(self, symbol, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } method = None if market['linear']: method = 'fapiPublicGetPremiumIndex' elif market['inverse']: method = 'dapiPublicGetPremiumIndex' else: raise NotSupported(self.id + ' fetchFundingRate() supports linear and inverse contracts only') response = getattr(self, method)(self.extend(request, params)) if market['inverse']: response = response[0] # # { # "symbol": "BTCUSDT", # "markPrice": "45802.81129892", # "indexPrice": "45745.47701915", # "estimatedSettlePrice": "45133.91753671", # "lastFundingRate": "0.00063521", # "interestRate": "0.00010000", # "nextFundingTime": "1621267200000", # "time": "1621252344001" # } # return self.parse_funding_rate(response, market) def fetch_funding_rate_history(self, symbol=None, since=None, limit=None, params={}): # # Gets a history of funding rates with their timestamps # (param) symbol: Future currency pair(e.g. 
"BTC/USDT") # (param) limit: maximum number of data points returned # (param) since: Unix timestamp in miliseconds for the time of the earliest requested funding rate # (param) params: Object containing more params for the request # - until: Unix timestamp in miliseconds for the time of the earliest requested funding rate # return: [{symbol, fundingRate, timestamp}] # self.load_markets() request = {} method = None defaultType = self.safe_string_2(self.options, 'fetchFundingRateHistory', 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') if type == 'future': method = 'fapiPublicGetFundingRate' elif type == 'delivery': method = 'dapiPublicGetFundingRate' if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] if market['linear']: method = 'fapiPublicGetFundingRate' elif market['inverse']: method = 'dapiPublicGetFundingRate' if method is None: raise NotSupported(self.id + ' fetchFundingRateHistory() not supported for ' + type + ' markets') if since is not None: request['startTime'] = since till = self.safe_integer(params, 'till') # unified in milliseconds endTime = self.safe_string(params, 'endTime', till) # exchange-specific in milliseconds params = self.omit(params, ['endTime', 'till']) if endTime is not None: request['endTime'] = endTime if limit is not None: request['limit'] = limit response = getattr(self, method)(self.extend(request, params)) # # { # "symbol": "BTCUSDT", # "fundingRate": "0.00063521", # "fundingTime": "1621267200000", # } # rates = [] for i in range(0, len(response)): entry = response[i] timestamp = self.safe_integer(entry, 'fundingTime') rates.append({ 'info': entry, 'symbol': self.safe_symbol(self.safe_string(entry, 'symbol')), 'fundingRate': self.safe_number(entry, 'fundingRate'), 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), }) sorted = self.sort_by(rates, 'timestamp') return self.filter_by_symbol_since_limit(sorted, symbol, since, limit) def fetch_funding_rates(self, symbols=None, params={}): self.load_markets() method = None defaultType = self.safe_string_2(self.options, 'fetchFundingRates', 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') if type == 'future': method = 'fapiPublicGetPremiumIndex' elif type == 'delivery': method = 'dapiPublicGetPremiumIndex' else: raise NotSupported(self.id + ' fetchFundingRates() supports linear and inverse contracts only') response = getattr(self, method)(query) result = [] for i in range(0, len(response)): entry = response[i] parsed = self.parse_funding_rate(entry) result.append(parsed) return self.filter_by_array(result, 'symbol', symbols) def parse_funding_rate(self, premiumIndex, market=None): # ensure it matches with https://www.binance.com/en/futures/funding-history/0 # # { # "symbol": "BTCUSDT", # "markPrice": "45802.81129892", # "indexPrice": "45745.47701915", # "estimatedSettlePrice": "45133.91753671", # "lastFundingRate": "0.00063521", # "interestRate": "0.00010000", # "nextFundingTime": "1621267200000", # "time": "1621252344001" # } # timestamp = self.safe_integer(premiumIndex, 'time') marketId = self.safe_string(premiumIndex, 'symbol') symbol = self.safe_symbol(marketId, market) markPrice = self.safe_number(premiumIndex, 'markPrice') indexPrice = self.safe_number(premiumIndex, 'indexPrice') interestRate = self.safe_number(premiumIndex, 'interestRate') estimatedSettlePrice = self.safe_number(premiumIndex, 'estimatedSettlePrice') nextFundingRate = 
self.safe_number(premiumIndex, 'lastFundingRate') nextFundingTime = self.safe_integer(premiumIndex, 'nextFundingTime') previousFundingTime = nextFundingTime - (8 * 3600000) return { 'info': premiumIndex, 'symbol': symbol, 'markPrice': markPrice, 'indexPrice': indexPrice, 'interestRate': interestRate, 'estimatedSettlePrice': estimatedSettlePrice, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'previousFundingRate': None, 'nextFundingRate': nextFundingRate, 'previousFundingTimestamp': previousFundingTime, # subtract 8 hours 'nextFundingTimestamp': nextFundingTime, 'previousFundingDatetime': self.iso8601(previousFundingTime), 'nextFundingDatetime': self.iso8601(nextFundingTime), } def parse_account_positions(self, account): positions = self.safe_value(account, 'positions') assets = self.safe_value(account, 'assets') balances = {} for i in range(0, len(assets)): entry = assets[i] currencyId = self.safe_string(entry, 'asset') code = self.safe_currency_code(currencyId) crossWalletBalance = self.safe_string(entry, 'crossWalletBalance') crossUnPnl = self.safe_string(entry, 'crossUnPnl') balances[code] = { 'crossMargin': Precise.string_add(crossWalletBalance, crossUnPnl), 'crossWalletBalance': crossWalletBalance, } result = [] for i in range(0, len(positions)): position = positions[i] marketId = self.safe_string(position, 'symbol') market = self.safe_market(marketId) code = market['quote'] if (self.options['defaultType'] == 'future') else market['base'] # sometimes not all the codes are correctly returned... if code in balances: parsed = self.parse_account_position(self.extend(position, { 'crossMargin': balances[code]['crossMargin'], 'crossWalletBalance': balances[code]['crossWalletBalance'], }), market) result.append(parsed) return result def parse_account_position(self, position, market=None): # # usdm # { # "symbol": "BTCBUSD", # "initialMargin": "0", # "maintMargin": "0", # "unrealizedProfit": "0.00000000", # "positionInitialMargin": "0", # "openOrderInitialMargin": "0", # "leverage": "20", # "isolated": False, # "entryPrice": "0.0000", # "maxNotional": "100000", # "positionSide": "BOTH", # "positionAmt": "0.000", # "notional": "0", # "isolatedWallet": "0", # "updateTime": "0", # "crossMargin": "100.93634809", # } # # coinm # { # "symbol": "BTCUSD_210625", # "initialMargin": "0.00024393", # "maintMargin": "0.00002439", # "unrealizedProfit": "-0.00000163", # "positionInitialMargin": "0.00024393", # "openOrderInitialMargin": "0", # "leverage": "10", # "isolated": False, # "positionSide": "BOTH", # "entryPrice": "41021.20000069", # "maxQty": "100", # "notionalValue": "0.00243939", # "isolatedWallet": "0", # "crossMargin": "0.314" # "crossWalletBalance": "34", # } # marketId = self.safe_string(position, 'symbol') market = self.safe_market(marketId, market) symbol = market['symbol'] leverageString = self.safe_string(position, 'leverage') leverage = int(leverageString) initialMarginString = self.safe_string(position, 'initialMargin') initialMargin = self.parse_number(initialMarginString) initialMarginPercentageString = Precise.string_div('1', leverageString, 8) rational = (1000 % leverage) == 0 if not rational: initialMarginPercentageString = Precise.string_div(Precise.string_add(initialMarginPercentageString, '1e-8'), '1', 8) usdm = ('notional' in position) maintenanceMarginString = self.safe_string(position, 'maintMargin') maintenanceMargin = self.parse_number(maintenanceMarginString) entryPriceString = self.safe_string(position, 'entryPrice') entryPrice = 
self.parse_number(entryPriceString) notionalString = self.safe_string_2(position, 'notional', 'notionalValue') notionalStringAbs = Precise.string_abs(notionalString) notionalFloat = float(notionalString) notionalFloatAbs = float(notionalStringAbs) notional = self.parse_number(Precise.string_abs(notionalString)) contractsString = self.safe_string(position, 'positionAmt') contractsStringAbs = Precise.string_abs(contractsString) if contractsString is None: entryNotional = Precise.string_mul(Precise.string_mul(leverageString, initialMarginString), entryPriceString) contractsString = Precise.string_div(entryNotional, market['contractSize']) contractsStringAbs = Precise.string_div(Precise.string_add(contractsString, '0.5'), '1', 0) contracts = self.parse_number(contractsStringAbs) leverageBrackets = self.safe_value(self.options, 'leverageBrackets', {}) leverageBracket = self.safe_value(leverageBrackets, symbol, []) maintenanceMarginPercentageString = None for i in range(0, len(leverageBracket)): bracket = leverageBracket[i] if notionalFloatAbs < bracket[0]: break maintenanceMarginPercentageString = bracket[1] maintenanceMarginPercentage = self.parse_number(maintenanceMarginPercentageString) unrealizedPnlString = self.safe_string(position, 'unrealizedProfit') unrealizedPnl = self.parse_number(unrealizedPnlString) timestamp = self.safe_integer(position, 'updateTime') if timestamp == 0: timestamp = None isolated = self.safe_value(position, 'isolated') marginType = None collateralString = None walletBalance = None if isolated: marginType = 'isolated' walletBalance = self.safe_string(position, 'isolatedWallet') collateralString = Precise.string_add(walletBalance, unrealizedPnlString) else: marginType = 'cross' walletBalance = self.safe_string(position, 'crossWalletBalance') collateralString = self.safe_string(position, 'crossMargin') collateral = self.parse_number(collateralString) marginRatio = None side = None percentage = None liquidationPriceStringRaw = None liquidationPrice = None if notionalFloat == 0.0: entryPrice = None else: side = 'short' if (notionalFloat < 0) else 'long' marginRatio = self.parse_number(Precise.string_div(Precise.string_add(Precise.string_div(maintenanceMarginString, collateralString), '5e-5'), '1', 4)) percentage = self.parse_number(Precise.string_mul(Precise.string_div(unrealizedPnlString, initialMarginString, 4), '100')) if usdm: # calculate liquidation price # # liquidationPrice = (walletBalance / (contracts * (±1 + mmp))) + (±entryPrice / (±1 + mmp)) # # mmp = maintenanceMarginPercentage # where ± is negative for long and positive for short # TODO: calculate liquidation price for coinm contracts onePlusMaintenanceMarginPercentageString = None entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_add('1', maintenanceMarginPercentageString) else: onePlusMaintenanceMarginPercentageString = Precise.string_add('-1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) leftSide = Precise.string_div(walletBalance, Precise.string_mul(contractsStringAbs, onePlusMaintenanceMarginPercentageString)) rightSide = Precise.string_div(entryPriceSignString, onePlusMaintenanceMarginPercentageString) liquidationPriceStringRaw = Precise.string_add(leftSide, rightSide) else: # calculate liquidation price # # liquidationPrice = (contracts * contractSize(±1 - mmp)) / (±1/entryPrice * contracts * contractSize - walletBalance) # onePlusMaintenanceMarginPercentageString = None 
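                # added note(not in the original source): a worked example of the
                # inverse-contract formula above, with made-up numbers. For a long
                # position of 10 contracts x 100 USD(size = 1000), entryPrice = 40000,
                # maintenanceMarginPercentage = 0.004 and walletBalance = 0.02 BTC:
                #   leftSide  = 1000 * (-1 - 0.004)        = -1004
                #   rightSide = (1 / -40000) * 1000 - 0.02 = -0.045
                #   liquidationPrice ~ -1004 / -0.045      ~ 22311.1
                # i.e. the price P at which equity(0.02 + 1000/40000 - 1000/P)
                # equals the maintenance margin(0.004 * 1000 / P)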
entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_sub('1', maintenanceMarginPercentageString) else: onePlusMaintenanceMarginPercentageString = Precise.string_sub('-1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) size = Precise.string_mul(contractsStringAbs, market['contractSize']) leftSide = Precise.string_mul(size, onePlusMaintenanceMarginPercentageString) rightSide = Precise.string_sub(Precise.string_mul(Precise.string_div('1', entryPriceSignString), size), walletBalance) liquidationPriceStringRaw = Precise.string_div(leftSide, rightSide) pricePrecision = market['precision']['price'] pricePrecisionPlusOne = pricePrecision + 1 pricePrecisionPlusOneString = str(pricePrecisionPlusOne) # round half up rounder = Precise('5e-' + pricePrecisionPlusOneString) rounderString = str(rounder) liquidationPriceRoundedString = Precise.string_add(rounderString, liquidationPriceStringRaw) truncatedLiquidationPrice = Precise.string_div(liquidationPriceRoundedString, '1', pricePrecision) if truncatedLiquidationPrice[0] == '-': # user cannot be liquidated # since he has more collateral than the size of the position truncatedLiquidationPrice = None liquidationPrice = self.parse_number(truncatedLiquidationPrice) positionSide = self.safe_string(position, 'positionSide') hedged = positionSide != 'BOTH' return { 'info': position, 'symbol': symbol, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'initialMargin': initialMargin, 'initialMarginPercentage': self.parse_number(initialMarginPercentageString), 'maintenanceMargin': maintenanceMargin, 'maintenanceMarginPercentage': maintenanceMarginPercentage, 'entryPrice': entryPrice, 'notional': notional, 'leverage': self.parse_number(leverageString), 'unrealizedPnl': unrealizedPnl, 'contracts': contracts, 'contractSize': self.parse_number(market['contractSize']), 'marginRatio': marginRatio, 'liquidationPrice': liquidationPrice, 'markPrice': None, 'collateral': collateral, 'marginType': marginType, 'side': side, 'hedged': hedged, 'percentage': percentage, } def parse_position_risk(self, position, market=None): # # usdm # { # "symbol": "BTCUSDT", # "positionAmt": "0.001", # "entryPrice": "43578.07000", # "markPrice": "43532.30000000", # "unRealizedProfit": "-0.04577000", # "liquidationPrice": "21841.24993976", # "leverage": "2", # "maxNotionalValue": "300000000", # "marginType": "isolated", # "isolatedMargin": "21.77841506", # "isAutoAddMargin": "false", # "positionSide": "BOTH", # "notional": "43.53230000", # "isolatedWallet": "21.82418506", # "updateTime": "1621358023886" # } # # coinm # { # "symbol": "BTCUSD_PERP", # "positionAmt": "2", # "entryPrice": "37643.10000021", # "markPrice": "38103.05510455", # "unRealizedProfit": "0.00006413", # "liquidationPrice": "25119.97445760", # "leverage": "2", # "maxQty": "1500", # "marginType": "isolated", # "isolatedMargin": "0.00274471", # "isAutoAddMargin": "false", # "positionSide": "BOTH", # "notionalValue": "0.00524892", # "isolatedWallet": "0.00268058" # } # marketId = self.safe_string(position, 'symbol') market = self.safe_market(marketId, market) symbol = market['symbol'] leverageBrackets = self.safe_value(self.options, 'leverageBrackets', {}) leverageBracket = self.safe_value(leverageBrackets, symbol, []) notionalString = self.safe_string_2(position, 'notional', 'notionalValue') notionalStringAbs = Precise.string_abs(notionalString) notionalFloatAbs = float(notionalStringAbs) 
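        # added note(not in the original source): illustrative example of the bracket
        # lookup below, with a hypothetical leverageBracket of
        # [[0, '0.004'], [50000, '0.005'], [250000, '0.01']] and notionalFloatAbs = 60000:
        # the loop keeps the ratio of the last bracket whose floor does not exceed the
        # notional and breaks at the first floor that does, yielding '0.005'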
notionalFloat = float(notionalString) maintenanceMarginPercentageString = None for i in range(0, len(leverageBracket)): bracket = leverageBracket[i] if notionalFloatAbs < bracket[0]: break maintenanceMarginPercentageString = bracket[1] notional = self.parse_number(notionalStringAbs) contractsAbs = Precise.string_abs(self.safe_string(position, 'positionAmt')) contracts = self.parse_number(contractsAbs) unrealizedPnlString = self.safe_string(position, 'unRealizedProfit') unrealizedPnl = self.parse_number(unrealizedPnlString) leverageString = self.safe_string(position, 'leverage') leverage = int(leverageString) liquidationPriceString = self.omit_zero(self.safe_string(position, 'liquidationPrice')) liquidationPrice = self.parse_number(liquidationPriceString) collateralString = None marginType = self.safe_string(position, 'marginType') side = None if notionalFloat > 0: side = 'long' elif notionalFloat < 0: side = 'short' entryPriceString = self.safe_string(position, 'entryPrice') entryPrice = self.parse_number(entryPriceString) if marginType == 'cross': # calculate collateral if market['linear']: # walletBalance = (liquidationPrice * (±1 + mmp) ± entryPrice) * contracts onePlusMaintenanceMarginPercentageString = None entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_add('1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) else: onePlusMaintenanceMarginPercentageString = Precise.string_add('-1', maintenanceMarginPercentageString) inner = Precise.string_mul(liquidationPriceString, onePlusMaintenanceMarginPercentageString) leftSide = Precise.string_add(inner, entryPriceSignString) collateralString = Precise.string_div(Precise.string_mul(leftSide, contractsAbs), '1', market['precision']['quote']) else: # walletBalance = (contracts * contractSize) * (±1/entryPrice - (±1 - mmp) / liquidationPrice) onePlusMaintenanceMarginPercentageString = None entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_sub('1', maintenanceMarginPercentageString) else: onePlusMaintenanceMarginPercentageString = Precise.string_sub('-1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) leftSide = Precise.string_mul(contractsAbs, market['contractSize']) rightSide = Precise.string_sub(Precise.string_div('1', entryPriceSignString), Precise.string_div(onePlusMaintenanceMarginPercentageString, liquidationPriceString)) collateralString = Precise.string_div(Precise.string_mul(leftSide, rightSide), '1', market['precision']['base']) else: collateralString = self.safe_string(position, 'isolatedMargin') collateralString = '0' if (collateralString is None) else collateralString collateralFloat = float(collateralString) collateral = self.parse_number(collateralString) markPrice = self.parse_number(self.omit_zero(self.safe_string(position, 'markPrice'))) timestamp = self.safe_integer(position, 'updateTime') if timestamp == 0: timestamp = None maintenanceMarginPercentage = self.parse_number(maintenanceMarginPercentageString) maintenanceMarginString = Precise.string_mul(maintenanceMarginPercentageString, notionalStringAbs) maintenanceMargin = self.parse_number(maintenanceMarginString) initialMarginPercentageString = Precise.string_div('1', leverageString, 8) rational = (1000 % leverage) == 0 if not rational: initialMarginPercentageString = Precise.string_add(initialMarginPercentageString, '1e-8') 
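        # added note(not in the original source): e.g. with leverage = 3,
        # Precise.string_div('1', '3', 8) truncates to '0.33333333' and 1000 % 3 != 0,
        # so the '1e-8' bump above rounds the rate up to '0.33333334' rather than
        # understating the initial margin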
initialMarginString = Precise.string_div(Precise.string_mul(notionalStringAbs, initialMarginPercentageString), '1', 8) initialMargin = self.parse_number(initialMarginString) marginRatio = None percentage = None if collateralFloat != 0.0: marginRatio = self.parse_number(Precise.string_div(Precise.string_add(Precise.string_div(maintenanceMarginString, collateralString), '5e-5'), '1', 4)) percentage = self.parse_number(Precise.string_mul(Precise.string_div(unrealizedPnlString, initialMarginString, 4), '100')) positionSide = self.safe_string(position, 'positionSide') hedged = positionSide != 'BOTH' return { 'info': position, 'symbol': symbol, 'contracts': contracts, 'contractSize': self.parse_number(market['contractSize']), 'unrealizedPnl': unrealizedPnl, 'leverage': self.parse_number(leverageString), 'liquidationPrice': liquidationPrice, 'collateral': collateral, 'notional': notional, 'markPrice': markPrice, 'entryPrice': entryPrice, 'timestamp': timestamp, 'initialMargin': initialMargin, 'initialMarginPercentage': self.parse_number(initialMarginPercentageString), 'maintenanceMargin': maintenanceMargin, 'maintenanceMarginPercentage': maintenanceMarginPercentage, 'marginRatio': marginRatio, 'datetime': self.iso8601(timestamp), 'marginType': marginType, 'side': side, 'hedged': hedged, 'percentage': percentage, } def load_leverage_brackets(self, reload=False, params={}): self.load_markets() # by default cache the leverage bracket # it contains useful stuff like the maintenance margin and initial margin for positions leverageBrackets = self.safe_value(self.options, 'leverageBrackets') if (leverageBrackets is None) or (reload): method = None defaultType = self.safe_string(self.options, 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') if type == 'future': method = 'fapiPrivateGetLeverageBracket' elif type == 'delivery': method = 'dapiPrivateV2GetLeverageBracket' else: raise NotSupported(self.id + ' loadLeverageBrackets() supports linear and inverse contracts only') response = getattr(self, method)(query) self.options['leverageBrackets'] = {} for i in range(0, len(response)): entry = response[i] marketId = self.safe_string(entry, 'symbol') symbol = self.safe_symbol(marketId) brackets = self.safe_value(entry, 'brackets') result = [] for j in range(0, len(brackets)): bracket = brackets[j] # we use floats here internally on purpose floorValue = self.safe_float_2(bracket, 'notionalFloor', 'qtyFloor') maintenanceMarginPercentage = self.safe_string(bracket, 'maintMarginRatio') result.append([floorValue, maintenanceMarginPercentage]) self.options['leverageBrackets'][symbol] = result return self.options['leverageBrackets'] def fetch_positions(self, symbols=None, params={}): defaultMethod = self.safe_string(self.options, 'fetchPositions', 'positionRisk') if defaultMethod == 'positionRisk': return self.fetch_positions_risk(symbols, params) elif defaultMethod == 'account': return self.fetch_account_positions(symbols, params) else: raise NotSupported(self.id + '.options["fetchPositions"] = "' + defaultMethod + '" is invalid, please choose between "account" and "positionRisk"') def fetch_account_positions(self, symbols=None, params={}): if symbols is not None: if not isinstance(symbols, list): raise ArgumentsRequired(self.id + ' fetchPositions requires an array argument for symbols') self.load_markets() self.load_leverage_brackets() method = None defaultType = self.safe_string(self.options, 'defaultType', 'future') type = self.safe_string(params, 
'type', defaultType) query = self.omit(params, 'type') if type == 'future': method = 'fapiPrivateGetAccount' elif type == 'delivery': method = 'dapiPrivateGetAccount' else: raise NotSupported(self.id + ' fetchPositions() supports linear and inverse contracts only') account = getattr(self, method)(query) result = self.parse_account_positions(account) return self.filter_by_array(result, 'symbol', symbols, False) def fetch_positions_risk(self, symbols=None, params={}): if symbols is not None: if not isinstance(symbols, list): raise ArgumentsRequired(self.id + ' fetchPositions requires an array argument for symbols') self.load_markets() self.load_leverage_brackets() request = {} method = None defaultType = 'future' defaultType = self.safe_string(self.options, 'defaultType', defaultType) type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') if (type == 'future') or (type == 'linear'): method = 'fapiPrivateGetPositionRisk' elif (type == 'delivery') or (type == 'inverse'): method = 'dapiPrivateGetPositionRisk' else: raise NotSupported(self.id + ' fetchIsolatedPositions() supports linear and inverse contracts only') response = getattr(self, method)(self.extend(request, params)) result = [] for i in range(0, len(response)): parsed = self.parse_position_risk(response[i]) result.append(parsed) return self.filter_by_array(result, 'symbol', symbols, False) def fetch_funding_history(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None method = None defaultType = 'future' request = { 'incomeType': 'FUNDING_FEE', # "TRANSFER","WELCOME_BONUS", "REALIZED_PNL","FUNDING_FEE", "COMMISSION" and "INSURANCE_CLEAR" } if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] if market['linear']: defaultType = 'future' elif market['inverse']: defaultType = 'delivery' else: raise NotSupported(self.id + ' fetchFundingHistory() supports linear and inverse contracts only') if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit defaultType = self.safe_string_2(self.options, 'fetchFundingHistory', 'defaultType', defaultType) type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') if (type == 'future') or (type == 'linear'): method = 'fapiPrivateGetIncome' elif (type == 'delivery') or (type == 'inverse'): method = 'dapiPrivateGetIncome' else: raise NotSupported(self.id + ' fetchFundingHistory() supports linear and inverse contracts only') response = getattr(self, method)(self.extend(request, params)) return self.parse_incomes(response, market, since, limit) def set_leverage(self, leverage, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' setLeverage() requires a symbol argument') # WARNING: THIS WILL INCREASE LIQUIDATION PRICE FOR OPEN ISOLATED LONG POSITIONS # AND DECREASE LIQUIDATION PRICE FOR OPEN ISOLATED SHORT POSITIONS if (leverage < 1) or (leverage > 125): raise BadRequest(self.id + ' leverage should be between 1 and 125') self.load_markets() market = self.market(symbol) method = None if market['linear']: method = 'fapiPrivatePostLeverage' elif market['inverse']: method = 'dapiPrivatePostLeverage' else: raise NotSupported(self.id + ' setLeverage() supports linear and inverse contracts only') request = { 'symbol': market['id'], 'leverage': leverage, } return getattr(self, method)(self.extend(request, params)) def set_margin_mode(self, marginType, symbol=None, params={}): # # {"code": -4048 , "msg": "Margin type cannot 
be changed if there exists position."} # # or # # {"code": 200, "msg": "success"} # marginType = marginType.upper() if (marginType != 'ISOLATED') and (marginType != 'CROSSED'): raise BadRequest(self.id + ' marginType must be either isolated or crossed') self.load_markets() market = self.market(symbol) method = None if market['linear']: method = 'fapiPrivatePostMarginType' elif market['inverse']: method = 'dapiPrivatePostMarginType' else: raise NotSupported(self.id + ' setMarginMode() supports linear and inverse contracts only') request = { 'symbol': market['id'], 'marginType': marginType, } return getattr(self, method)(self.extend(request, params)) def set_position_mode(self, hedged, symbol=None, params={}): defaultType = self.safe_string(self.options, 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) params = self.omit(params, ['type']) dualSidePosition = None if hedged: dualSidePosition = 'true' else: dualSidePosition = 'false' request = { 'dualSidePosition': dualSidePosition, } method = None if type == 'delivery': method = 'dapiPrivatePostPositionSideDual' else: # default to future method = 'fapiPrivatePostPositionSideDual' # # { # "code": 200, # "msg": "success" # } # return getattr(self, method)(self.extend(request, params)) def sign(self, path, api='public', method='GET', params={}, headers=None, body=None): if not (api in self.urls['api']): raise NotSupported(self.id + ' does not have a testnet/sandbox URL for ' + api + ' endpoints') url = self.urls['api'][api] url += '/' + path if api == 'wapi': url += '.html' if path == 'historicalTrades': if self.apiKey: headers = { 'X-MBX-APIKEY': self.apiKey, } else: raise AuthenticationError(self.id + ' historicalTrades endpoint requires `apiKey` credential') userDataStream = (path == 'userDataStream') or (path == 'listenKey') if userDataStream: if self.apiKey: # v1 special case for userDataStream headers = { 'X-MBX-APIKEY': self.apiKey, 'Content-Type': 'application/x-www-form-urlencoded', } if method != 'GET': body = self.urlencode(params) else: raise AuthenticationError(self.id + ' userDataStream endpoint requires `apiKey` credential') elif (api == 'private') or (api == 'sapi') or (api == 'wapi' and path != 'systemStatus') or (api == 'dapiPrivate') or (api == 'dapiPrivateV2') or (api == 'fapiPrivate') or (api == 'fapiPrivateV2'): self.check_required_credentials() query = None recvWindow = self.safe_integer(self.options, 'recvWindow', 5000) if (api == 'sapi') and (path == 'asset/dust'): query = self.urlencode_with_array_repeat(self.extend({ 'timestamp': self.nonce(), 'recvWindow': recvWindow, }, params)) elif (path == 'batchOrders') or (path.find('sub-account') >= 0): query = self.rawencode(self.extend({ 'timestamp': self.nonce(), 'recvWindow': recvWindow, }, params)) else: query = self.urlencode(self.extend({ 'timestamp': self.nonce(), 'recvWindow': recvWindow, }, params)) signature = self.hmac(self.encode(query), self.encode(self.secret)) query += '&' + 'signature=' + signature headers = { 'X-MBX-APIKEY': self.apiKey, } if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'): url += '?' + query else: body = query headers['Content-Type'] = 'application/x-www-form-urlencoded' else: if params: url += '?' 
+ self.urlencode(params) return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody): if (code == 418) or (code == 429): raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body) # error response in a form: {"code": -1013, "msg": "Invalid quantity."} # following block contains legacy checks against message patterns in "msg" property # will switch "code" checks eventually, when we know all of them if code >= 400: if body.find('Price * QTY is zero or less') >= 0: raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body) if body.find('LOT_SIZE') >= 0: raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body) if body.find('PRICE_FILTER') >= 0: raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body) if response is None: return # fallback to default error handler # check success value for wapi endpoints # response in format {'msg': 'The coin does not exist.', 'success': True/false} success = self.safe_value(response, 'success', True) if not success: message = self.safe_string(response, 'msg') parsedMessage = None if message is not None: try: parsedMessage = json.loads(message) except Exception as e: # do nothing parsedMessage = None if parsedMessage is not None: response = parsedMessage message = self.safe_string(response, 'msg') if message is not None: self.throw_exactly_matched_exception(self.exceptions['exact'], message, self.id + ' ' + message) self.throw_broadly_matched_exception(self.exceptions['broad'], message, self.id + ' ' + message) # checks against error codes error = self.safe_string(response, 'code') if error is not None: # https://github.com/ccxt/ccxt/issues/6501 # https://github.com/ccxt/ccxt/issues/7742 if (error == '200') or Precise.string_equals(error, '0'): return # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} # despite that their message is very confusing, it is raised by Binance # on a temporary ban, the API key is valid, but disabled for a while if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']: raise DDoSProtection(self.id + ' temporary banned: ' + body) feedback = self.id + ' ' + body self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback) raise ExchangeError(feedback) if not success: raise ExchangeError(self.id + ' ' + body) def calculate_rate_limiter_cost(self, api, method, path, params, config={}, context={}): if ('noSymbol' in config) and not ('symbol' in params): return config['noSymbol'] elif ('noPoolId' in config) and not ('poolId' in params): return config['noPoolId'] elif ('byLimit' in config) and ('limit' in params): limit = params['limit'] byLimit = config['byLimit'] for i in range(0, len(byLimit)): entry = byLimit[i] if limit <= entry[0]: return entry[1] return self.safe_integer(config, 'cost', 1) def request(self, path, api='public', method='GET', params={}, headers=None, body=None, config={}, context={}): response = self.fetch2(path, api, method, params, headers, body, config, context) # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} if (api == 'private') or (api == 'wapi'): self.options['hasAlreadyAuthenticatedSuccessfully'] = True return response def
modify_margin_helper(self, symbol, amount, addOrReduce, params={}): # used to modify isolated positions defaultType = self.safe_string(self.options, 'defaultType', 'future') if defaultType == 'spot': defaultType = 'future' type = self.safe_string(params, 'type', defaultType) if (type == 'margin') or (type == 'spot'): raise NotSupported(self.id + ' add / reduce margin only supported with type future or delivery') self.load_markets() market = self.market(symbol) request = { 'type': addOrReduce, 'symbol': market['id'], 'amount': amount, } method = None code = None if type == 'future': method = 'fapiPrivatePostPositionMargin' code = market['quote'] else: method = 'dapiPrivatePostPositionMargin' code = market['base'] response = getattr(self, method)(self.extend(request, params)) # # { # "code": 200, # "msg": "Successfully modify position margin.", # "amount": 0.001, # "type": 1 # } # rawType = self.safe_integer(response, 'type') resultType = 'add' if (rawType == 1) else 'reduce' resultAmount = self.safe_number(response, 'amount') errorCode = self.safe_string(response, 'code') status = 'ok' if (errorCode == '200') else 'failed' return { 'info': response, 'type': resultType, 'amount': resultAmount, 'code': code, 'symbol': market['symbol'], 'status': status, } def reduce_margin(self, symbol, amount, params={}): return self.modify_margin_helper(symbol, amount, 2, params) def add_margin(self, symbol, amount, params={}): return self.modify_margin_helper(symbol, amount, 1, params) def fetch_borrow_rate(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'asset': currency['id'], # 'vipLevel': self.safe_integer(params, 'vipLevel'), } response = self.sapiGetMarginInterestRateHistory(self.extend(request, params)) # # [ # { # "asset": "USDT", # "timestamp": 1638230400000, # "dailyInterestRate": "0.0006", # "vipLevel": 0 # }, # ... # ] # rate = self.safe_value(response, 0) timestamp = self.safe_number(rate, 'timestamp') return { 'currency': code, 'rate': self.safe_number(rate, 'dailyInterestRate'), 'period': 86400000, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'info': response, }
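# --- usage sketch(added by the editor, not part of the original file) ---
# a minimal, hedged example of driving the methods above; the credentials are
# placeholders and 'defaultType': 'future' routes requests to the fapi
# endpoints via self.options['defaultType'] as used throughout the class
if __name__ == '__main__':
    exchange = binance({
        'apiKey': 'YOUR_API_KEY',  # placeholder, required only for signed calls
        'secret': 'YOUR_SECRET',  # placeholder
        'options': {'defaultType': 'future'},
    })
    # public endpoint: no signature, served by fapiPublicGetPremiumIndex
    print(exchange.fetch_funding_rate('BTC/USDT'))
    # signed endpoint: goes through sign() above, which appends timestamp,
    # recvWindow and an HMAC signature to the query string
    print(exchange.fetch_positions(['BTC/USDT']))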
48.316663
10000
0.490834
from ccxt.base.exchange import Exchange
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import OrderImmediatelyFillable
from ccxt.base.errors import NotSupported
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.precise import Precise


class binance(Exchange):

    def describe(self):
        return self.deep_extend(super(binance, self).describe(), {
            'id': 'binance',
            'name': 'Binance',
            'countries': ['JP', 'MT'],
            'rateLimit': 50,
            'certified': True,
            'pro': True,
            'has': {
                'cancelAllOrders': True,
                'cancelOrder': True,
                'CORS': None,
                'createOrder': True,
                'fetchBalance': True,
                'fetchBorrowRate': True,
                'fetchBorrowRates': False,
                'fetchBidsAsks': True,
                'fetchClosedOrders': 'emulated',
                'fetchCurrencies': True,
                'fetchDepositAddress': True,
                'fetchDeposits': True,
                'fetchFundingFees': True,
                'fetchFundingHistory': True,
                'fetchFundingRate': True,
                'fetchFundingRateHistory': True,
                'fetchFundingRates': True,
                'fetchIndexOHLCV': True,
                'fetchIsolatedPositions': True,
                'fetchMarkets': True,
                'fetchMarkOHLCV': True,
                'fetchMyTrades': True,
                'fetchOHLCV': True,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchOrders': True,
                'fetchPositions': True,
                'fetchPremiumIndexOHLCV': False,
                'fetchStatus': True,
                'fetchTicker': True,
                'fetchTickers': True,
                'fetchTime': True,
                'fetchTrades': True,
                'fetchTradingFee': True,
                'fetchTradingFees': True,
                'fetchTransactions': False,
                'fetchTransfers': True,
                'fetchWithdrawals': True,
                'setLeverage': True,
                'setMarginMode': True,
                'setPositionMode': True,
                'addMargin': True,
                'reduceMargin': True,
                'transfer': True,
                'withdraw': True,
            },
            'timeframes': {
                '1m': '1m', '3m': '3m', '5m': '5m', '15m': '15m', '30m': '30m',
                '1h': '1h', '2h': '2h', '4h': '4h', '6h': '6h', '8h': '8h', '12h': '12h',
                '1d': '1d', '3d': '3d', '1w': '1w', '1M': '1M',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg',
                'test': {
                    'dapiPublic': 'https://testnet.binancefuture.com/dapi/v1',
                    'dapiPrivate': 'https://testnet.binancefuture.com/dapi/v1',
                    'fapiPublic': 'https://testnet.binancefuture.com/fapi/v1',
                    'fapiPrivate': 'https://testnet.binancefuture.com/fapi/v1',
                    'fapiPrivateV2': 'https://testnet.binancefuture.com/fapi/v2',
                    'public': 'https://testnet.binance.vision/api/v3',
                    'private': 'https://testnet.binance.vision/api/v3',
                    'v1': 'https://testnet.binance.vision/api/v1',
                },
                'api': {
                    'wapi': 'https://api.binance.com/wapi/v3',
                    'sapi': 'https://api.binance.com/sapi/v1',
                    'dapiPublic': 'https://dapi.binance.com/dapi/v1',
                    'dapiPrivate': 'https://dapi.binance.com/dapi/v1',
                    'dapiPrivateV2': 'https://dapi.binance.com/dapi/v2',
                    'dapiData': 'https://dapi.binance.com/futures/data',
                    'fapiPublic': 'https://fapi.binance.com/fapi/v1',
                    'fapiPrivate': 'https://fapi.binance.com/fapi/v1',
                    'fapiData': 'https://fapi.binance.com/futures/data',
                    'fapiPrivateV2': 'https://fapi.binance.com/fapi/v2',
                    'public': 'https://api.binance.com/api/v3',
                    'private': 'https://api.binance.com/api/v3',
                    'v1': 'https://api.binance.com/api/v1',
                },
                'www': 'https://www.binance.com',
                'doc': [
                    'https://binance-docs.github.io/apidocs/spot/en',
                ],
                'api_management': 'https://www.binance.com/en/usercenter/settings/api-management',
                'fees': 'https://www.binance.com/en/fee/schedule',
            },
            'depth': 1,
            'api': {
                'sapi': {
                    'get': {
                        'accountSnapshot': 1, 'system/status': 1,
                        'margin/asset': 1, 'margin/pair': 1, 'margin/allAssets': 1, 'margin/allPairs': 1, 'margin/priceIndex': 1,
                        'asset/assetDividend': 1, 'asset/dribblet': 1, 'asset/transfer': 1, 'asset/assetDetail': 1,
                        'asset/tradeFee': 1, 'asset/get-funding-asset': 1,
                        'margin/loan': 1, 'margin/repay': 1, 'margin/account': 1, 'margin/transfer': 1,
                        'margin/interestHistory': 1, 'margin/forceLiquidationRec': 1,
                        'margin/order': 1, 'margin/openOrders': 1, 'margin/allOrders': 1, 'margin/myTrades': 1,
                        'margin/maxBorrowable': 5, 'margin/maxTransferable': 5,
                        'margin/isolated/transfer': 1, 'margin/isolated/account': 1, 'margin/isolated/pair': 1,
                        'margin/isolated/allPairs': 1, 'margin/isolated/accountLimit': 1,
                        'margin/interestRateHistory': 1, 'margin/orderList': 2, 'margin/allOrderList': 10, 'margin/openOrderList': 3,
                        'loan/income': 1, 'fiat/orders': 1, 'fiat/payments': 1,
                        'futures/transfer': 5, 'futures/loan/borrow/history': 1, 'futures/loan/repay/history': 1,
                        'futures/loan/wallet': 1, 'futures/loan/configs': 1, 'futures/loan/calcAdjustLevel': 1,
                        'futures/loan/calcMaxAdjustAmount': 1, 'futures/loan/adjustCollateral/history': 1,
                        'futures/loan/liquidationHistory': 1,
                        'capital/config/getall': 1, 'capital/deposit/address': 1, 'capital/deposit/hisrec': 1,
                        'capital/deposit/subAddress': 1, 'capital/deposit/subHisrec': 1, 'capital/withdraw/history': 1,
                        'account/status': 1, 'account/apiTradingStatus': 1, 'account/apiRestrictions/ipRestriction': 1,
                        'bnbBurn': 1,
                        'sub-account/assets': 1, 'sub-account/futures/account': 1, 'sub-account/futures/accountSummary': 1,
                        'sub-account/futures/positionRisk': 1, 'sub-account/futures/internalTransfer': 1,
                        'sub-account/list': 1, 'sub-account/margin/account': 1, 'sub-account/margin/accountSummary': 1,
                        'sub-account/spotSummary': 5, 'sub-account/status': 1, 'sub-account/sub/transfer/history': 1,
                        'sub-account/transfer/subUserHistory': 1, 'sub-account/universalTransfer': 1,
                        'lending/daily/product/list': 1, 'lending/daily/userLeftQuota': 1,
                        'lending/daily/userRedemptionQuota': 1, 'lending/daily/token/position': 1,
                        'lending/union/account': 1, 'lending/union/purchaseRecord': 1,
                        'lending/union/redemptionRecord': 1, 'lending/union/interestHistory': 1,
                        'lending/project/list': 1, 'lending/project/position/list': 1,
                        'mining/pub/algoList': 1, 'mining/pub/coinList': 1, 'mining/worker/detail': 5,
                        'mining/worker/list': 5, 'mining/payment/list': 5,
                        'mining/statistics/user/status': 5, 'mining/statistics/user/list': 5,
                        'bswap/pools': 1, 'bswap/liquidity': {'cost': 1, 'noPoolId': 10}, 'bswap/liquidityOps': 2,
                        'bswap/quote': 2, 'bswap/swap': 1, 'bswap/poolConfigure': 1,
                        'bswap/addLiquidityPreview': 1, 'bswap/removeLiquidityPreview': 1,
                        'blvt/tokenInfo': 1, 'blvt/subscribe/record': 1, 'blvt/redeem/record': 1, 'blvt/userLimit': 1,
                        'apiReferral/ifNewUser': 1, 'apiReferral/customization': 1, 'apiReferral/userCustomization': 1,
                        'apiReferral/rebate/recentRecord': 1, 'apiReferral/rebate/historicalRecord': 1,
                        'apiReferral/kickback/recentRecord': 1, 'apiReferral/kickback/historicalRecord': 1,
                        'broker/subAccountApi': 1, 'broker/subAccount': 1,
                        'broker/subAccountApi/commission/futures': 1, 'broker/subAccountApi/commission/coinFutures': 1,
                        'broker/info': 1, 'broker/transfer': 1, 'broker/transfer/futures': 1,
                        'broker/rebate/recentRecord': 1, 'broker/rebate/historicalRecord': 1,
                        'broker/subAccount/bnbBurn/status': 1, 'broker/subAccount/depositHist': 1,
                        'broker/subAccount/spotSummary': 1, 'broker/subAccount/marginSummary': 1,
                        'broker/subAccount/futuresSummary': 1, 'broker/rebate/futures/recentRecord': 1,
                        'broker/subAccountApi/ipRestriction': 1, 'broker/universalTransfer': 1,
                        'account/apiRestrictions': 1, 'managed-subaccount/asset': 1,
                        'c2c/orderMatch/listUserOrderHistory': 1,
                    },
                    'post': {
                        'asset/dust': 1, 'asset/transfer': 1, 'asset/get-funding-asset': 1,
                        'account/disableFastWithdrawSwitch': 1, 'account/enableFastWithdrawSwitch': 1,
                        'account/apiRestrictions/ipRestriction': 1, 'account/apiRestrictions/ipRestriction/ipList': 1,
                        'capital/withdraw/apply': 1,
                        'margin/transfer': 1, 'margin/loan': 1, 'margin/repay': 1, 'margin/order': 4, 'margin/order/oco': 1,
                        'margin/isolated/create': 1, 'margin/isolated/transfer': 1, 'margin/isolated/account': 1,
                        'bnbBurn': 1,
                        'sub-account/margin/transfer': 1, 'sub-account/margin/enable': 1, 'sub-account/futures/enable': 1,
                        'sub-account/futures/transfer': 1, 'sub-account/futures/internalTransfer': 1,
                        'sub-account/transfer/subToSub': 1, 'sub-account/transfer/subToMaster': 1,
                        'sub-account/universalTransfer': 1,
                        'managed-subaccount/deposit': 1, 'managed-subaccount/withdraw': 1,
                        'userDataStream': 1, 'userDataStream/isolated': 1,
                        'futures/transfer': 1, 'futures/loan/borrow': 20, 'futures/loan/repay': 20,
                        'futures/loan/adjustCollateral': 20,
                        'lending/customizedFixed/purchase': 1, 'lending/daily/purchase': 1, 'lending/daily/redeem': 1,
                        'bswap/liquidityAdd': 2, 'bswap/liquidityRemove': 2, 'bswap/swap': 2,
                        'blvt/subscribe': 1, 'blvt/redeem': 1,
                        'apiReferral/customization': 1, 'apiReferral/userCustomization': 1,
                        'apiReferral/rebate/historicalRecord': 1, 'apiReferral/kickback/historicalRecord': 1,
                        'broker/subAccount': 1, 'broker/subAccount/margin': 1, 'broker/subAccount/futures': 1,
                        'broker/subAccountApi': 1, 'broker/subAccountApi/permission': 1,
                        'broker/subAccountApi/commission': 1, 'broker/subAccountApi/commission/futures': 1,
                        'broker/subAccountApi/commission/coinFutures': 1,
                        'broker/transfer': 1, 'broker/transfer/futures': 1, 'broker/rebate/historicalRecord': 1,
                        'broker/subAccount/bnbBurn/spot': 1, 'broker/subAccount/bnbBurn/marginInterest': 1,
                        'broker/subAccount/blvt': 1,
                        'broker/subAccountApi/ipRestriction': 1, 'broker/subAccountApi/ipRestriction/ipList': 1,
                        'broker/universalTransfer': 1,
                        'broker/subAccountApi/permission/universalTransfer': 1,
                        'broker/subAccountApi/permission/vanillaOptions': 1,
                    },
                    'put': {
                        'userDataStream': 1,
                        'userDataStream/isolated': 1,
                    },
                    'delete': {
                        'account/apiRestrictions/ipRestriction/ipList': 1,
                        'margin/openOrders': 1, 'margin/order': 1, 'margin/orderList': 1, 'margin/isolated/account': 1,
                        'userDataStream': 1, 'userDataStream/isolated': 1,
                        'broker/subAccountApi': 1, 'broker/subAccountApi/ipRestriction/ipList': 1,
                    },
                },
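                # Illustrative only(not part of the ccxt API): the integer values in
                # these tables are the request weights that Binance charges against its
                # rate-limit buckets.  A minimal sketch of resolving the weight of one
                # call from such a table, including the dict-valued entries, with an
                # assumed helper name `endpoint_weight`:
                #
                #     def endpoint_weight(table, path, params={}):
                #         entry = table[path]
                #         if isinstance(entry, dict):  # e.g. {'cost': 1, 'noPoolId': 10}
                #             if ('noPoolId' in entry) and ('poolId' not in params):
                #                 return entry['noPoolId']
                #             return entry['cost']
                #         return entry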
                'wapi': {
                    'post': {
                        'withdraw': 1,
                        'sub-account/transfer': 1,
                    },
                    'get': {
                        'depositHistory': 1, 'withdrawHistory': 1, 'depositAddress': 1,
                        'accountStatus': 1, 'systemStatus': 1, 'apiTradingStatus': 1,
                        'userAssetDribbletLog': 1, 'tradeFee': 1, 'assetDetail': 1,
                        'sub-account/list': 1, 'sub-account/transfer/history': 1, 'sub-account/assets': 1,
                    },
                },
                'dapiPublic': {
                    'get': {
                        'ping': 1,
                        'time': 1,
                        'exchangeInfo': 1,
                        'depth': {'cost': 2, 'byLimit': [[50, 2], [100, 5], [500, 10], [1000, 20]]},
                        'trades': 1,
                        'historicalTrades': 20,
                        'aggTrades': 20,
                        'premiumIndex': 10,
                        'fundingRate': 1,
                        'klines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'continuousKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'indexPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'markPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'ticker/24hr': {'cost': 1, 'noSymbol': 40},
                        'ticker/price': {'cost': 1, 'noSymbol': 2},
                        'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
                        'openInterest': 1,
                    },
                },
                'dapiData': {
                    'get': {
                        'openInterestHist': 1, 'topLongShortAccountRatio': 1, 'topLongShortPositionRatio': 1,
                        'globalLongShortAccountRatio': 1, 'takerBuySellVol': 1, 'basis': 1,
                    },
                },
                'dapiPrivate': {
                    'get': {
                        'positionSide/dual': 30,
                        'order': 1,
                        'openOrder': 1,
                        'openOrders': {'cost': 1, 'noSymbol': 5},
                        'allOrders': {'cost': 20, 'noSymbol': 40},
                        'balance': 1,
                        'account': 5,
                        'positionMargin/history': 1,
                        'positionRisk': 1,
                        'userTrades': {'cost': 20, 'noSymbol': 40},
                        'income': 20,
                        'leverageBracket': 1,
                        'forceOrders': {'cost': 20, 'noSymbol': 50},
                        'adlQuantile': 5,
                    },
                    'post': {
                        'positionSide/dual': 1, 'order': 4, 'batchOrders': 5, 'countdownCancelAll': 10,
                        'leverage': 1, 'marginType': 1, 'positionMargin': 1, 'listenKey': 1,
                    },
                    'put': {
                        'listenKey': 1,
                    },
                    'delete': {
                        'order': 1, 'allOpenOrders': 1, 'batchOrders': 5, 'listenKey': 1,
                    },
                },
                'dapiPrivateV2': {
                    'get': {
                        'leverageBracket': 1,
                    },
                },
                'fapiPublic': {
                    'get': {
                        'ping': 1,
                        'time': 1,
                        'exchangeInfo': 1,
                        'depth': {'cost': 2, 'byLimit': [[50, 2], [100, 5], [500, 10], [1000, 20]]},
                        'trades': 1,
                        'historicalTrades': 20,
                        'aggTrades': 20,
                        'klines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'continuousKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'markPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'indexPriceKlines': {'cost': 1, 'byLimit': [[99, 1], [499, 2], [1000, 5], [10000, 10]]},
                        'fundingRate': 1,
                        'premiumIndex': 1,
                        'ticker/24hr': {'cost': 1, 'noSymbol': 40},
                        'ticker/price': {'cost': 1, 'noSymbol': 2},
                        'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
                        'openInterest': 1,
                        'indexInfo': 1,
                        'apiTradingStatus': {'cost': 1, 'noSymbol': 10},
                        'lvtKlines': 1,
                    },
                },
                'fapiData': {
                    'get': {
                        'openInterestHist': 1, 'topLongShortAccountRatio': 1, 'topLongShortPositionRatio': 1,
                        'globalLongShortAccountRatio': 1, 'takerlongshortRatio': 1,
                    },
                },
                'fapiPrivate': {
                    'get': {
                        'forceOrders': {'cost': 20, 'noSymbol': 50},
                        'allOrders': 5,
                        'openOrder': 1,
                        'openOrders': 1,
                        'order': 1,
                        'account': 5,
                        'balance': 5,
                        'leverageBracket': 1,
                        'positionMargin/history': 1,
                        'positionRisk': 5,
                        'positionSide/dual': 30,
                        'userTrades': 5,
                        'income': 30,
                        'commissionRate': 20,
                        'apiTradingStatus': 1,
                        'multiAssetsMargin': 30,
                        'apiReferral/ifNewUser': 1, 'apiReferral/customization': 1, 'apiReferral/userCustomization': 1,
                        'apiReferral/traderNum': 1, 'apiReferral/overview': 1, 'apiReferral/tradeVol': 1,
                        'apiReferral/rebateVol': 1, 'apiReferral/traderSummary': 1,
                        'adlQuantile': 5,
                    },
                    'post': {
                        'batchOrders': 5, 'positionSide/dual': 1, 'positionMargin': 1, 'marginType': 1,
                        'order': 4, 'leverage': 1, 'listenKey': 1, 'countdownCancelAll': 10,
                        'multiAssetsMargin': 1,
                        'apiReferral/customization': 1, 'apiReferral/userCustomization': 1,
                    },
                    'put': {
                        'listenKey': 1,
                    },
                    'delete': {
                        'batchOrders': 1, 'order': 1, 'allOpenOrders': 1, 'listenKey': 1,
                    },
                },
                'fapiPrivateV2': {
                    'get': {
                        'account': 1,
                        'balance': 1,
                        'positionRisk': 1,
                    },
                },
                'public': {
                    'get': {
                        'ping': 1,
                        'time': 1,
                        'depth': {'cost': 1, 'byLimit': [[100, 1], [500, 5], [1000, 10], [5000, 50]]},
                        'trades': 1,
                        'aggTrades': 1,
                        'historicalTrades': 5,
                        'klines': 1,
                        'ticker/24hr': {'cost': 1, 'noSymbol': 40},
                        'ticker/price': {'cost': 1, 'noSymbol': 2},
                        'ticker/bookTicker': {'cost': 1, 'noSymbol': 2},
                        'exchangeInfo': 10,
                    },
                    'put': {
                        'userDataStream': 1,
                    },
                    'post': {
                        'userDataStream': 1,
                    },
                    'delete': {
                        'userDataStream': 1,
                    },
                },
                'private': {
                    'get': {
                        'allOrderList': 10,
                        'openOrderList': 3,
                        'orderList': 2,
                        'order': 2,
                        'openOrders': {'cost': 3, 'noSymbol': 40},
                        'allOrders': 10,
                        'account': 10,
                        'myTrades': 10,
                        'rateLimit/order': 20,
                    },
                    'post': {
                        'order/oco': 1,
                        'order': 4,
                        'order/test': 1,
                    },
                    'delete': {
                        'openOrders': 1,
                        'orderList': 1,
                        'order': 1,
                    },
                },
            },
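            # Illustrative only: some endpoints above scale their weight with the
            # requested limit via 'byLimit' pairs, e.g. a dapi depth call with
            # limit=500 costs 10.  A sketch of the lookup(assumed helper, not a ccxt
            # method):
            #
            #     def by_limit_cost(pairs, limit):
            #         for max_limit, cost in pairs:
            #             if limit <= max_limit:
            #                 return cost
            #         return pairs[len(pairs) - 1][1]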
            'fees': {
                'trading': {
                    'feeSide': 'get',
                    'tierBased': False,
                    'percentage': True,
                    'taker': self.parse_number('0.001'),
                    'maker': self.parse_number('0.001'),
                },
                'future': {
                    'trading': {
                        'feeSide': 'quote',
                        'tierBased': True,
                        'percentage': True,
                        'taker': self.parse_number('0.000400'),
                        'maker': self.parse_number('0.000200'),
                        'tiers': {
                            'taker': [
                                [self.parse_number('0'), self.parse_number('0.000400')],
                                [self.parse_number('250'), self.parse_number('0.000400')],
                                [self.parse_number('2500'), self.parse_number('0.000350')],
                                [self.parse_number('7500'), self.parse_number('0.000320')],
                                [self.parse_number('22500'), self.parse_number('0.000300')],
                                [self.parse_number('50000'), self.parse_number('0.000270')],
                                [self.parse_number('100000'), self.parse_number('0.000250')],
                                [self.parse_number('200000'), self.parse_number('0.000220')],
                                [self.parse_number('400000'), self.parse_number('0.000200')],
                                [self.parse_number('750000'), self.parse_number('0.000170')],
                            ],
                            'maker': [
                                [self.parse_number('0'), self.parse_number('0.000200')],
                                [self.parse_number('250'), self.parse_number('0.000160')],
                                [self.parse_number('2500'), self.parse_number('0.000140')],
                                [self.parse_number('7500'), self.parse_number('0.000120')],
                                [self.parse_number('22500'), self.parse_number('0.000100')],
                                [self.parse_number('50000'), self.parse_number('0.000080')],
                                [self.parse_number('100000'), self.parse_number('0.000060')],
                                [self.parse_number('200000'), self.parse_number('0.000040')],
                                [self.parse_number('400000'), self.parse_number('0.000020')],
                                [self.parse_number('750000'), self.parse_number('0')],
                            ],
                        },
                    },
                },
                'delivery': {
                    'trading': {
                        'feeSide': 'base',
                        'tierBased': True,
                        'percentage': True,
                        'taker': self.parse_number('0.000500'),
                        'maker': self.parse_number('0.000100'),
                        'tiers': {
                            'taker': [
                                [self.parse_number('0'), self.parse_number('0.000500')],
                                [self.parse_number('250'), self.parse_number('0.000450')],
                                [self.parse_number('2500'), self.parse_number('0.000400')],
                                [self.parse_number('7500'), self.parse_number('0.000300')],
                                [self.parse_number('22500'), self.parse_number('0.000250')],
                                [self.parse_number('50000'), self.parse_number('0.000240')],
                                [self.parse_number('100000'), self.parse_number('0.000240')],
                                [self.parse_number('200000'), self.parse_number('0.000240')],
                                [self.parse_number('400000'), self.parse_number('0.000240')],
                                [self.parse_number('750000'), self.parse_number('0.000240')],
                            ],
                            'maker': [
                                [self.parse_number('0'), self.parse_number('0.000100')],
                                [self.parse_number('250'), self.parse_number('0.000080')],
                                [self.parse_number('2500'), self.parse_number('0.000050')],
                                [self.parse_number('7500'), self.parse_number('0.0000030')],
                                [self.parse_number('22500'), self.parse_number('0')],
                                [self.parse_number('50000'), self.parse_number('-0.000050')],
                                [self.parse_number('100000'), self.parse_number('-0.000060')],
                                [self.parse_number('200000'), self.parse_number('-0.000070')],
                                [self.parse_number('400000'), self.parse_number('-0.000080')],
                                [self.parse_number('750000'), self.parse_number('-0.000090')],
                            ],
                        },
                    },
                },
            },
            'commonCurrencies': {
                'BCC': 'BCC',
                'YOYO': 'YOYOW',
            },
            'options': {
                'fetchCurrencies': True,
                'defaultTimeInForce': 'GTC',
                'defaultType': 'spot',
                'hasAlreadyAuthenticatedSuccessfully': False,
                'warnOnFetchOpenOrdersWithoutSymbol': True,
                'fetchPositions': 'positionRisk',
                'recvWindow': 5 * 1000,
                'timeDifference': 0,
                'adjustForTimeDifference': False,
                'newOrderRespType': {
                    'market': 'FULL',
                    'limit': 'FULL',
                },
                'quoteOrderQty': True,
                'broker': {
                    'spot': 'x-R4BD3S82',
                    'margin': 'x-R4BD3S82',
                    'future': 'x-xcKtGhcu',
                    'delivery': 'x-xcKtGhcu',
                },
                'accountsByType': {
                    'main': 'MAIN',
                    'spot': 'MAIN',
                    'funding': 'FUNDING',
                    'margin': 'MARGIN',
                    'future': 'UMFUTURE',
                    'delivery': 'CMFUTURE',
                    'mining': 'MINING',
                },
                'typesByAccount': {
                    'MAIN': 'spot',
                    'FUNDING': 'funding',
                    'MARGIN': 'margin',
                    'UMFUTURE': 'future',
                    'CMFUTURE': 'delivery',
                    'MINING': 'mining',
                },
                'networks': {
                    'ERC20': 'ETH',
                    'TRC20': 'TRX',
                    'BEP2': 'BNB',
                    'BEP20': 'BSC',
                    'OMNI': 'OMNI',
                    'EOS': 'EOS',
                    'SPL': 'SOL',
                },
                'reverseNetworks': {
                    'tronscan.org': 'TRC20', 'etherscan.io': 'ERC20', 'bscscan.com': 'BSC',
                    'explorer.binance.org': 'BEP2', 'bithomp.com': 'XRP', 'bloks.io': 'EOS',
                    'stellar.expert': 'XLM', 'blockchair.com/bitcoin': 'BTC',
                    'blockchair.com/bitcoin-cash': 'BCH', 'blockchair.com/ecash': 'XEC',
                    'explorer.litecoin.net': 'LTC', 'explorer.avax.network': 'AVAX',
                    'solscan.io': 'SOL', 'polkadot.subscan.io': 'DOT',
                    'dashboard.internetcomputer.org': 'ICP', 'explorer.chiliz.com': 'CHZ',
                    'cardanoscan.io': 'ADA', 'mainnet.theoan.com': 'AION', 'algoexplorer.io': 'ALGO',
                    'explorer.ambrosus.com': 'AMB', 'viewblock.io/zilliqa': 'ZIL',
                    'viewblock.io/arweave': 'AR', 'explorer.ark.io': 'ARK', 'atomscan.com': 'ATOM',
                    'www.mintscan.io': 'CTK', 'explorer.bitcoindiamond.org': 'BCD',
                    'btgexplorer.com': 'BTG', 'bts.ai': 'BTS', 'explorer.celo.org': 'CELO',
                    'explorer.nervos.org': 'CKB', 'cerebro.cortexlabs.ai': 'CTXC',
                    'chainz.cryptoid.info': 'VIA', 'explorer.dcrdata.org': 'DCR',
                    'digiexplorer.info': 'DGB', 'dock.subscan.io': 'DOCK', 'dogechain.info': 'DOGE',
                    'explorer.elrond.com': 'EGLD', 'blockscout.com': 'ETC',
                    'explore-fetchhub.fetch.ai': 'FET', 'filfox.info': 'FIL', 'fio.bloks.io': 'FIO',
                    'explorer.firo.org': 'FIRO', 'neoscan.io': 'NEO', 'ftmscan.com': 'FTM',
                    'explorer.gochain.io': 'GO', 'block.gxb.io': 'GXS', 'hash-hash.info': 'HBAR',
                    'www.hiveblockexplorer.com': 'HIVE', 'explorer.helium.com': 'HNT',
                    'tracker.icon.foundation': 'ICX', 'www.iostabc.com': 'IOST',
                    'explorer.iota.org': 'IOTA', 'iotexscan.io': 'IOTX', 'irishub.iobscan.io': 'IRIS',
                    'kava.mintscan.io': 'KAVA', 'scope.klaytn.com': 'KLAY', 'kmdexplorer.io': 'KMD',
                    'kusama.subscan.io': 'KSM', 'explorer.lto.network': 'LTO',
                    'polygonscan.com': 'POLYGON', 'explorer.ont.io': 'ONT', 'minaexplorer.com': 'MINA',
                    'nanolooker.com': 'NANO', 'explorer.nebulas.io': 'NAS', 'explorer.nbs.plus': 'NBS',
                    'explorer.nebl.io': 'NEBL', 'nulscan.io': 'NULS', 'nxscan.com': 'NXS',
                    'explorer.harmony.one': 'ONE', 'explorer.poa.network': 'POA', 'qtum.info': 'QTUM',
                    'explorer.rsk.co': 'RSK', 'www.oasisscan.com': 'ROSE', 'ravencoin.network': 'RVN',
                    'sc.tokenview.com': 'SC', 'secretnodes.com': 'SCRT', 'explorer.skycoin.com': 'SKY',
                    'steemscan.com': 'STEEM', 'explorer.stacks.co': 'STX', 'www.thetascan.io': 'THETA',
                    'scan.tomochain.com': 'TOMO', 'explore.vechain.org': 'VET',
                    'explorer.vite.net': 'VITE', 'www.wanscan.org': 'WAN', 'wavesexplorer.com': 'WAVES',
                    'wax.eosx.io': 'WAXP', 'waltonchain.pro': 'WTC', 'chain.nem.ninja': 'XEM',
                    'verge-blockchain.info': 'XVG', 'explorer.yoyow.org': 'YOYOW',
                    'explorer.zcha.in': 'ZEC', 'explorer.zensystem.io': 'ZEN',
                },
                'impliedNetworks': {
                    'ETH': {'ERC20': 'ETH'},
                    'TRX': {'TRC20': 'TRX'},
                },
                'legalMoney': {
                    'MXN': True, 'UGX': True, 'SEK': True, 'CHF': True, 'VND': True, 'AED': True,
                    'DKK': True, 'KZT': True, 'HUF': True, 'PEN': True, 'PHP': True, 'USD': True,
                    'TRY': True, 'EUR': True, 'NGN': True, 'PLN': True, 'BRL': True, 'ZAR': True,
                    'KES': True, 'ARS': True, 'RUB': True, 'AUD': True, 'NOK': True, 'CZK': True,
                    'GBP': True, 'UAH': True, 'GHS': True, 'HKD': True, 'CAD': True, 'INR': True,
                    'JPY': True, 'NZD': True,
                },
            },
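            # Illustrative only: the 'networks' table above maps unified network codes
            # to Binance network ids, following the usual ccxt lookup pattern, e.g.:
            #
            #     networks = self.safe_value(self.options, 'networks', {})
            #     network = self.safe_string_upper(params, 'network')  # e.g. 'ERC20'
            #     network = self.safe_string(networks, network, network)  # -> 'ETH'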
            'exceptions': {
                'exact': {
                    'System is under maintenance.': OnMaintenance,
                    'System abnormality': ExchangeError,
                    'You are not authorized to execute self request.': PermissionDenied,
                    'API key does not exist': AuthenticationError,
                    'Order would trigger immediately.': OrderImmediatelyFillable,
                    'Stop price would trigger immediately.': OrderImmediatelyFillable,
                    'Order would immediately match and take.': OrderImmediatelyFillable,
                    'Account has insufficient balance for requested action.': InsufficientFunds,
                    'Rest API trading is not enabled.': ExchangeNotAvailable,
                    "You don't have permission.": PermissionDenied,  # {"msg":"You don't have permission.","success":false}
                    'Market is closed.': ExchangeNotAvailable,
                    'Too many requests. Please try again later.': DDoSProtection,
                    '-1000': ExchangeNotAvailable,
                    '-1001': ExchangeNotAvailable,
                    '-1002': AuthenticationError,
                    '-1003': RateLimitExceeded,
                    '-1013': InvalidOrder,
                    '-1015': RateLimitExceeded,
                    '-1016': ExchangeNotAvailable,
                    '-1020': BadRequest,
                    '-1021': InvalidNonce,
                    '-1022': AuthenticationError,
                    '-1100': BadRequest,
                    '-1101': BadRequest,  # Too many parameters; expected %s and received %s.
                    '-1102': BadRequest,  # Param %s or %s must be sent, but both were empty
                    '-1103': BadRequest,  # An unknown parameter was sent.
                    '-1104': BadRequest,  # Not all sent parameters were read, read 8 parameters but was sent 9
                    '-1105': BadRequest,  # Parameter %s was empty.
                    '-1106': BadRequest,  # Parameter %s sent when not required.
                    '-1111': BadRequest,  # Precision is over the maximum defined for self asset.
                    '-1112': InvalidOrder,  # No orders on book for symbol.
                    '-1114': BadRequest,  # TimeInForce parameter sent when not required.
                    '-1115': BadRequest,  # Invalid timeInForce.
                    '-1116': BadRequest,  # Invalid orderType.
                    '-1117': BadRequest,  # Invalid side.
                    '-1118': BadRequest,  # New client order ID was empty.
                    '-1119': BadRequest,  # Original client order ID was empty.
                    '-1120': BadRequest,  # Invalid interval.
                    '-1121': BadSymbol,  # Invalid symbol.
                    '-1125': AuthenticationError,  # This listenKey does not exist.
                    '-1127': BadRequest,  # More than %s hours between startTime and endTime.
                    '-1128': BadRequest,  # {"code":-1128,"msg":"Combination of optional parameters invalid."}
                    '-1130': BadRequest,  # Data sent for parameter %s is not valid.
                    '-1131': BadRequest,  # recvWindow must be less than 60000
                    '-2008': AuthenticationError,  # {"code":-2008,"msg":"Invalid Api-Key ID."}
                    '-2010': ExchangeError,  # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc...
                    '-2011': OrderNotFound,  # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER'
                    '-2013': OrderNotFound,  # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist'
                    '-2014': AuthenticationError,  # {"code":-2014, "msg": "API-key format invalid."}
                    '-2015': AuthenticationError,  # "Invalid API-key, IP, or permissions for action."
                    '-2019': InsufficientFunds,  # {"code":-2019,"msg":"Margin is insufficient."}
                    '-3005': InsufficientFunds,  # {"code":-3005,"msg":"Transferring out not allowed. Transfer out amount exceeds max amount."}
                    '-3006': InsufficientFunds,  # {"code":-3006,"msg":"Your borrow amount has exceed maximum borrow amount."}
                    '-3008': InsufficientFunds,  # {"code":-3008,"msg":"Borrow not allowed. Your borrow amount has exceed maximum borrow amount."}
                    '-3010': ExchangeError,  # {"code":-3010,"msg":"Repay not allowed. Repay amount exceeds borrow amount."}
                    '-3015': ExchangeError,  # {"code":-3015,"msg":"Repay amount exceeds borrow amount."}
                    '-3022': AccountSuspended,  # You account's trading is banned.
                    '-4028': BadRequest,
                    '-3020': InsufficientFunds,
                    '-3041': InsufficientFunds,
                    '-5013': InsufficientFunds,
                    '-11008': InsufficientFunds,  # {"code":-11008,"msg":"Exceeding the account's maximum borrowable limit."}
                    '-4051': InsufficientFunds,  # {"code":-4051,"msg":"Isolated balance insufficient."}
                },
                'broad': {
                    'has no operation privilege': PermissionDenied,
                    'MAX_POSITION': InvalidOrder,  # {"code":-2010,"msg":"Filter failure: MAX_POSITION"}
                },
            },
        })
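    # A minimal usage sketch(illustrative, not part of the class itself), assuming
    # the standard ccxt entry point:
    #
    #     import ccxt
    #     exchange = ccxt.binance({'apiKey': '...', 'secret': '...'})
    #     exchange.load_markets()
    #     print(exchange.fetch_ticker('BTC/USDT')['last'])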
    def cost_to_precision(self, symbol, cost):
        return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['quote'], self.precisionMode, self.paddingMode)

    def currency_to_precision(self, currency, fee):
        # info is available in currencies only if the user has configured his api keys
        if self.safe_value(self.currencies[currency], 'precision') is not None:
            return self.decimal_to_precision(fee, TRUNCATE, self.currencies[currency]['precision'], self.precisionMode, self.paddingMode)
        else:
            return self.number_to_string(fee)

    def nonce(self):
        return self.milliseconds() - self.options['timeDifference']

    def fetch_time(self, params={}):
        defaultType = self.safe_string_2(self.options, 'fetchTime', 'defaultType', 'spot')
        type = self.safe_string(params, 'type', defaultType)
        query = self.omit(params, 'type')
        method = 'publicGetTime'
        if type == 'future':
            method = 'fapiPublicGetTime'
        elif type == 'delivery':
            method = 'dapiPublicGetTime'
        response = getattr(self, method)(query)
        return self.safe_integer(response, 'serverTime')

    def load_time_difference(self, params={}):
        serverTime = self.fetch_time(params)
        after = self.milliseconds()
        self.options['timeDifference'] = after - serverTime
        return self.options['timeDifference']

    def fetch_currencies(self, params={}):
        fetchCurrenciesEnabled = self.safe_value(self.options, 'fetchCurrencies')
        if not fetchCurrenciesEnabled:
            return None
        # self endpoint requires authentication
        # while fetchCurrencies is a public API method by design
        # therefore we check the keys here
        # and fallback to generating the currencies from the markets
        if not self.check_required_credentials(False):
            return None
        # sandbox/testnet does not support sapi endpoints
        apiBackup = self.safe_string(self.urls, 'apiBackup')
        if apiBackup is not None:
            return None
        response = self.sapiGetCapitalConfigGetall(params)
        result = {}
        for i in range(0, len(response)):
            #
            #     {
            #         coin: 'LINK',
            #         depositAllEnable: True,
            #         withdrawAllEnable: True,
            #         name: 'ChainLink',
            #         free: '0.06168', locked: '0', freeze: '0', withdrawing: '0',
            #         ipoing: '0', ipoable: '0', storage: '0',
            #         isLegalMoney: False,
            #         trading: True,
            #         networkList: [
            #             {network: 'BNB', coin: 'LINK', withdrawIntegerMultiple: '0', isDefault: False,
            #                 depositEnable: True, withdrawEnable: True, depositDesc: '', withdrawDesc: '',
            #                 specialTips: 'Both a MEMO and an Address are required to successfully deposit your LINK BEP2 tokens to Binance.',
            #                 name: 'BEP2', resetAddressStatus: False, addressRegex: '^(bnb1)[0-9a-z]{38}$',
            #                 memoRegex: '^[0-9A-Za-z\\-_]{1,120}$', withdrawFee: '0.002', withdrawMin: '0.01',
            #                 withdrawMax: '9999999', minConfirm: 1, unLockConfirm: 0},
            #             {network: 'BSC', coin: 'LINK', withdrawIntegerMultiple: '0.00000001', isDefault: False,
            #                 depositEnable: True, withdrawEnable: True, depositDesc: '', withdrawDesc: '',
            #                 specialTips: '', name: 'BEP20(BSC)', resetAddressStatus: False,
            #                 addressRegex: '^(0x)[0-9A-Fa-f]{40}$', memoRegex: '', withdrawFee: '0.005',
            #                 withdrawMin: '0.01', withdrawMax: '9999999', minConfirm: 15, unLockConfirm: 0},
            #             {network: 'ETH', coin: 'LINK', withdrawIntegerMultiple: '0.00000001', isDefault: True,
            #                 depositEnable: True, withdrawEnable: True, depositDesc: '', withdrawDesc: '',
            #                 name: 'ERC20', resetAddressStatus: False, addressRegex: '^(0x)[0-9A-Fa-f]{40}$',
            #                 memoRegex: '', withdrawFee: '0.34', withdrawMin: '0.68', withdrawMax: '0',
            #                 minConfirm: 12, unLockConfirm: 0}
            #         ]
            #     }
            #
            entry = response[i]
            id = self.safe_string(entry, 'coin')
            name = self.safe_string(entry, 'name')
            code = self.safe_currency_code(id)
            precision = None
            isWithdrawEnabled = True
            isDepositEnabled = True
            networkList = self.safe_value(entry, 'networkList', [])
            fees = {}
            fee = None
            for j in range(0, len(networkList)):
                networkItem = networkList[j]
                network = self.safe_string(networkItem, 'network')
                # name = self.safe_string(networkItem, 'name')
                withdrawFee = self.safe_number(networkItem, 'withdrawFee')
                depositEnable = self.safe_value(networkItem, 'depositEnable')
                withdrawEnable = self.safe_value(networkItem, 'withdrawEnable')
                isDepositEnabled = isDepositEnabled or depositEnable
                isWithdrawEnabled = isWithdrawEnabled or withdrawEnable
                fees[network] = withdrawFee
                isDefault = self.safe_value(networkItem, 'isDefault')
                if isDefault or fee is None:
                    fee = withdrawFee
            trading = self.safe_value(entry, 'trading')
            active = (isWithdrawEnabled and isDepositEnabled and trading)
            result[code] = {
                'id': id,
                'name': name,
                'code': code,
                'precision': precision,
                'info': entry,
                'active': active,
                'networks': networkList,
                'fee': fee,
                'fees': fees,
                'limits': self.limits,
            }
        return result
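    # Illustrative shape of one unified currency entry built above(example values,
    # not live data):
    #
    #     currencies = exchange.fetch_currencies()
    #     currencies['LINK']['fee']     # default-network withdraw fee, e.g. 0.34
    #     currencies['LINK']['fees']    # per-network fees, e.g. {'BNB': 0.002, 'BSC': 0.005, 'ETH': 0.34}
    #     currencies['LINK']['active']  # deposit and withdraw enabled and trading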
    def fetch_markets(self, params={}):
        defaultType = self.safe_string_2(self.options, 'fetchMarkets', 'defaultType', 'spot')
        type = self.safe_string(params, 'type', defaultType)
        query = self.omit(params, 'type')
        if (type != 'spot') and (type != 'future') and (type != 'margin') and (type != 'delivery'):
            raise ExchangeError(self.id + " does not support '" + type + "' type, set exchange.options['defaultType'] to 'spot', 'margin', 'delivery' or 'future'")  # eslint-disable-line quotes
        method = 'publicGetExchangeInfo'
        if type == 'future':
            method = 'fapiPublicGetExchangeInfo'
        elif type == 'delivery':
            method = 'dapiPublicGetExchangeInfo'
        response = getattr(self, method)(query)
        #
        # spot / margin
        #
        #     {
        #         "timezone":"UTC",
        #         "serverTime":1575416692969,
        #         "rateLimits":[
        #             {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":1200},
        #             {"rateLimitType":"ORDERS","interval":"SECOND","intervalNum":10,"limit":100},
        #             {"rateLimitType":"ORDERS","interval":"DAY","intervalNum":1,"limit":200000}
        #         ],
        #         "exchangeFilters":[],
        #         "symbols":[
        #             {
        #                 "symbol":"ETHBTC",
        #                 "status":"TRADING",
        #                 "baseAsset":"ETH",
        #                 "baseAssetPrecision":8,
        #                 "quoteAsset":"BTC",
        #                 "quotePrecision":8,
        #                 "baseCommissionPrecision":8,
        #                 "quoteCommissionPrecision":8,
        #                 "orderTypes":["LIMIT","LIMIT_MAKER","MARKET","STOP_LOSS_LIMIT","TAKE_PROFIT_LIMIT"],
        #                 "icebergAllowed":true,
        #                 "ocoAllowed":true,
        #                 "quoteOrderQtyMarketAllowed":true,
        #                 "isSpotTradingAllowed":true,
        #                 "isMarginTradingAllowed":true,
        #                 "filters":[
        #                     {"filterType":"PRICE_FILTER","minPrice":"0.00000100","maxPrice":"100000.00000000","tickSize":"0.00000100"},
        #                     {"filterType":"PERCENT_PRICE","multiplierUp":"5","multiplierDown":"0.2","avgPriceMins":5},
        #                     {"filterType":"LOT_SIZE","minQty":"0.00100000","maxQty":"100000.00000000","stepSize":"0.00100000"},
        #                     {"filterType":"MIN_NOTIONAL","minNotional":"0.00010000","applyToMarket":true,"avgPriceMins":5},
        #                     {"filterType":"ICEBERG_PARTS","limit":10},
        #                     {"filterType":"MARKET_LOT_SIZE","minQty":"0.00000000","maxQty":"63100.00000000","stepSize":"0.00000000"},
        #                     {"filterType":"MAX_NUM_ALGO_ORDERS","maxNumAlgoOrders":5}
        #                 ]
        #             },
        #         ],
        #     }
        #
        # futures/usdt-margined(fapi)
        #
        #     {
        #         "timezone":"UTC",
        #         "serverTime":1575417244353,
        #         "rateLimits":[
        #             {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":1200},
        #             {"rateLimitType":"ORDERS","interval":"MINUTE","intervalNum":1,"limit":1200}
        #         ],
        #         "exchangeFilters":[],
        #         "symbols":[
        #             {
        #                 "symbol":"BTCUSDT",
        #                 "status":"TRADING",
        #                 "maintMarginPercent":"2.5000",
        #                 "requiredMarginPercent":"5.0000",
        #                 "baseAsset":"BTC",
        #                 "quoteAsset":"USDT",
        #                 "pricePrecision":2,
        #                 "quantityPrecision":3,
        #                 "baseAssetPrecision":8,
        #                 "quotePrecision":8,
        #                 "filters":[
        #                     {"minPrice":"0.01","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.01"},
        #                     {"stepSize":"0.001","filterType":"LOT_SIZE","maxQty":"1000","minQty":"0.001"},
        #                     {"stepSize":"0.001","filterType":"MARKET_LOT_SIZE","maxQty":"1000","minQty":"0.001"},
        #                     {"limit":200,"filterType":"MAX_NUM_ORDERS"},
        #                     {"multiplierDown":"0.8500","multiplierUp":"1.1500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"}
        #                 ],
        #                 "orderTypes":["LIMIT","MARKET","STOP"],
        #                 "timeInForce":["GTC","IOC","FOK","GTX"]
        #             }
        #         ]
        #     }
        #
        # delivery/coin-margined(dapi)
        #
        #     {
        #         "timezone": "UTC",
        #         "serverTime": 1597667052958,
        #         "rateLimits": [
        #             {"rateLimitType":"REQUEST_WEIGHT","interval":"MINUTE","intervalNum":1,"limit":6000},
        #             {"rateLimitType":"ORDERS","interval":"MINUTE","intervalNum":1,"limit":6000}
        #         ],
        #         "exchangeFilters": [],
        #         "symbols": [
        #             {
        #                 "symbol": "BTCUSD_200925",
        #                 "pair": "BTCUSD",
        #                 "contractType": "CURRENT_QUARTER",
        #                 "deliveryDate": 1601020800000,
        #                 "onboardDate": 1590739200000,
        #                 "contractStatus": "TRADING",
        #                 "contractSize": 100,
        #                 "marginAsset": "BTC",
        #                 "maintMarginPercent": "2.5000",
        #                 "requiredMarginPercent": "5.0000",
        #                 "baseAsset": "BTC",
        #                 "quoteAsset": "USD",
        #                 "pricePrecision": 1,
        #                 "quantityPrecision": 0,
        #                 "baseAssetPrecision": 8,
        #                 "quotePrecision": 8,
        #                 "equalQtyPrecision": 4,
        #                 "filters": [
        #                     {"minPrice":"0.1","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.1"},
        #                     {"stepSize":"1","filterType":"LOT_SIZE","maxQty":"100000","minQty":"1"},
        #                     {"stepSize":"0","filterType":"MARKET_LOT_SIZE","maxQty":"100000","minQty":"1"},
        #                     {"limit":200,"filterType":"MAX_NUM_ORDERS"},
        #                     {"multiplierDown":"0.9500","multiplierUp":"1.0500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"}
        #                 ],
        #                 "orderTypes": ["LIMIT","MARKET","STOP","STOP_MARKET","TAKE_PROFIT","TAKE_PROFIT_MARKET","TRAILING_STOP_MARKET"],
        #                 "timeInForce": ["GTC","IOC","FOK","GTX"]
        #             },
        #             {
        #                 "symbol": "BTCUSD_PERP",
        #                 "pair": "BTCUSD",
        #                 "contractType": "PERPETUAL",
        #                 "deliveryDate": 4133404800000,
        #                 "onboardDate": 1596006000000,
        #                 "contractStatus": "TRADING",
        #                 "contractSize": 100,
        #                 "marginAsset": "BTC",
        #                 "maintMarginPercent": "2.5000",
        #                 "requiredMarginPercent": "5.0000",
        #                 "baseAsset": "BTC",
        #                 "quoteAsset": "USD",
        #                 "pricePrecision": 1,
        #                 "quantityPrecision": 0,
        #                 "baseAssetPrecision": 8,
        #                 "quotePrecision": 8,
        #                 "equalQtyPrecision": 4,
        #                 "filters": [
        #                     {"minPrice":"0.1","maxPrice":"100000","filterType":"PRICE_FILTER","tickSize":"0.1"},
        #                     {"stepSize":"1","filterType":"LOT_SIZE","maxQty":"100000","minQty":"1"},
        #                     {"stepSize":"1","filterType":"MARKET_LOT_SIZE","maxQty":"100000","minQty":"1"},
        #                     {"limit":200,"filterType":"MAX_NUM_ORDERS"},
        #                     {"multiplierDown":"0.8500","multiplierUp":"1.1500","multiplierDecimal":"4","filterType":"PERCENT_PRICE"}
        #                 ],
        #                 "orderTypes": ["LIMIT","MARKET","STOP","STOP_MARKET","TAKE_PROFIT","TAKE_PROFIT_MARKET","TRAILING_STOP_MARKET"],
        #                 "timeInForce": ["GTC","IOC","FOK","GTX"]
        #             }
        #         ]
        #     }
        #
        if self.options['adjustForTimeDifference']:
            self.load_time_difference()
        markets = self.safe_value(response, 'symbols', [])
        result = []
        for i in range(0, len(markets)):
            market = markets[i]
            spot = (type == 'spot')
            future = (type == 'future')
            delivery = (type == 'delivery')
            id = self.safe_string(market, 'symbol')
            lowercaseId = self.safe_string_lower(market, 'symbol')
            baseId = self.safe_string(market, 'baseAsset')
            quoteId = self.safe_string(market, 'quoteAsset')
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            contractType = self.safe_string(market, 'contractType')
            idSymbol = (future or delivery) and (contractType != 'PERPETUAL')
            symbol = None
            expiry = None
            if idSymbol:
                symbol = id
                expiry = self.safe_integer(market, 'deliveryDate')
            else:
                symbol = base + '/' + quote
            filters = self.safe_value(market, 'filters', [])
            filtersByType = self.index_by(filters, 'filterType')
            precision = {
                'base': self.safe_integer(market, 'baseAssetPrecision'),
                'quote': self.safe_integer(market, 'quotePrecision'),
                'amount': self.safe_integer(market, 'quantityPrecision'),
                'price': self.safe_integer(market, 'pricePrecision'),
            }
            status = self.safe_string_2(market, 'status', 'contractStatus')
            active = (status == 'TRADING')
            margin = self.safe_value(market, 'isMarginTradingAllowed', False)
            contractSize = None
            fees = self.fees
            if future or delivery:
                contractSize = self.safe_string(market, 'contractSize', '1')
                fees = self.fees[type]
            maker = fees['trading']['maker']
            taker = fees['trading']['taker']
            settleId = self.safe_string(market, 'marginAsset')
            settle = self.safe_currency_code(settleId)
            entry = {
                'id': id,
                'lowercaseId': lowercaseId,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'baseId': baseId,
                'quoteId': quoteId,
                'info': market,
                'spot': spot,
                'type': type,
                'margin': margin,
                'future': future,
                'delivery': delivery,
                'linear': future,
                'inverse': delivery,
                'expiry': expiry,
                'expiryDatetime': self.iso8601(expiry),
                'settleId': settleId,
                'settle': settle,
                'active': active,
                'precision': precision,
                'contractSize': contractSize,
                'maker': maker,
                'taker': taker,
                'limits': {
                    'amount': {'min': None, 'max': None},
                    'price': {'min': None, 'max': None},
                    'cost': {'min': None, 'max': None},
                },
            }
            if 'PRICE_FILTER' in filtersByType:
                filter = self.safe_value(filtersByType, 'PRICE_FILTER', {})
                tickSize = self.safe_string(filter, 'tickSize')
                entry['precision']['price'] = self.precision_from_string(tickSize)
                # PRICE_FILTER reports zero values for maxPrice
                # since they updated filter types in November 2018
                # https://github.com/ccxt/ccxt/issues/4286
                # therefore limits['price']['max'] doesn't have any meaningful value except None
                entry['limits']['price'] = {
                    'min': self.safe_number(filter, 'minPrice'),
                    'max': self.safe_number(filter, 'maxPrice'),
                }
            if 'LOT_SIZE' in filtersByType:
                filter = self.safe_value(filtersByType, 'LOT_SIZE', {})
                stepSize = self.safe_string(filter, 'stepSize')
                entry['precision']['amount'] = self.precision_from_string(stepSize)
                entry['limits']['amount'] = {
                    'min': self.safe_number(filter, 'minQty'),
                    'max': self.safe_number(filter, 'maxQty'),
                }
            if 'MARKET_LOT_SIZE' in filtersByType:
                filter = self.safe_value(filtersByType, 'MARKET_LOT_SIZE', {})
                entry['limits']['market'] = {
                    'min': self.safe_number(filter, 'minQty'),
                    'max': self.safe_number(filter, 'maxQty'),
                }
            if 'MIN_NOTIONAL' in filtersByType:
                filter = self.safe_value(filtersByType, 'MIN_NOTIONAL', {})
                entry['limits']['cost']['min'] = self.safe_number_2(filter, 'minNotional', 'notional')
            result.append(entry)
        return result
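    # Illustrative usage of the precision/limits parsed above(assuming an
    # instantiated `exchange`); price_to_precision and amount_to_precision are
    # standard ccxt base-class helpers:
    #
    #     exchange.load_markets()
    #     market = exchange.market('BTC/USDT')
    #     price = exchange.price_to_precision('BTC/USDT', 38479.5523)   # truncated to tickSize
    #     amount = exchange.amount_to_precision('BTC/USDT', 0.00123456)  # truncated to stepSize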
"totalOpenOrderInitialMargin":"0.00000000", # "maxWithdrawAmount":"4.54000000", # "assets":[ # { # "asset":"USDT", # "walletBalance":"4.54000000", # "unrealizedProfit":"0.00000000", # "marginBalance":"4.54000000", # "maintMargin":"0.00000000", # "initialMargin":"0.00000000", # "positionInitialMargin":"0.00000000", # "openOrderInitialMargin":"0.00000000", # "maxWithdrawAmount":"4.54000000" # } # ], # "positions":[ # { # "symbol":"BTCUSDT", # "initialMargin":"0.00000", # "maintMargin":"0.00000", # "unrealizedProfit":"0.00000000", # "positionInitialMargin":"0.00000", # "openOrderInitialMargin":"0.00000" # } # ] # } # # fapiPrivateV2GetAccount # # { # "feeTier":0, # "canTrade":true, # "canDeposit":true, # "canWithdraw":true, # "updateTime":0, # "totalInitialMargin":"0.00000000", # "totalMaintMargin":"0.00000000", # "totalWalletBalance":"0.00000000", # "totalUnrealizedProfit":"0.00000000", # "totalMarginBalance":"0.00000000", # "totalPositionInitialMargin":"0.00000000", # "totalOpenOrderInitialMargin":"0.00000000", # "totalCrossWalletBalance":"0.00000000", # "totalCrossUnPnl":"0.00000000", # "availableBalance":"0.00000000", # "maxWithdrawAmount":"0.00000000", # "assets":[ # { # "asset":"BNB", # "walletBalance":"0.01000000", # "unrealizedProfit":"0.00000000", # "marginBalance":"0.01000000", # "maintMargin":"0.00000000", # "initialMargin":"0.00000000", # "positionInitialMargin":"0.00000000", # "openOrderInitialMargin":"0.00000000", # "maxWithdrawAmount":"0.01000000", # "crossWalletBalance":"0.01000000", # "crossUnPnl":"0.00000000", # "availableBalance":"0.01000000" # } # ], # "positions":[ # { # "symbol":"BTCUSDT", # "initialMargin":"0", # "maintMargin":"0", # "unrealizedProfit":"0.00000000", # "positionInitialMargin":"0", # "openOrderInitialMargin":"0", # "leverage":"20", # "isolated":false, # "entryPrice":"0.00000", # "maxNotional":"5000000", # "positionSide":"BOTH" # }, # ] # } # # fapiPrivateV2GetBalance # # [ # { # "accountAlias":"FzFzXquXXqoC", # "asset":"BNB", # "balance":"0.01000000", # "crossWalletBalance":"0.01000000", # "crossUnPnl":"0.00000000", # "availableBalance":"0.01000000", # "maxWithdrawAmount":"0.01000000" # } # ] # # savings # # { # "totalAmountInBTC": "0.3172", # "totalAmountInUSDT": "10000", # "totalFixedAmountInBTC": "0.3172", # "totalFixedAmountInUSDT": "10000", # "totalFlexibleInBTC": "0", # "totalFlexibleInUSDT": "0", # "positionAmountVos": [ # { # "asset": "USDT", # "amount": "10000", # "amountInBTC": "0.3172", # "amountInUSDT": "10000" # }, # { # "asset": "BUSD", # "amount": "0", # "amountInBTC": "0", # "amountInUSDT": "0" # } # ] # } # # binance pay # # [ # { # "asset": "BUSD", # "free": "1129.83", # "locked": "0", # "freeze": "0", # "withdrawing": "0" # } # ] # result = { 'info': response, } timestamp = None if (type == 'spot') or (type == 'margin'): timestamp = self.safe_integer(response, 'updateTime') balances = self.safe_value_2(response, 'balances', 'userAssets', []) for i in range(0, len(balances)): balance = balances[i] currencyId = self.safe_string(balance, 'asset') code = self.safe_currency_code(currencyId) account = self.account() account['free'] = self.safe_string(balance, 'free') account['used'] = self.safe_string(balance, 'locked') result[code] = account elif type == 'savings': positionAmountVos = self.safe_value(response, 'positionAmountVos') for i in range(0, len(positionAmountVos)): entry = positionAmountVos[i] currencyId = self.safe_string(entry, 'asset') code = self.safe_currency_code(currencyId) account = self.account() usedAndTotal = 
self.safe_string(entry, 'amount') account['total'] = usedAndTotal account['used'] = usedAndTotal result[code] = account elif type == 'funding': for i in range(0, len(response)): entry = response[i] account = self.account() currencyId = self.safe_string(entry, 'asset') code = self.safe_currency_code(currencyId) account['free'] = self.safe_string(entry, 'free') frozen = self.safe_string(entry, 'freeze') withdrawing = self.safe_string(entry, 'withdrawing') locked = self.safe_string(entry, 'locked') account['used'] = Precise.string_add(frozen, Precise.string_add(locked, withdrawing)) result[code] = account else: balances = response if not isinstance(response, list): balances = self.safe_value(response, 'assets', []) for i in range(0, len(balances)): balance = balances[i] currencyId = self.safe_string(balance, 'asset') code = self.safe_currency_code(currencyId) account = self.account() account['free'] = self.safe_string(balance, 'availableBalance') account['used'] = self.safe_string(balance, 'initialMargin') account['total'] = self.safe_string_2(balance, 'marginBalance', 'balance') result[code] = account result['timestamp'] = timestamp result['datetime'] = self.iso8601(timestamp) return self.parse_balance(result) def fetch_order_book(self, symbol, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit # default 100, max 5000, see https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#order-book method = 'publicGetDepth' if market['linear']: method = 'fapiPublicGetDepth' elif market['inverse']: method = 'dapiPublicGetDepth' response = getattr(self, method)(self.extend(request, params)) # # future # # { # "lastUpdateId":333598053905, # "E":1618631511986, # "T":1618631511964, # "bids":[ # ["2493.56","20.189"], # ["2493.54","1.000"], # 
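    # The parsed balance is keyed by unified currency code(illustrative usage):
    #
    #     balance = exchange.fetch_balance({'type': 'future'})
    #     usdt = balance['USDT']  # {'free': ..., 'used': ..., 'total': ...}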
["2493.51","0.005"],["2493.37","0.280"],["2493.31","0.865"],["2493.30","0.514"],["2493.29","2.309"],["2493.25","1.500"],["2493.23","0.012"],["2493.22","7.240"],["2493.21","3.349"],["2493.20","2.030"],["2493.19","58.118"],["2493.18","174.836"],["2493.17","14.436"],["2493.12","2.000"],["2493.09","3.232"],["2493.08","2.010"],["2493.07","2.000"],["2493.06","2.000"],["2493.05","2.684"],["2493.04","2.000"],["2493.03","2.000"],["2493.02","5.000"],["2493.01","2.000"],["2493.00","1.035"],["2492.99","8.546"],["2492.98","4.012"],["2492.96","40.937"],["2492.95","40.595"],["2492.94","21.051"],["2492.92","4.012"],["2492.91","0.200"],["2492.85","2.000"],["2492.83","24.929"],["2492.81","50.000"],["2492.80","0.030"],["2492.76","0.264"],["2492.73","32.098"],["2492.71","32.664"],["2492.70","4.228"],["2492.65","1.230"],["2492.61","5.598"],["2492.60","34.786"],["2492.58","10.393"],["2492.54","4.543"],["2492.50","0.400"],["2492.49","0.600"],["2492.48","4.941"],["2492.45","1.207"],["2492.43","4.878"],["2492.40","4.762"],["2492.39","36.489"],["2492.37","3.000"],["2492.36","4.882"],["2492.33","28.117"],["2492.29","0.490"],["2492.28","76.365"],["2492.27","0.200"],["2492.23","3.804"],["2492.22","1.000"],["2492.19","20.011"],["2492.17","13.500"],["2492.16","4.058"],["2492.14","35.673"],["2492.13","1.915"],["2492.12","76.896"],["2492.10","8.050"],["2492.01","16.615"],["2492.00","10.335"],["2491.95","5.880"],["2491.93","10.000"],["2491.92","3.916"],["2491.90","0.795"],["2491.87","22.000"],["2491.85","1.260"],["2491.84","4.014"],["2491.83","6.668"],["2491.73","0.855"],["2491.72","7.572"],["2491.71","7.000"],["2491.68","3.916"],["2491.66","2.500"],["2491.64","4.945"],["2491.63","2.302"],["2491.62","4.012"],["2491.61","16.170"],["2491.60","0.793"],["2491.59","0.403"],["2491.57","17.445"],["2491.56","88.177"],["2491.53","10.000"],["2491.47","0.013"],["2491.45","0.157"],["2491.44","11.733"],["2491.39","3.593"],["2491.38","3.570"],["2491.36","28.077"],["2491.35","0.808"],["2491.30","0.065"],["2491.29","4.880"],["2491.27","22.000"],["2491.24","9.021"],["2491.23","68.393"],["2491.22","0.050"],["2491.21","1.316"],["2491.20","4.000"],["2491.19","0.108"],["2491.18","0.498"],["2491.17","5.000"],["2491.14","10.000"],["2491.13","0.383"],["2491.12","125.959"],["2491.10","0.870"],["2491.08","10.518"],["2491.05","54.743"],["2491.01","7.980"],["2490.96","3.916"],["2490.95","0.135"],["2490.91","0.140"],["2490.89","8.424"],["2490.88","5.930"],["2490.84","1.208"],["2490.83","2.005"],["2490.82","5.517"],["2490.81","73.707"],["2490.80","1.042"],["2490.79","9.626"],["2490.72","3.916"],["2490.70","0.148"],["2490.69","0.403"],["2490.68","0.012"],["2490.67","21.887"],["2490.66","0.008"],["2490.64","11.500"],["2490.61","0.005"],["2490.58","68.175"],["2490.55","0.218"],["2490.54","14.132"],["2490.53","5.157"],["2490.50","0.018"],["2490.49","9.216"],["2490.48","3.979"],["2490.47","1.884"],["2490.44","0.003"],["2490.36","14.132"],["2490.35","2.008"],["2490.34","0.200"],["2490.33","0.015"],["2490.30","0.065"],["2490.29","5.500"],["2490.28","24.203"],["2490.26","4.373"],["2490.25","0.026"],["2490.24","4.000"],["2490.23","177.628"],["2490.22","14.132"],["2490.21","0.181"],["2490.20","0.645"],["2490.19","9.024"],["2490.18","0.108"],["2490.17","0.085"],["2490.16","0.077"],["2490.14","0.275"],["2490.10","0.080"],["2490.07","0.015"],["2490.04","6.056"],["2490.00","6.796"],["2489.98","0.005"],["2489.97","0.258"],["2489.96","10.084"],["2489.95","1.202"],["2489.91","10.121"],["2489.90","10.084"],["2489.88","0.040"],["2489.87","0.004"],["2489.85","0.003"],["248
9.76","3.916"],["2489.73","10.084"],["2489.71","0.272"],["2489.70","12.834"],["2489.67","0.403"],["2489.66","0.362"],["2489.64","0.738"],["2489.63","193.236"],["2489.62","14.152"],["2489.61","0.157"],["2489.59","4.011"],["2489.57","0.015"],["2489.55","0.046"],["2489.52","3.921"],["2489.51","0.005"],["2489.45","80.000"],["2489.44","0.649"],["2489.43","10.088"],["2489.39","0.009"],["2489.37","14.132"],["2489.35","72.262"],["2489.34","10.084"],["2489.33","14.136"],["2489.32","23.953"],["2489.30","0.065"],["2489.28","8.136"],["2489.24","8.022"],["2489.19","14.132"],["2489.18","0.085"],["2489.17","0.108"],["2489.14","10.084"],["2489.13","3.142"],["2489.12","77.827"],["2489.11","10.084"],["2489.10","0.080"],["2489.09","50.024"],["2489.04","3.916"],["2489.03","0.008"],["2489.01","10.084"],["2488.99","0.135"],["2488.98","0.187"],["2488.96","0.324"],["2488.92","0.064"],["2488.85","16.056"],["2488.83","14.132"],["2488.80","3.916"],["2488.79","10.084"],["2488.77","4.414"],["2488.76","0.005"],["2488.75","13.685"],["2488.73","0.020"],["2488.69","0.157"],["2488.60","80.000"],["2488.58","10.164"],["2488.57","0.004"],["2488.56","3.933"],["2488.54","3.311"],["2488.51","12.814"],["2488.50","80.099"],["2488.48","0.684"],["2488.44","0.024"],["2488.42","68.180"],["2488.39","4.412"],["2488.38","26.138"],["2488.34","44.134"],["2488.32","8.014"],["2488.30","0.065"],["2488.29","0.009"],["2488.27","4.513"],["2488.26","4.222"],["2488.25","80.000"],["2488.23","0.007"],["2488.22","0.281"],["2488.19","0.100"],["2488.18","80.100"],["2488.17","80.000"],["2488.16","8.197"],["2488.15","79.184"],["2488.13","0.025"],["2488.11","0.050"],["2488.10","0.080"],["2488.08","3.919"],["2488.04","40.103"],["2488.03","0.120"],["2488.02","0.008"],["2488.01","0.140"],["2488.00","0.406"],["2487.99","0.384"],["2487.98","0.060"],["2487.96","8.010"],["2487.94","0.246"],["2487.93","0.020"],["2487.91","0.136"],["2487.87","0.403"],["2487.84","17.910"],["2487.81","0.005"],["2487.80","0.073"],["2487.74","36.000"],["2487.73","3.225"],["2487.72","0.018"],["2487.71","0.319"],["2487.70","0.006"],["2487.66","0.003"],["2487.64","0.003"],["2487.63","0.008"],["2487.62","0.040"],["2487.60","3.916"],["2487.54","0.805"],["2487.52","0.022"],["2487.51","0.003"],["2487.50","0.051"],["2487.49","6.081"],["2487.47","80.015"],["2487.46","4.735"],["2487.45","30.000"],["2487.41","0.096"],["2487.40","0.078"],["2487.39","0.103"],["2487.37","2.279"],["2487.36","8.152"],["2487.35","2.145"],["2487.32","12.816"],["2487.31","10.023"],["2487.30","0.157"],["2487.27","0.005"],["2487.26","4.010"],["2487.25","0.008"],["2487.24","0.003"],["2487.23","0.014"],["2487.20","0.085"],["2487.17","0.011"],["2487.14","3.217"],["2487.12","3.916"],["2487.11","0.300"],["2487.10","0.088"],["2487.08","10.097"],["2487.07","1.467"],["2487.04","0.600"],["2487.01","18.363"],["2487.00","0.292"],["2486.99","0.014"],["2486.98","0.144"],["2486.97","0.443"],["2486.92","0.005"],["2486.91","0.016"],["2486.89","3.364"],["2486.88","4.166"],["2486.84","24.306"],["2486.83","0.181"],["2486.81","0.015"],["2486.80","0.082"],["2486.79","0.007"],["2486.76","0.011"],["2486.74","0.050"],["2486.73","0.782"],["2486.72","0.004"],["2486.69","0.003"],["2486.68","8.018"],["2486.66","10.004"],["2486.65","40.391"],["2486.64","3.916"],["2486.61","0.489"],["2486.60","0.196"],["2486.57","0.396"],["2486.55","4.015"],["2486.51","3.000"],["2486.50","0.003"],["2486.48","0.005"],["2486.47","0.010"],["2486.45","4.011"],["2486.44","0.602"],["2486.43","0.566"],["2486.42","3.140"],["2486.40","3.958"],["2486.39","0.003"],["2486.34","0.0
10"],["2486.31","6.281"],["2486.27","0.005"],["2486.26","0.004"],["2486.23","10.088"],["2486.22","0.015"],["2486.17","0.030"],["2486.16","3.916"],["2486.15","0.020"],["2486.13","13.130"],["2486.12","82.414"],["2486.11","0.244"],["2486.10","0.132"],["2486.08","0.720"],["2486.06","0.385"],["2486.01","0.004"],["2486.00","2.359"],["2485.99","154.159"],["2485.98","20.054"],["2485.96","1.000"],["2485.95","0.190"],["2485.92","4.463"],["2485.90","1.557"],["2485.87","0.402"],["2485.85","0.114"],["2485.81","0.900"],["2485.76","4.700"],["2485.75","0.300"],["2485.74","0.196"],["2485.73","4.010"],["2485.72","0.323"],["2485.70","0.263"],["2485.69","0.261"],["2485.68","3.688"],["2485.67","0.005"],["2485.64","1.216"],["2485.63","0.005"],["2485.62","0.015"],["2485.61","0.033"],["2485.60","0.004"],["2485.58","2.012"],["2485.56","0.020"],["2485.54","0.699"],["2485.52","0.003"],["2485.51","1.830"],["2485.48","5.964"],["2485.47","0.015"],["2485.44","7.251"],["2485.43","0.006"],["2485.42","0.644"],["2485.40","8.026"],["2485.38","0.489"],["2485.36","0.014"],["2485.35","0.005"],["2485.31","1.507"],["2485.30","2.107"],["2485.29","0.039"],["2485.28","0.642"],["2485.26","1.990"],["2485.25","4.996"],["2485.23","0.003"],["2485.22","0.277"],["2485.21","0.121"],["2485.20","3.952"],["2485.18","0.006"],["2485.17","0.043"],["2485.15","4.008"],["2485.14","4.434"],["2485.13","1.003"],["2485.05","0.204"],["2485.04","0.254"],["2485.02","5.000"],["2485.01","0.050"],["2485.00","80.821"],["2484.96","3.941"],["2484.95","10.023"],["2484.94","13.935"],["2484.92","0.059"],["2484.90","150.000"],["2484.89","0.004"],["2484.88","150.127"],["2484.87","0.004"],["2484.85","0.100"],["2484.83","0.006"],["2484.82","0.030"],["2484.81","1.246"],["2484.80","0.003"],["2484.79","0.045"],["2484.77","0.003"],["2484.74","0.036"],["2484.72","3.919"],["2484.70","0.134"],["2484.68","1.111"],["2484.66","76.955"],["2484.60","2.580"],["2484.59","31.432"],["2484.58","1.468"],["2484.55","1.153"],["2484.54","0.265"],["2484.53","20.024"],["2484.51","1.047"],["2484.50","0.818"],["2484.49","0.022"],["2484.48","3.887"],["2484.46","0.048"],["2484.45","0.224"],["2484.44","0.174"],["2484.43","223.079"],["2484.42","0.014"],["2484.41","1.115"],["2484.39","26.090"],["2484.38","0.066"],["2484.37","0.121"],["2484.34","0.255"],["2484.33","23.968"],["2484.29","0.085"],["2484.27","1.128"],["2484.26","1.456"],["2484.24","3.916"],["2484.23","28.126"],["2484.22","1.329"],["2484.19","2.015"],["2484.18","0.263"],["2484.15","15.489"],["2484.14","1.135"],["2484.13","0.572"],["2484.12","8.032"],["2484.11","0.021"],["2484.09","0.059"],["2484.08","0.038"],["2484.07","0.147"],["2484.05","24.156"],["2484.04","0.008"],["2484.01","1.184"],["2484.00","4.641"],["2483.99","0.006"],["2483.97","0.294"],["2483.96","0.424"],["2483.94","3.660"],["2483.93","2.067"],["2483.92","0.008"],["2483.89","0.141"],["2483.88","1.089"], # ["2483.87","110.000"],["2483.85","4.018"],["2483.81","150.077"],["2483.80","0.003"],["2483.77","0.020"] # ], # "asks":[ # ["2493.57","0.877"], # ["2493.62","0.063"], # ["2493.71","12.054"], # ] # } timestamp = self.safe_integer(response, 'T') orderbook = self.parse_order_book(response, symbol, timestamp) orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId') return orderbook def parse_ticker(self, ticker, market=None): # # { # symbol: 'ETHBTC', # priceChange: '0.00068700', # priceChangePercent: '2.075', # weightedAvgPrice: '0.03342681', # prevClosePrice: '0.03310300', # lastPrice: '0.03378900', # lastQty: '0.07700000', # bidPrice: '0.03378900', # bidQty: 
    def parse_ticker(self, ticker, market=None):
        #
        #     {
        #         symbol: 'ETHBTC',
        #         priceChange: '0.00068700',
        #         priceChangePercent: '2.075',
        #         weightedAvgPrice: '0.03342681',
        #         prevClosePrice: '0.03310300',
        #         lastPrice: '0.03378900',
        #         lastQty: '0.07700000',
        #         bidPrice: '0.03378900',
        #         bidQty: '7.16800000',
        #         askPrice: '0.03379000',
        #         askQty: '24.00000000',
        #         openPrice: '0.03310200',
        #         highPrice: '0.03388900',
        #         lowPrice: '0.03306900',
        #         volume: '205478.41000000',
        #         quoteVolume: '6868.48826294',
        #         openTime: 1601469986932,
        #         closeTime: 1601556386932,
        #         firstId: 196098772,
        #         lastId: 196186315,
        #         count: 87544
        #     }
        #
        # coinm
        #
        #     {
        #         baseVolume: '214549.95171161',
        #         closeTime: '1621965286847',
        #         count: '1283779',
        #         firstId: '152560106',
        #         highPrice: '39938.3',
        #         lastId: '153843955',
        #         lastPrice: '37993.4',
        #         lastQty: '1',
        #         lowPrice: '36457.2',
        #         openPrice: '37783.4',
        #         openTime: '1621878840000',
        #         pair: 'BTCUSD',
        #         priceChange: '210.0',
        #         priceChangePercent: '0.556',
        #         symbol: 'BTCUSD_PERP',
        #         volume: '81990451',
        #         weightedAvgPrice: '38215.08713747'
        #     }
        #
        timestamp = self.safe_integer(ticker, 'closeTime')
        marketId = self.safe_string(ticker, 'symbol')
        symbol = self.safe_symbol(marketId, market)
        last = self.safe_number(ticker, 'lastPrice')
        isCoinm = ('baseVolume' in ticker)
        baseVolume = None
        quoteVolume = None
        if isCoinm:
            baseVolume = self.safe_number(ticker, 'baseVolume')
            quoteVolume = self.safe_number(ticker, 'volume')
        else:
            baseVolume = self.safe_number(ticker, 'volume')
            quoteVolume = self.safe_number(ticker, 'quoteVolume')
        return self.safe_ticker({
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': self.safe_number(ticker, 'highPrice'),
            'low': self.safe_number(ticker, 'lowPrice'),
            'bid': self.safe_number(ticker, 'bidPrice'),
            'bidVolume': self.safe_number(ticker, 'bidQty'),
            'ask': self.safe_number(ticker, 'askPrice'),
            'askVolume': self.safe_number(ticker, 'askQty'),
            'vwap': self.safe_number(ticker, 'weightedAvgPrice'),
            'open': self.safe_number(ticker, 'openPrice'),
            'close': last,
            'last': last,
            'previousClose': self.safe_number(ticker, 'prevClosePrice'),  # previous day close
            'change': self.safe_number(ticker, 'priceChange'),
            'percentage': self.safe_number(ticker, 'priceChangePercent'),
            'average': None,
            'baseVolume': baseVolume,
            'quoteVolume': quoteVolume,
            'info': ticker,
        }, market)

    def fetch_status(self, params={}):
        response = self.sapiGetSystemStatus(params)
        status = self.safe_string(response, 'status')
        if status is not None:
            status = 'ok' if (status == '0') else 'maintenance'
            self.status = self.extend(self.status, {
                'status': status,
                'updated': self.milliseconds(),
            })
        return self.status

    def fetch_ticker(self, symbol, params={}):
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        method = 'publicGetTicker24hr'
        if market['linear']:
            method = 'fapiPublicGetTicker24hr'
        elif market['inverse']:
            method = 'dapiPublicGetTicker24hr'
        response = getattr(self, method)(self.extend(request, params))
        if isinstance(response, list):
            firstTicker = self.safe_value(response, 0, {})
            return self.parse_ticker(firstTicker, market)
        return self.parse_ticker(response, market)
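    # Illustrative usage:
    #
    #     ticker = exchange.fetch_ticker('BTC/USDT')
    #     spread = ticker['ask'] - ticker['bid']  # either side may be None on illiquid markets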
self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') defaultMethod = None if type == 'future': defaultMethod = 'fapiPublicGetTicker24hr' elif type == 'delivery': defaultMethod = 'dapiPublicGetTicker24hr' else: defaultMethod = 'publicGetTicker24hr' method = self.safe_string(self.options, 'fetchTickersMethod', defaultMethod) response = getattr(self, method)(query) return self.parse_tickers(response, symbols) def parse_ohlcv(self, ohlcv, market=None): # when api method = publicGetKlines or fapiPublicGetKlines or dapiPublicGetKlines # [ # 1591478520000, # open time # "0.02501300", # open # "0.02501800", # high # "0.02500000", # low # "0.02500000", # close # "22.19000000", # volume # 1591478579999, # close time # "0.55490906", # quote asset volume # 40, # number of trades # "10.92900000", # taker buy base asset volume # "0.27336462", # taker buy quote asset volume # "0" # ignore # ] # # when api method = fapiPublicGetMarkPriceKlines or fapiPublicGetIndexPriceKlines # [ # [ # 1591256460000, # Open time # "9653.29201333", # Open # "9654.56401333", # High # "9653.07367333", # Low # "9653.07367333", # Close(or latest price) # "0", # Ignore # 1591256519999, # Close time # "0", # Ignore # 60, # Number of bisic data # "0", # Ignore # "0", # Ignore # "0" # Ignore # ] # ] # return [ self.safe_integer(ohlcv, 0), self.safe_number(ohlcv, 1), self.safe_number(ohlcv, 2), self.safe_number(ohlcv, 3), self.safe_number(ohlcv, 4), self.safe_number(ohlcv, 5), ] def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) # binance docs say that the default limit 500, max 1500 for futures, max 1000 for spot markets # the reality is that the time range wider than 500 candles won't work right defaultLimit = 500 maxLimit = 1500 price = self.safe_string(params, 'price') params = self.omit(params, 'price') limit = defaultLimit if (limit is None) else min(limit, maxLimit) request = { 'interval': self.timeframes[timeframe], 'limit': limit, } if price == 'index': request['pair'] = market['id'] # Index price takes self argument instead of symbol else: request['symbol'] = market['id'] # duration = self.parse_timeframe(timeframe) if since is not None: request['startTime'] = since # # It didn't work before without the endTime # https://github.com/ccxt/ccxt/issues/8454 # # if since > 0: # endTime = self.sum(since, limit * duration * 1000 - 1) # now = self.milliseconds() # request['endTime'] = min(now, endTime) # } method = 'publicGetKlines' if price == 'mark': if market['inverse']: method = 'dapiPublicGetMarkPriceKlines' else: method = 'fapiPublicGetMarkPriceKlines' elif price == 'index': if market['inverse']: method = 'dapiPublicGetIndexPriceKlines' else: method = 'fapiPublicGetIndexPriceKlines' elif market['linear']: method = 'fapiPublicGetKlines' elif market['inverse']: method = 'dapiPublicGetKlines' response = getattr(self, method)(self.extend(request, params)) # # [ # [1591478520000,"0.02501300","0.02501800","0.02500000","0.02500000","22.19000000",1591478579999,"0.55490906",40,"10.92900000","0.27336462","0"], # [1591478580000,"0.02499600","0.02500900","0.02499400","0.02500300","21.34700000",1591478639999,"0.53370468",24,"7.53800000","0.18850725","0"], # [1591478640000,"0.02500800","0.02501100","0.02500300","0.02500800","154.14200000",1591478699999,"3.85405839",97,"5.32300000","0.13312641","0"], # ] # return self.parse_ohlcvs(response, market, timeframe, since, limit) def fetch_mark_ohlcv(self, symbol, timeframe='1m', 
since=None, limit=None, params={}): request = { 'price': 'mark', } return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params)) def fetch_index_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}): request = { 'price': 'index', } return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params)) def parse_trade(self, trade, market=None): if 'isDustTrade' in trade: return self.parse_dust_trade(trade, market) # # aggregate trades # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#compressedaggregate-trades-list # # { # "a": 26129, # Aggregate tradeId # "p": "0.01633102", # Price # "q": "4.70443515", # Quantity # "f": 27781, # First tradeId # "l": 27781, # Last tradeId # "T": 1498793709153, # Timestamp # "m": True, # Was the buyer the maker? # "M": True # Was the trade the best price match? # } # # recent public trades and old public trades # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#recent-trades-list # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#old-trade-lookup-market_data # # { # "id": 28457, # "price": "4.00000100", # "qty": "12.00000000", # "time": 1499865549590, # "isBuyerMaker": True, # "isBestMatch": True # } # # private trades # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#account-trade-list-user_data # # { # "symbol": "BNBBTC", # "id": 28457, # "orderId": 100234, # "price": "4.00000100", # "qty": "12.00000000", # "commission": "10.10000000", # "commissionAsset": "BNB", # "time": 1499865549590, # "isBuyer": True, # "isMaker": False, # "isBestMatch": True # } # # futures trades # https://binance-docs.github.io/apidocs/futures/en/#account-trade-list-user_data # # { # "accountId": 20, # "buyer": False, # "commission": "-0.07819010", # "commissionAsset": "USDT", # "counterPartyId": 653, # "id": 698759, # "maker": False, # "orderId": 25851813, # "price": "7819.01", # "qty": "0.002", # "quoteQty": "0.01563", # "realizedPnl": "-0.91539999", # "side": "SELL", # "symbol": "BTCUSDT", # "time": 1569514978020 # } # { # "symbol": "BTCUSDT", # "id": 477128891, # "orderId": 13809777875, # "side": "SELL", # "price": "38479.55", # "qty": "0.001", # "realizedPnl": "-0.00009534", # "marginAsset": "USDT", # "quoteQty": "38.47955", # "commission": "-0.00076959", # "commissionAsset": "USDT", # "time": 1612733566708, # "positionSide": "BOTH", # "maker": True, # "buyer": False # } # # {respType: FULL} # # { # "price": "4000.00000000", # "qty": "1.00000000", # "commission": "4.00000000", # "commissionAsset": "USDT", # "tradeId": "1234", # } # timestamp = self.safe_integer_2(trade, 'T', 'time') price = self.safe_string_2(trade, 'p', 'price') amount = self.safe_string_2(trade, 'q', 'qty') cost = self.safe_string_2(trade, 'quoteQty', 'baseQty') # inverse futures marketId = self.safe_string(trade, 'symbol') symbol = self.safe_symbol(marketId, market) id = self.safe_string_2(trade, 't', 'a') id = self.safe_string_2(trade, 'id', 'tradeId', id) side = None orderId = self.safe_string(trade, 'orderId') if 'm' in trade: side = 'sell' if trade['m'] else 'buy' # self is reversed intentionally elif 'isBuyerMaker' in trade: side = 'sell' if trade['isBuyerMaker'] else 'buy' elif 'side' in trade: side = self.safe_string_lower(trade, 'side') else: if 'isBuyer' in trade: side = 'buy' if trade['isBuyer'] else 'sell' # self is a True side fee = None if 'commission' in trade: fee = { 'cost': 
self.safe_string(trade, 'commission'),
                'currency': self.safe_currency_code(self.safe_string(trade, 'commissionAsset')),
            }
        takerOrMaker = None
        if 'isMaker' in trade:
            takerOrMaker = 'maker' if trade['isMaker'] else 'taker'
        if 'maker' in trade:
            takerOrMaker = 'maker' if trade['maker'] else 'taker'
        return self.safe_trade({
            'info': trade,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'id': id,
            'order': orderId,
            'type': None,
            'side': side,
            'takerOrMaker': takerOrMaker,
            'price': price,
            'amount': amount,
            'cost': cost,
            'fee': fee,
        }, market)

    def fetch_trades(self, symbol, since=None, limit=None, params={}):
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
            # 'fromId': 123,  # ID to get aggregate trades from INCLUSIVE.
            # 'startTime': 456,  # Timestamp in ms to get aggregate trades from INCLUSIVE.
            # 'endTime': 789,  # Timestamp in ms to get aggregate trades until INCLUSIVE.
            # 'limit': 500,  # default = 500, maximum = 1000
        }
        defaultType = self.safe_string_2(self.options, 'fetchTrades', 'defaultType', 'spot')
        type = self.safe_string(params, 'type', defaultType)
        query = self.omit(params, 'type')
        defaultMethod = None
        if type == 'future':
            defaultMethod = 'fapiPublicGetAggTrades'
        elif type == 'delivery':
            defaultMethod = 'dapiPublicGetAggTrades'
        else:
            defaultMethod = 'publicGetAggTrades'
        method = self.safe_string(self.options, 'fetchTradesMethod', defaultMethod)
        if method == 'publicGetAggTrades':
            if since is not None:
                request['startTime'] = since
                # https://github.com/ccxt/ccxt/issues/6400
                # https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md#compressedaggregate-trades-list
                request['endTime'] = self.sum(since, 3600000)
            if type == 'future':
                method = 'fapiPublicGetAggTrades'
            elif type == 'delivery':
                method = 'dapiPublicGetAggTrades'
        elif method == 'publicGetHistoricalTrades':
            if type == 'future':
                method = 'fapiPublicGetHistoricalTrades'
            elif type == 'delivery':
                method = 'dapiPublicGetHistoricalTrades'
        if limit is not None:
            request['limit'] = limit  # default = 500, maximum = 1000
        #
        # Caveats:
        # - default limit(500) applies only if no other parameters set, trades up
        #   to the maximum limit may be returned to satisfy other parameters
        # - if both limit and time window is set and time window contains more
        #   trades than the limit then the last trades from the window are returned
        # - 'tradeId' accepted and returned by self method is "aggregate" trade id
        #   which is different from actual trade id
        # - setting both fromId and time window results in error
        response = getattr(self, method)(self.extend(request, query))
        #
        # aggregate trades
        #
        #     [
        #         {
        #             "a": 26129,         # Aggregate tradeId
        #             "p": "0.01633102",  # Price
        #             "q": "4.70443515",  # Quantity
        #             "f": 27781,         # First tradeId
        #             "l": 27781,         # Last tradeId
        #             "T": 1498793709153, # Timestamp
        #             "m": True,          # Was the buyer the maker?
        #             "M": True           # Was the trade the best price match?
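        # Usage sketch for fetchTrades(illustrative only, not part of the
        # original file; assumes a configured instance named `exchange`):
        #
        #     exchange = ccxt.binance()
        #     since = exchange.milliseconds() - 3600000  # aggTrades window above is 1h
        #     trades = exchange.fetch_trades('BTC/USDT', since, 500)
        #     # switch to raw historical trades instead of aggregate trades:
        #     exchange.options['fetchTradesMethod'] = 'publicGetHistoricalTrades'
        #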
# } # ] # # recent public trades and historical public trades # # [ # { # "id": 28457, # "price": "4.00000100", # "qty": "12.00000000", # "time": 1499865549590, # "isBuyerMaker": True, # "isBestMatch": True # } # ] # return self.parse_trades(response, market, since, limit) def parse_order_status(self, status): statuses = { 'NEW': 'open', 'PARTIALLY_FILLED': 'open', 'FILLED': 'closed', 'CANCELED': 'canceled', 'PENDING_CANCEL': 'canceling', # currently unused 'REJECTED': 'rejected', 'EXPIRED': 'expired', } return self.safe_string(statuses, status, status) def parse_order(self, order, market=None): # # spot # # { # "symbol": "LTCBTC", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "0.0", # "cummulativeQuoteQty": "0.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "icebergQty": "0.0", # "time": 1499827319559, # "updateTime": 1499827319559, # "isWorking": True # } # # futures # # { # "symbol": "BTCUSDT", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "1.0", # "cumQuote": "10.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "updateTime": 1499827319559 # } # # createOrder with {"newOrderRespType": "FULL"} # # { # "symbol": "BTCUSDT", # "orderId": 5403233939, # "orderListId": -1, # "clientOrderId": "x-R4BD3S825e669e75b6c14f69a2c43e", # "transactTime": 1617151923742, # "price": "0.00000000", # "origQty": "0.00050000", # "executedQty": "0.00050000", # "cummulativeQuoteQty": "29.47081500", # "status": "FILLED", # "timeInForce": "GTC", # "type": "MARKET", # "side": "BUY", # "fills": [ # { # "price": "58941.63000000", # "qty": "0.00050000", # "commission": "0.00007050", # "commissionAsset": "BNB", # "tradeId": 737466631 # } # ] # } # # delivery # # { # "orderId": "18742727411", # "symbol": "ETHUSD_PERP", # "pair": "ETHUSD", # "status": "FILLED", # "clientOrderId": "x-xcKtGhcu3e2d1503fdd543b3b02419", # "price": "0", # "avgPrice": "4522.14", # "origQty": "1", # "executedQty": "1", # "cumBase": "0.00221134", # "timeInForce": "GTC", # "type": "MARKET", # "reduceOnly": False, # "closePosition": False, # "side": "SELL", # "positionSide": "BOTH", # "stopPrice": "0", # "workingType": "CONTRACT_PRICE", # "priceProtect": False, # "origType": "MARKET", # "time": "1636061952660", # "updateTime": "1636061952660" # } # status = self.parse_order_status(self.safe_string(order, 'status')) marketId = self.safe_string(order, 'symbol') symbol = self.safe_symbol(marketId, market) filled = self.safe_string(order, 'executedQty', '0') timestamp = None lastTradeTimestamp = None if 'time' in order: timestamp = self.safe_integer(order, 'time') elif 'transactTime' in order: timestamp = self.safe_integer(order, 'transactTime') elif 'updateTime' in order: if status == 'open': if Precise.string_gt(filled, '0'): lastTradeTimestamp = self.safe_integer(order, 'updateTime') else: timestamp = self.safe_integer(order, 'updateTime') average = self.safe_string(order, 'avgPrice') price = self.safe_string(order, 'price') amount = self.safe_string(order, 'origQty') # - Spot/Margin market: cummulativeQuoteQty # - Futures market: cumQuote. # Note self is not the actual cost, since Binance futures uses leverage to calculate margins. 
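        # A minimal sketch of the cost field fallbacks used below(illustrative
        # only, not part of the original file), assuming `order` is a raw order
        # dict from one of the endpoints documented above:
        #
        #     cost = order.get('cummulativeQuoteQty')  # spot/margin, quote units
        #     cost = order.get('cumQuote', cost)       # usdm futures, quote units
        #     cost = order.get('cumBase', cost)        # coinm delivery, base units
        #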
cost = self.safe_string_2(order, 'cummulativeQuoteQty', 'cumQuote') cost = self.safe_string(order, 'cumBase', cost) id = self.safe_string(order, 'orderId') type = self.safe_string_lower(order, 'type') side = self.safe_string_lower(order, 'side') fills = self.safe_value(order, 'fills', []) clientOrderId = self.safe_string(order, 'clientOrderId') timeInForce = self.safe_string(order, 'timeInForce') postOnly = (type == 'limit_maker') or (timeInForce == 'GTX') if type == 'limit_maker': type = 'limit' stopPriceString = self.safe_string(order, 'stopPrice') stopPrice = self.parse_number(self.omit_zero(stopPriceString)) return self.safe_order2({ 'info': order, 'id': id, 'clientOrderId': clientOrderId, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'lastTradeTimestamp': lastTradeTimestamp, 'symbol': symbol, 'type': type, 'timeInForce': timeInForce, 'postOnly': postOnly, 'side': side, 'price': price, 'stopPrice': stopPrice, 'amount': amount, 'cost': cost, 'average': average, 'filled': filled, 'remaining': None, 'status': status, 'fee': None, 'trades': fills, }, market) def create_reduce_only_order(self, symbol, type, side, amount, price=None, params={}): request = { 'reduceOnly': True, } return self.create_order(symbol, type, side, amount, price, self.extend(request, params)) def create_order(self, symbol, type, side, amount, price=None, params={}): self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'createOrder', 'defaultType', 'spot') orderType = self.safe_string(params, 'type', defaultType) clientOrderId = self.safe_string_2(params, 'newClientOrderId', 'clientOrderId') postOnly = self.safe_value(params, 'postOnly', False) params = self.omit(params, ['type', 'newClientOrderId', 'clientOrderId', 'postOnly']) reduceOnly = self.safe_value(params, 'reduceOnly') if reduceOnly is not None: if (orderType != 'future') and (orderType != 'delivery'): raise InvalidOrder(self.id + ' createOrder() does not support reduceOnly for ' + orderType + ' orders, reduceOnly orders are supported for futures and perpetuals only') method = 'privatePostOrder' if orderType == 'future': method = 'fapiPrivatePostOrder' elif orderType == 'delivery': method = 'dapiPrivatePostOrder' elif orderType == 'margin': method = 'sapiPostMarginOrder' # the next 5 lines are added to support for testing orders if market['spot']: test = self.safe_value(params, 'test', False) if test: method += 'Test' params = self.omit(params, 'test') # only supported for spot/margin api(all margin markets are spot markets) if postOnly: type = 'LIMIT_MAKER' uppercaseType = type.upper() validOrderTypes = self.safe_value(market['info'], 'orderTypes') if not self.in_array(uppercaseType, validOrderTypes): raise InvalidOrder(self.id + ' ' + type + ' is not a valid order type in market ' + symbol) request = { 'symbol': market['id'], 'type': uppercaseType, 'side': side.upper(), } if clientOrderId is None: broker = self.safe_value(self.options, 'broker') if broker is not None: brokerId = self.safe_string(broker, orderType) if brokerId is not None: request['newClientOrderId'] = brokerId + self.uuid22() else: request['newClientOrderId'] = clientOrderId if (orderType == 'spot') or (orderType == 'margin'): request['newOrderRespType'] = self.safe_value(self.options['newOrderRespType'], type, 'RESULT') # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills else: # delivery and future request['newOrderRespType'] = 'RESULT' # "ACK", "RESULT", default "ACK" # additional required fields depending 
on the order type timeInForceIsRequired = False priceIsRequired = False stopPriceIsRequired = False quantityIsRequired = False # # spot/margin # # LIMIT timeInForce, quantity, price # MARKET quantity or quoteOrderQty # STOP_LOSS quantity, stopPrice # STOP_LOSS_LIMIT timeInForce, quantity, price, stopPrice # TAKE_PROFIT quantity, stopPrice # TAKE_PROFIT_LIMIT timeInForce, quantity, price, stopPrice # LIMIT_MAKER quantity, price # # futures # # LIMIT timeInForce, quantity, price # MARKET quantity # STOP/TAKE_PROFIT quantity, price, stopPrice # STOP_MARKET stopPrice # TAKE_PROFIT_MARKET stopPrice # TRAILING_STOP_MARKET callbackRate # if uppercaseType == 'MARKET': quoteOrderQty = self.safe_value(self.options, 'quoteOrderQty', False) if quoteOrderQty: quoteOrderQty = self.safe_number(params, 'quoteOrderQty') precision = market['precision']['price'] if quoteOrderQty is not None: request['quoteOrderQty'] = self.decimal_to_precision(quoteOrderQty, TRUNCATE, precision, self.precisionMode) params = self.omit(params, 'quoteOrderQty') elif price is not None: request['quoteOrderQty'] = self.decimal_to_precision(amount * price, TRUNCATE, precision, self.precisionMode) else: quantityIsRequired = True else: quantityIsRequired = True elif uppercaseType == 'LIMIT': priceIsRequired = True timeInForceIsRequired = True quantityIsRequired = True elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'): stopPriceIsRequired = True quantityIsRequired = True if market['linear'] or market['inverse']: priceIsRequired = True elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'): quantityIsRequired = True stopPriceIsRequired = True priceIsRequired = True timeInForceIsRequired = True elif uppercaseType == 'LIMIT_MAKER': priceIsRequired = True quantityIsRequired = True elif uppercaseType == 'STOP': quantityIsRequired = True stopPriceIsRequired = True priceIsRequired = True elif (uppercaseType == 'STOP_MARKET') or (uppercaseType == 'TAKE_PROFIT_MARKET'): closePosition = self.safe_value(params, 'closePosition') if closePosition is None: quantityIsRequired = True stopPriceIsRequired = True elif uppercaseType == 'TRAILING_STOP_MARKET': quantityIsRequired = True callbackRate = self.safe_number(params, 'callbackRate') if callbackRate is None: raise InvalidOrder(self.id + ' createOrder() requires a callbackRate extra param for a ' + type + ' order') if quantityIsRequired: request['quantity'] = self.amount_to_precision(symbol, amount) if priceIsRequired: if price is None: raise InvalidOrder(self.id + ' createOrder() requires a price argument for a ' + type + ' order') request['price'] = self.price_to_precision(symbol, price) if timeInForceIsRequired: request['timeInForce'] = self.options['defaultTimeInForce'] # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel if stopPriceIsRequired: stopPrice = self.safe_number(params, 'stopPrice') if stopPrice is None: raise InvalidOrder(self.id + ' createOrder() requires a stopPrice extra param for a ' + type + ' order') else: params = self.omit(params, 'stopPrice') request['stopPrice'] = self.price_to_precision(symbol, stopPrice) response = getattr(self, method)(self.extend(request, params)) return self.parse_order(response, market) def fetch_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchOrder', 'defaultType', 'spot') type = 
self.safe_string(params, 'type', defaultType) method = 'privateGetOrder' if type == 'future': method = 'fapiPrivateGetOrder' elif type == 'delivery': method = 'dapiPrivateGetOrder' elif type == 'margin': method = 'sapiGetMarginOrder' request = { 'symbol': market['id'], } clientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId') if clientOrderId is not None: request['origClientOrderId'] = clientOrderId else: request['orderId'] = id query = self.omit(params, ['type', 'clientOrderId', 'origClientOrderId']) response = getattr(self, method)(self.extend(request, query)) return self.parse_order(response, market) def fetch_orders(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) method = 'privateGetAllOrders' if type == 'future': method = 'fapiPrivateGetAllOrders' elif type == 'delivery': method = 'dapiPrivateGetAllOrders' elif type == 'margin': method = 'sapiGetMarginAllOrders' request = { 'symbol': market['id'], } if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit query = self.omit(params, 'type') response = getattr(self, method)(self.extend(request, query)) # # spot # # [ # { # "symbol": "LTCBTC", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "0.0", # "cummulativeQuoteQty": "0.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "icebergQty": "0.0", # "time": 1499827319559, # "updateTime": 1499827319559, # "isWorking": True # } # ] # # futures # # [ # { # "symbol": "BTCUSDT", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "1.0", # "cumQuote": "10.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "updateTime": 1499827319559 # } # ] # return self.parse_orders(response, market, since, limit) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None query = None type = None request = {} if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') elif self.options['warnOnFetchOpenOrdersWithoutSymbol']: symbols = self.symbols numSymbols = len(symbols) fetchOpenOrdersRateLimit = int(numSymbols / 2) raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call self method frequently to avoid ban. 
Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress self warning message.') else: defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') method = 'privateGetOpenOrders' if type == 'future': method = 'fapiPrivateGetOpenOrders' elif type == 'delivery': method = 'dapiPrivateGetOpenOrders' elif type == 'margin': method = 'sapiGetMarginOpenOrders' response = getattr(self, method)(self.extend(request, query)) return self.parse_orders(response, market, since, limit) def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}): orders = self.fetch_orders(symbol, since, limit, params) return self.filter_by(orders, 'status', 'closed') def cancel_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchOpenOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) # https://github.com/ccxt/ccxt/issues/6507 origClientOrderId = self.safe_value_2(params, 'origClientOrderId', 'clientOrderId') request = { 'symbol': market['id'], # 'orderId': id, # 'origClientOrderId': id, } if origClientOrderId is None: request['orderId'] = id else: request['origClientOrderId'] = origClientOrderId method = 'privateDeleteOrder' if type == 'future': method = 'fapiPrivateDeleteOrder' elif type == 'delivery': method = 'dapiPrivateDeleteOrder' elif type == 'margin': method = 'sapiDeleteMarginOrder' query = self.omit(params, ['type', 'origClientOrderId', 'clientOrderId']) response = getattr(self, method)(self.extend(request, query)) return self.parse_order(response, market) def cancel_all_orders(self, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } defaultType = self.safe_string_2(self.options, 'cancelAllOrders', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') method = 'privateDeleteOpenOrders' if type == 'margin': method = 'sapiDeleteMarginOpenOrders' elif type == 'future': method = 'fapiPrivateDeleteAllOpenOrders' elif type == 'delivery': method = 'dapiPrivateDeleteAllOpenOrders' response = getattr(self, method)(self.extend(request, query)) if isinstance(response, list): return self.parse_orders(response, market) else: return response def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument') self.load_markets() market = self.market(symbol) defaultType = self.safe_string_2(self.options, 'fetchMyTrades', 'defaultType', 'spot') type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') method = None if type == 'spot': method = 'privateGetMyTrades' elif type == 'margin': method = 'sapiGetMarginMyTrades' elif type == 'future': method = 'fapiPrivateGetUserTrades' elif type == 'delivery': method = 'dapiPrivateGetUserTrades' request = { 'symbol': market['id'], } if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit response = getattr(self, method)(self.extend(request, params)) # # spot trade # # [ # { # "symbol": "BNBBTC", # "id": 28457, # 
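        # Usage sketch for fetchMyTrades(illustrative only, not part of the
        # original file); a symbol argument is mandatory, and 'type' routes the
        # call to the margin or futures endpoints:
        #
        #     my_trades = exchange.fetch_my_trades('BTC/USDT', None, 100)
        #     futures_fills = exchange.fetch_my_trades('BTC/USDT', None, None, {'type': 'future'})
        #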
"orderId": 100234, # "price": "4.00000100", # "qty": "12.00000000", # "commission": "10.10000000", # "commissionAsset": "BNB", # "time": 1499865549590, # "isBuyer": True, # "isMaker": False, # "isBestMatch": True, # } # ] # # futures trade # # [ # { # "accountId": 20, # "buyer": False, # "commission": "-0.07819010", # "commissionAsset": "USDT", # "counterPartyId": 653, # "id": 698759, # "maker": False, # "orderId": 25851813, # "price": "7819.01", # "qty": "0.002", # "quoteQty": "0.01563", # "realizedPnl": "-0.91539999", # "side": "SELL", # "symbol": "BTCUSDT", # "time": 1569514978020 # } # ] # return self.parse_trades(response, market, since, limit) def fetch_my_dust_trades(self, symbol=None, since=None, limit=None, params={}): # # Binance provides an opportunity to trade insignificant(i.e. non-tradable and non-withdrawable) # token leftovers(of any asset) into `BNB` coin which in turn can be used to pay trading fees with it. # The corresponding trades history is called the `Dust Log` and can be requested via the following end-point: # https://github.com/binance-exchange/binance-official-api-docs/blob/master/wapi-api.md#dustlog-user_data # self.load_markets() request = {} if since is not None: request['startTime'] = since request['endTime'] = self.sum(since, 7776000000) response = self.sapiGetAssetDribblet(self.extend(request, params)) # { # "total": "4", # "userAssetDribblets": [ # { # "operateTime": "1627575731000", # "totalServiceChargeAmount": "0.00001453", # "totalTransferedAmount": "0.00072693", # "transId": "70899815863", # "userAssetDribbletDetails": [ # { # "fromAsset": "LTC", # "amount": "0.000006", # "transferedAmount": "0.00000267", # "serviceChargeAmount": "0.00000005", # "operateTime": "1627575731000", # "transId": "70899815863" # }, # { # "fromAsset": "GBP", # "amount": "0.15949157", # "transferedAmount": "0.00072426", # "serviceChargeAmount": "0.00001448", # "operateTime": "1627575731000", # "transId": "70899815863" # } # ] # }, # ] # } results = self.safe_value(response, 'userAssetDribblets', []) rows = self.safe_integer(response, 'total', 0) data = [] for i in range(0, rows): logs = self.safe_value(results[i], 'userAssetDribbletDetails', []) for j in range(0, len(logs)): logs[j]['isDustTrade'] = True data.append(logs[j]) trades = self.parse_trades(data, None, since, limit) return self.filter_by_since_limit(trades, since, limit) def parse_dust_trade(self, trade, market=None): # # { # "fromAsset": "USDT", # "amount": "0.009669", # "transferedAmount": "0.00002992", # "serviceChargeAmount": "0.00000059", # "operateTime": "1628076010000", # "transId": "71416578712", # "isDustTrade": True # } # orderId = self.safe_string(trade, 'transId') timestamp = self.safe_integer(trade, 'operateTime') currencyId = self.safe_string(trade, 'fromAsset') tradedCurrency = self.safe_currency_code(currencyId) bnb = self.currency('BNB') earnedCurrency = bnb['code'] applicantSymbol = earnedCurrency + '/' + tradedCurrency tradedCurrencyIsQuote = False if applicantSymbol in self.markets: tradedCurrencyIsQuote = True feeCostString = self.safe_string(trade, 'serviceChargeAmount') fee = { 'currency': earnedCurrency, 'cost': self.parse_number(feeCostString), } symbol = None amountString = None costString = None side = None if tradedCurrencyIsQuote: symbol = applicantSymbol amountString = self.safe_string(trade, 'transferedAmount') costString = self.safe_string(trade, 'amount') side = 'buy' else: symbol = tradedCurrency + '/' + earnedCurrency amountString = self.safe_string(trade, 'amount') costString = 
self.safe_string(trade, 'transferedAmount') side = 'sell' priceString = None if costString is not None: if amountString: priceString = Precise.string_div(costString, amountString) id = None amount = self.parse_number(amountString) price = self.parse_number(priceString) cost = self.parse_number(costString) type = None takerOrMaker = None return { 'id': id, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'order': orderId, 'type': type, 'takerOrMaker': takerOrMaker, 'side': side, 'amount': amount, 'price': price, 'cost': cost, 'fee': fee, 'info': trade, } def fetch_deposits(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None response = None request = {} legalMoney = self.safe_value(self.options, 'legalMoney', {}) if code in legalMoney: if code is not None: currency = self.currency(code) request['transactionType'] = 0 if since is not None: request['beginTime'] = since raw = self.sapiGetFiatOrders(self.extend(request, params)) response = self.safe_value(raw, 'data') # { # "code": "000000", # "message": "success", # "data": [ # { # "orderNo": "25ced37075c1470ba8939d0df2316e23", # "fiatCurrency": "EUR", # "indicatedAmount": "15.00", # "amount": "15.00", # "totalFee": "0.00", # "method": "card", # "status": "Failed", # "createTime": 1627501026000, # "updateTime": 1627501027000 # } # ], # "total": 1, # "success": True # } else: if code is not None: currency = self.currency(code) request['coin'] = currency['id'] if since is not None: request['startTime'] = since # max 3 months range https://github.com/ccxt/ccxt/issues/6495 request['endTime'] = self.sum(since, 7776000000) if limit is not None: request['limit'] = limit response = self.sapiGetCapitalDepositHisrec(self.extend(request, params)) # [ # { # "amount": "0.01844487", # "coin": "BCH", # "network": "BCH", # "status": 1, # "address": "1NYxAJhW2281HK1KtJeaENBqHeygA88FzR", # "addressTag": "", # "txId": "bafc5902504d6504a00b7d0306a41154cbf1d1b767ab70f3bc226327362588af", # "insertTime": 1610784980000, # "transferType": 0, # "confirmTimes": "2/2" # }, # { # "amount": "4500", # "coin": "USDT", # "network": "BSC", # "status": 1, # "address": "0xc9c923c87347ca0f3451d6d308ce84f691b9f501", # "addressTag": "", # "txId": "Internal transfer 51376627901", # "insertTime": 1618394381000, # "transferType": 1, # "confirmTimes": "1/15" # } # ] return self.parse_transactions(response, currency, since, limit) def fetch_withdrawals(self, code=None, since=None, limit=None, params={}): self.load_markets() legalMoney = self.safe_value(self.options, 'legalMoney', {}) request = {} response = None currency = None if code in legalMoney: if code is not None: currency = self.currency(code) request['transactionType'] = 1 if since is not None: request['beginTime'] = since raw = self.sapiGetFiatOrders(self.extend(request, params)) response = self.safe_value(raw, 'data') # { # "code": "000000", # "message": "success", # "data": [ # { # "orderNo": "CJW706452266115170304", # "fiatCurrency": "GBP", # "indicatedAmount": "10001.50", # "amount": "100.00", # "totalFee": "1.50", # "method": "bank transfer", # "status": "Successful", # "createTime": 1620037745000, # "updateTime": 1620038480000 # }, # { # "orderNo": "CJW706287492781891584", # "fiatCurrency": "GBP", # "indicatedAmount": "10001.50", # "amount": "100.00", # "totalFee": "1.50", # "method": "bank transfer", # "status": "Successful", # "createTime": 1619998460000, # "updateTime": 1619998823000 # } # ], # "total": 39, # "success": True # } else: if code is not 
None: currency = self.currency(code) request['coin'] = currency['id'] if since is not None: request['startTime'] = since # max 3 months range https://github.com/ccxt/ccxt/issues/6495 request['endTime'] = self.sum(since, 7776000000) if limit is not None: request['limit'] = limit response = self.sapiGetCapitalWithdrawHistory(self.extend(request, params)) # [ # { # "id": "69e53ad305124b96b43668ceab158a18", # "amount": "28.75", # "transactionFee": "0.25", # "coin": "XRP", # "status": 6, # "address": "r3T75fuLjX51mmfb5Sk1kMNuhBgBPJsjza", # "addressTag": "101286922", # "txId": "19A5B24ED0B697E4F0E9CD09FCB007170A605BC93C9280B9E6379C5E6EF0F65A", # "applyTime": "2021-04-15 12:09:16", # "network": "XRP", # "transferType": 0 # }, # { # "id": "9a67628b16ba4988ae20d329333f16bc", # "amount": "20", # "transactionFee": "20", # "coin": "USDT", # "status": 6, # "address": "0x0AB991497116f7F5532a4c2f4f7B1784488628e1", # "txId": "0x77fbf2cf2c85b552f0fd31fd2e56dc95c08adae031d96f3717d8b17e1aea3e46", # "applyTime": "2021-04-15 12:06:53", # "network": "ETH", # "transferType": 0 # }, # { # "id": "a7cdc0afbfa44a48bd225c9ece958fe2", # "amount": "51", # "transactionFee": "1", # "coin": "USDT", # "status": 6, # "address": "TYDmtuWL8bsyjvcauUTerpfYyVhFtBjqyo", # "txId": "168a75112bce6ceb4823c66726ad47620ad332e69fe92d9cb8ceb76023f9a028", # "applyTime": "2021-04-13 12:46:59", # "network": "TRX", # "transferType": 0 # } # ] return self.parse_transactions(response, currency, since, limit) def parse_transaction_status_by_type(self, status, type=None): statusesByType = { 'deposit': { '0': 'pending', '1': 'ok', # Fiat # Processing, Failed, Successful, Finished, Refunding, Refunded, Refund Failed, Order Partial credit Stopped 'Processing': 'pending', 'Failed': 'failed', 'Successful': 'ok', 'Refunding': 'canceled', 'Refunded': 'canceled', 'Refund Failed': 'failed', }, 'withdrawal': { '0': 'pending', # Email Sent '1': 'canceled', # Cancelled(different from 1 = ok in deposits) '2': 'pending', # Awaiting Approval '3': 'failed', # Rejected '4': 'pending', # Processing '5': 'failed', # Failure '6': 'ok', # Completed # Fiat # Processing, Failed, Successful, Finished, Refunding, Refunded, Refund Failed, Order Partial credit Stopped 'Processing': 'pending', 'Failed': 'failed', 'Successful': 'ok', 'Refunding': 'canceled', 'Refunded': 'canceled', 'Refund Failed': 'failed', }, } statuses = self.safe_value(statusesByType, type, {}) return self.safe_string(statuses, status, status) def parse_transaction(self, transaction, currency=None): # # fetchDeposits # # { # "amount": "4500", # "coin": "USDT", # "network": "BSC", # "status": 1, # "address": "0xc9c923c87347ca0f3451d6d308ce84f691b9f501", # "addressTag": "", # "txId": "Internal transfer 51376627901", # "insertTime": 1618394381000, # "transferType": 1, # "confirmTimes": "1/15" # } # # fetchWithdrawals # # { # "id": "69e53ad305124b96b43668ceab158a18", # "amount": "28.75", # "transactionFee": "0.25", # "coin": "XRP", # "status": 6, # "address": "r3T75fuLjX51mmfb5Sk1kMNuhBgBPJsjza", # "addressTag": "101286922", # "txId": "19A5B24ED0B697E4F0E9CD09FCB007170A605BC93C9280B9E6379C5E6EF0F65A", # "applyTime": "2021-04-15 12:09:16", # "network": "XRP", # "transferType": 0 # } # # fiat transaction # withdraw # { # "orderNo": "CJW684897551397171200", # "fiatCurrency": "GBP", # "indicatedAmount": "29.99", # "amount": "28.49", # "totalFee": "1.50", # "method": "bank transfer", # "status": "Successful", # "createTime": 1614898701000, # "updateTime": 1614898820000 # } # # deposit # { # "orderNo": 
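        # A minimal sketch of the direction-dependent status mapping above
        # (illustrative only, not part of the original file); the same numeric
        # code can mean different things for deposits and withdrawals:
        #
        #     self.parse_transaction_status_by_type('1', 'deposit')     # 'ok'
        #     self.parse_transaction_status_by_type('1', 'withdrawal')  # 'canceled'
        #     self.parse_transaction_status_by_type('6', 'withdrawal')  # 'ok'
        #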
"25ced37075c1470ba8939d0df2316e23", # "fiatCurrency": "EUR", # "indicatedAmount": "15.00", # "amount": "15.00", # "totalFee": "0.00", # "method": "card", # "status": "Failed", # "createTime": "1627501026000", # "updateTime": "1627501027000" # } # id = self.safe_string_2(transaction, 'id', 'orderNo') address = self.safe_string(transaction, 'address') tag = self.safe_string(transaction, 'addressTag') # set but unused if tag is not None: if len(tag) < 1: tag = None txid = self.safe_string(transaction, 'txId') if (txid is not None) and (txid.find('Internal transfer ') >= 0): txid = txid[18:] currencyId = self.safe_string_2(transaction, 'coin', 'fiatCurrency') code = self.safe_currency_code(currencyId, currency) timestamp = None insertTime = self.safe_integer_2(transaction, 'insertTime', 'createTime') applyTime = self.parse8601(self.safe_string(transaction, 'applyTime')) type = self.safe_string(transaction, 'type') if type is None: if (insertTime is not None) and (applyTime is None): type = 'deposit' timestamp = insertTime elif (insertTime is None) and (applyTime is not None): type = 'withdrawal' timestamp = applyTime status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type) amount = self.safe_number(transaction, 'amount') feeCost = self.safe_number_2(transaction, 'transactionFee', 'totalFee') fee = None if feeCost is not None: fee = {'currency': code, 'cost': feeCost} updated = self.safe_integer_2(transaction, 'successTime', 'updateTime') internal = self.safe_integer(transaction, 'transferType', False) internal = True if internal else False return { 'info': transaction, 'id': id, 'txid': txid, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'address': address, 'addressTo': address, 'addressFrom': None, 'tag': tag, 'tagTo': tag, 'tagFrom': None, 'type': type, 'amount': amount, 'currency': code, 'status': status, 'updated': updated, 'internal': internal, 'fee': fee, } def parse_transfer_status(self, status): statuses = { 'CONFIRMED': 'ok', } return self.safe_string(statuses, status, status) def parse_transfer(self, transfer, currency=None): # # transfer # # { # "tranId":13526853623 # } # # fetchTransfers # # { # timestamp: 1614640878000, # asset: 'USDT', # amount: '25', # type: 'MAIN_UMFUTURE', # status: 'CONFIRMED', # tranId: 43000126248 # } # id = self.safe_string(transfer, 'tranId') currencyId = self.safe_string(transfer, 'asset') code = self.safe_currency_code(currencyId, currency) amount = self.safe_number(transfer, 'amount') type = self.safe_string(transfer, 'type') fromAccount = None toAccount = None typesByAccount = self.safe_value(self.options, 'typesByAccount', {}) if type is not None: parts = type.split('_') fromAccount = self.safe_value(parts, 0) toAccount = self.safe_value(parts, 1) fromAccount = self.safe_string(typesByAccount, fromAccount, fromAccount) toAccount = self.safe_string(typesByAccount, toAccount, toAccount) timestamp = self.safe_integer(transfer, 'timestamp') status = self.parse_transfer_status(self.safe_string(transfer, 'status')) return { 'info': transfer, 'id': id, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'currency': code, 'amount': amount, 'fromAccount': fromAccount, 'toAccount': toAccount, 'status': status, } def parse_income(self, income, market=None): # # { # "symbol": "ETHUSDT", # "incomeType": "FUNDING_FEE", # "income": "0.00134317", # "asset": "USDT", # "time": "1621584000000", # "info": "FUNDING_FEE", # "tranId": "4480321991774044580", # "tradeId": "" # } # marketId = 
self.safe_string(income, 'symbol') symbol = self.safe_symbol(marketId, market) amount = self.safe_number(income, 'income') currencyId = self.safe_string(income, 'asset') code = self.safe_currency_code(currencyId) id = self.safe_string(income, 'tranId') timestamp = self.safe_integer(income, 'time') return { 'info': income, 'symbol': symbol, 'code': code, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'id': id, 'amount': amount, } def parse_incomes(self, incomes, market=None, since=None, limit=None): result = [] for i in range(0, len(incomes)): entry = incomes[i] parsed = self.parse_income(entry, market) result.append(parsed) sorted = self.sort_by(result, 'timestamp') return self.filter_by_since_limit(sorted, since, limit) def transfer(self, code, amount, fromAccount, toAccount, params={}): self.load_markets() currency = self.currency(code) type = self.safe_string(params, 'type') if type is None: accountsByType = self.safe_value(self.options, 'accountsByType', {}) fromAccount = fromAccount.lower() toAccount = toAccount.lower() fromId = self.safe_string(accountsByType, fromAccount) toId = self.safe_string(accountsByType, toAccount) if fromId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' fromAccount must be one of ' + ', '.join(keys)) if toId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' toAccount must be one of ' + ', '.join(keys)) type = fromId + '_' + toId request = { 'asset': currency['id'], 'amount': self.currency_to_precision(code, amount), 'type': type, } response = self.sapiPostAssetTransfer(self.extend(request, params)) # # { # "tranId":13526853623 # } # transfer = self.parse_transfer(response, currency) return self.extend(transfer, { 'amount': amount, 'currency': code, 'fromAccount': fromAccount, 'toAccount': toAccount, }) def fetch_transfers(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None if code is not None: currency = self.currency(code) defaultType = self.safe_string_2(self.options, 'fetchTransfers', 'defaultType', 'spot') fromAccount = self.safe_string(params, 'fromAccount', defaultType) defaultTo = 'spot' if (fromAccount == 'future') else 'future' toAccount = self.safe_string(params, 'toAccount', defaultTo) type = self.safe_string(params, 'type') accountsByType = self.safe_value(self.options, 'accountsByType', {}) fromId = self.safe_string(accountsByType, fromAccount) toId = self.safe_string(accountsByType, toAccount) if type is None: if fromId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' fromAccount parameter must be one of ' + ', '.join(keys)) if toId is None: keys = list(accountsByType.keys()) raise ExchangeError(self.id + ' toAccount parameter must be one of ' + ', '.join(keys)) type = fromId + '_' + toId request = { 'type': type, } if since is not None: request['startTime'] = since if limit is not None: request['size'] = limit response = self.sapiGetAssetTransfer(self.extend(request, params)) # # { # total: 3, # rows: [ # { # timestamp: 1614640878000, # asset: 'USDT', # amount: '25', # type: 'MAIN_UMFUTURE', # status: 'CONFIRMED', # tranId: 43000126248 # }, # ] # } # rows = self.safe_value(response, 'rows', []) return self.parse_transfers(rows, currency, since, limit) def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'coin': currency['id'], # 'network': 'ETH', # 'BSC', 'XMR', you can get network and isDefault in networkList in the response of 
sapiGetCapitalConfigDetail } networks = self.safe_value(self.options, 'networks', {}) network = self.safe_string_upper(params, 'network') # self line allows the user to specify either ERC20 or ETH network = self.safe_string(networks, network, network) # handle ERC20>ETH alias if network is not None: request['network'] = network params = self.omit(params, 'network') # has support for the 'network' parameter # https://binance-docs.github.io/apidocs/spot/en/#deposit-address-supporting-network-user_data response = self.sapiGetCapitalDepositAddress(self.extend(request, params)) # # { # currency: 'XRP', # address: 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh', # tag: '108618262', # info: { # coin: 'XRP', # address: 'rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh', # tag: '108618262', # url: 'https://bithomp.com/explorer/rEb8TK3gBgk5auZkwc6sHnwrGVJH8DuaLh' # } # } # address = self.safe_string(response, 'address') url = self.safe_string(response, 'url') impliedNetwork = None if url is not None: reverseNetworks = self.safe_value(self.options, 'reverseNetworks', {}) parts = url.split('/') topLevel = self.safe_string(parts, 2) if (topLevel == 'blockchair.com') or (topLevel == 'viewblock.io'): subLevel = self.safe_string(parts, 3) if subLevel is not None: topLevel = topLevel + '/' + subLevel impliedNetwork = self.safe_string(reverseNetworks, topLevel) impliedNetworks = self.safe_value(self.options, 'impliedNetworks', { 'ETH': {'ERC20': 'ETH'}, 'TRX': {'TRC20': 'TRX'}, }) if code in impliedNetworks: conversion = self.safe_value(impliedNetworks, code, {}) impliedNetwork = self.safe_string(conversion, impliedNetwork, impliedNetwork) tag = self.safe_string(response, 'tag', '') if len(tag) == 0: tag = None self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'network': impliedNetwork, 'info': response, } def fetch_funding_fees(self, codes=None, params={}): self.load_markets() response = self.sapiGetCapitalConfigGetall(params) # # [ # { # coin: 'BAT', # depositAllEnable: True, # withdrawAllEnable: True, # name: 'Basic Attention Token', # free: '0', # locked: '0', # freeze: '0', # withdrawing: '0', # ipoing: '0', # ipoable: '0', # storage: '0', # isLegalMoney: False, # trading: True, # networkList: [ # { # network: 'BNB', # coin: 'BAT', # withdrawIntegerMultiple: '0.00000001', # isDefault: False, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'The name of self asset is Basic Attention Token(BAT). Both a MEMO and an Address are required to successfully deposit your BEP2 tokens to Binance.', # name: 'BEP2', # resetAddressStatus: False, # addressRegex: '^(bnb1)[0-9a-z]{38}$', # memoRegex: '^[0-9A-Za-z\\-_]{1,120}$', # withdrawFee: '0.27', # withdrawMin: '0.54', # withdrawMax: '10000000000', # minConfirm: '1', # unLockConfirm: '0' # }, # { # network: 'BSC', # coin: 'BAT', # withdrawIntegerMultiple: '0.00000001', # isDefault: False, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'The name of self asset is Basic Attention Token. 
Please ensure you are depositing Basic Attention Token(BAT) tokens under the contract address ending in 9766e.', # name: 'BEP20(BSC)', # resetAddressStatus: False, # addressRegex: '^(0x)[0-9A-Fa-f]{40}$', # memoRegex: '', # withdrawFee: '0.27', # withdrawMin: '0.54', # withdrawMax: '10000000000', # minConfirm: '15', # unLockConfirm: '0' # }, # { # network: 'ETH', # coin: 'BAT', # withdrawIntegerMultiple: '0.00000001', # isDefault: True, # depositEnable: True, # withdrawEnable: True, # depositDesc: '', # withdrawDesc: '', # specialTips: 'The name of self asset is Basic Attention Token. Please ensure you are depositing Basic Attention Token(BAT) tokens under the contract address ending in 887ef.', # name: 'ERC20', # resetAddressStatus: False, # addressRegex: '^(0x)[0-9A-Fa-f]{40}$', # memoRegex: '', # withdrawFee: '27', # withdrawMin: '54', # withdrawMax: '10000000000', # minConfirm: '12', # unLockConfirm: '0' # } # ] # } # ] # withdrawFees = {} for i in range(0, len(response)): entry = response[i] currencyId = self.safe_string(entry, 'coin') code = self.safe_currency_code(currencyId) networkList = self.safe_value(entry, 'networkList') withdrawFees[code] = {} for j in range(0, len(networkList)): networkEntry = networkList[j] networkId = self.safe_string(networkEntry, 'network') networkCode = self.safe_currency_code(networkId) fee = self.safe_number(networkEntry, 'withdrawFee') withdrawFees[code][networkCode] = fee return { 'withdraw': withdrawFees, 'deposit': {}, 'info': response, } def withdraw(self, code, amount, address, tag=None, params={}): tag, params = self.handle_withdraw_tag_and_params(tag, params) self.check_address(address) self.load_markets() currency = self.currency(code) request = { 'coin': currency['id'], 'address': address, 'amount': amount, # https://binance-docs.github.io/apidocs/spot/en/#withdraw-sapi # issue sapiGetCapitalConfigGetall() to get networks for withdrawing USDT ERC20 vs USDT Omni # 'network': 'ETH', # 'BTC', 'TRX', etc, optional } if tag is not None: request['addressTag'] = tag networks = self.safe_value(self.options, 'networks', {}) network = self.safe_string_upper(params, 'network') # self line allows the user to specify either ERC20 or ETH network = self.safe_string(networks, network, network) # handle ERC20>ETH alias if network is not None: request['network'] = network params = self.omit(params, 'network') response = self.sapiPostCapitalWithdrawApply(self.extend(request, params)) # {id: '9a67628b16ba4988ae20d329333f16bc'} return { 'info': response, 'id': self.safe_string(response, 'id'), } def parse_trading_fee(self, fee, market=None): # # { # "symbol": "ADABNB", # "makerCommission": 0.001, # "takerCommission": 0.001 # } # marketId = self.safe_string(fee, 'symbol') symbol = self.safe_symbol(marketId) return { 'info': fee, 'symbol': symbol, 'maker': self.safe_number(fee, 'makerCommission'), 'taker': self.safe_number(fee, 'takerCommission'), } def fetch_trading_fee(self, symbol, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } response = self.sapiGetAssetTradeFee(self.extend(request, params)) # # [ # { # "symbol": "BTCUSDT", # "makerCommission": "0.001", # "takerCommission": "0.001" # } # ] # first = self.safe_value(response, 0, {}) return self.parse_trading_fee(first) def fetch_trading_fees(self, params={}): self.load_markets() method = None defaultType = self.safe_string_2(self.options, 'fetchFundingRates', 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) query = 
self.omit(params, 'type') if (type == 'spot') or (type == 'margin'): method = 'sapiGetAssetTradeFee' elif type == 'future': method = 'fapiPrivateGetAccount' elif type == 'delivery': method = 'dapiPrivateGetAccount' response = getattr(self, method)(query) # # sapi / spot # # [ # { # "symbol": "ZRXBNB", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # { # "symbol": "ZRXBTC", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # ] # # fapi / future / linear # # { # "feeTier": 0, # account commisssion tier # "canTrade": True, # if can trade # "canDeposit": True, # if can transfer in asset # "canWithdraw": True, # if can transfer out asset # "updateTime": 0, # "totalInitialMargin": "0.00000000", # total initial margin required with current mark price(useless with isolated positions), only for USDT asset # "totalMaintMargin": "0.00000000", # total maintenance margin required, only for USDT asset # "totalWalletBalance": "23.72469206", # total wallet balance, only for USDT asset # "totalUnrealizedProfit": "0.00000000", # total unrealized profit, only for USDT asset # "totalMarginBalance": "23.72469206", # total margin balance, only for USDT asset # "totalPositionInitialMargin": "0.00000000", # initial margin required for positions with current mark price, only for USDT asset # "totalOpenOrderInitialMargin": "0.00000000", # initial margin required for open orders with current mark price, only for USDT asset # "totalCrossWalletBalance": "23.72469206", # crossed wallet balance, only for USDT asset # "totalCrossUnPnl": "0.00000000", # unrealized profit of crossed positions, only for USDT asset # "availableBalance": "23.72469206", # available balance, only for USDT asset # "maxWithdrawAmount": "23.72469206" # maximum amount for transfer out, only for USDT asset # ... 
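        # A minimal sketch of the tier lookup performed in the derivatives
        # branches below(illustrative only, not part of the original file): the
        # account's 'feeTier' indexes the statically configured fee schedule.
        #
        #     fee_tier = 2  # from the fapi/dapi account response
        #     tiers = exchange.fees['future']['trading']['tiers']
        #     maker_rate = tiers['maker'][fee_tier][1]
        #     taker_rate = tiers['taker'][fee_tier][1]
        #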
# } # # dapi / delivery / inverse # # { # "canDeposit": True, # "canTrade": True, # "canWithdraw": True, # "feeTier": 2, # "updateTime": 0 # } # if (type == 'spot') or (type == 'margin'): # # [ # { # "symbol": "ZRXBNB", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # { # "symbol": "ZRXBTC", # "makerCommission": "0.001", # "takerCommission": "0.001" # }, # ] # result = {} for i in range(0, len(response)): fee = self.parse_trading_fee(response[i]) symbol = fee['symbol'] result[symbol] = fee return result elif type == 'future': # # { # "feeTier": 0, # account commisssion tier # "canTrade": True, # if can trade # "canDeposit": True, # if can transfer in asset # "canWithdraw": True, # if can transfer out asset # "updateTime": 0, # "totalInitialMargin": "0.00000000", # total initial margin required with current mark price(useless with isolated positions), only for USDT asset # "totalMaintMargin": "0.00000000", # total maintenance margin required, only for USDT asset # "totalWalletBalance": "23.72469206", # total wallet balance, only for USDT asset # "totalUnrealizedProfit": "0.00000000", # total unrealized profit, only for USDT asset # "totalMarginBalance": "23.72469206", # total margin balance, only for USDT asset # "totalPositionInitialMargin": "0.00000000", # initial margin required for positions with current mark price, only for USDT asset # "totalOpenOrderInitialMargin": "0.00000000", # initial margin required for open orders with current mark price, only for USDT asset # "totalCrossWalletBalance": "23.72469206", # crossed wallet balance, only for USDT asset # "totalCrossUnPnl": "0.00000000", # unrealized profit of crossed positions, only for USDT asset # "availableBalance": "23.72469206", # available balance, only for USDT asset # "maxWithdrawAmount": "23.72469206" # maximum amount for transfer out, only for USDT asset # ... 
#     }
            #
            symbols = list(self.markets.keys())
            result = {}
            feeTier = self.safe_integer(response, 'feeTier')
            feeTiers = self.fees[type]['trading']['tiers']
            maker = feeTiers['maker'][feeTier][1]
            taker = feeTiers['taker'][feeTier][1]
            for i in range(0, len(symbols)):
                symbol = symbols[i]
                result[symbol] = {
                    'info': {
                        'feeTier': feeTier,
                    },
                    'symbol': symbol,
                    'maker': maker,
                    'taker': taker,
                }
            return result
        elif type == 'delivery':
            #
            #     {
            #         "canDeposit": True,
            #         "canTrade": True,
            #         "canWithdraw": True,
            #         "feeTier": 2,
            #         "updateTime": 0
            #     }
            #
            symbols = list(self.markets.keys())
            result = {}
            feeTier = self.safe_integer(response, 'feeTier')
            feeTiers = self.fees[type]['trading']['tiers']
            maker = feeTiers['maker'][feeTier][1]
            taker = feeTiers['taker'][feeTier][1]
            for i in range(0, len(symbols)):
                symbol = symbols[i]
                result[symbol] = {
                    'info': {
                        'feeTier': feeTier,
                    },
                    'symbol': symbol,
                    'maker': maker,
                    'taker': taker,
                }
            return result

    def futures_transfer(self, code, amount, type, params={}):
        if (type < 1) or (type > 4):
            raise ArgumentsRequired(self.id + ' type must be between 1 and 4')
        self.load_markets()
        currency = self.currency(code)
        request = {
            'asset': currency['id'],
            'amount': amount,
            'type': type,
        }
        response = self.sapiPostFuturesTransfer(self.extend(request, params))
        #
        #     {
        #         "tranId": 100000001
        #     }
        #
        return self.parse_transfer(response, currency)

    def fetch_funding_rate(self, symbol, params={}):
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        method = None
        if market['linear']:
            method = 'fapiPublicGetPremiumIndex'
        elif market['inverse']:
            method = 'dapiPublicGetPremiumIndex'
        else:
            raise NotSupported(self.id + ' fetchFundingRate() supports linear and inverse contracts only')
        response = getattr(self, method)(self.extend(request, params))
        if market['inverse']:
            response = response[0]
        #
        #     {
        #         "symbol": "BTCUSDT",
        #         "markPrice": "45802.81129892",
        #         "indexPrice": "45745.47701915",
        #         "estimatedSettlePrice": "45133.91753671",
        #         "lastFundingRate": "0.00063521",
        #         "interestRate": "0.00010000",
        #         "nextFundingTime": "1621267200000",
        #         "time": "1621252344001"
        #     }
        #
        return self.parse_funding_rate(response, market)

    def fetch_funding_rate_history(self, symbol=None, since=None, limit=None, params={}):
        #
        # Gets a history of funding rates with their timestamps
        #  (param) symbol: Future currency pair(e.g. "BTC/USDT")
        #  (param) limit: maximum number of data points returned
        #  (param) since: Unix timestamp in milliseconds for the time of the earliest requested funding rate
        #  (param) params: Object containing more params for the request
        #     - until: Unix timestamp in milliseconds for the time of the latest requested funding rate
        #  return: [{symbol, fundingRate, timestamp}]
        #
        self.load_markets()
        request = {}
        method = None
        defaultType = self.safe_string_2(self.options, 'fetchFundingRateHistory', 'defaultType', 'future')
        type = self.safe_string(params, 'type', defaultType)
        params = self.omit(params, 'type')
        if type == 'future':
            method = 'fapiPublicGetFundingRate'
        elif type == 'delivery':
            method = 'dapiPublicGetFundingRate'
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
            if market['linear']:
                method = 'fapiPublicGetFundingRate'
            elif market['inverse']:
                method = 'dapiPublicGetFundingRate'
        if method is None:
            raise NotSupported(self.id + ' fetchFundingRateHistory() not supported for ' + type + ' markets')
        if since is not None:
            request['startTime'] = since
        till = self.safe_integer(params, 'till')  # unified in milliseconds
        endTime = self.safe_string(params, 'endTime', till)  # exchange-specific in milliseconds
        params = self.omit(params, ['endTime', 'till'])
        if endTime is not None:
            request['endTime'] = endTime
        if limit is not None:
            request['limit'] = limit
        response = getattr(self, method)(self.extend(request, params))
        #
        #     {
        #         "symbol": "BTCUSDT",
        #         "fundingRate": "0.00063521",
        #         "fundingTime": "1621267200000",
        #     }
        #
        rates = []
        for i in range(0, len(response)):
            entry = response[i]
            timestamp = self.safe_integer(entry, 'fundingTime')
            rates.append({
                'info': entry,
                'symbol': self.safe_symbol(self.safe_string(entry, 'symbol')),
                'fundingRate': self.safe_number(entry, 'fundingRate'),
                'timestamp': timestamp,
                'datetime': self.iso8601(timestamp),
            })
        sorted = self.sort_by(rates, 'timestamp')
        return self.filter_by_symbol_since_limit(sorted, symbol, since, limit)

    def fetch_funding_rates(self, symbols=None, params={}):
        self.load_markets()
        method = None
        defaultType = self.safe_string_2(self.options, 'fetchFundingRates', 'defaultType', 'future')
        type = self.safe_string(params, 'type', defaultType)
        query = self.omit(params, 'type')
        if type == 'future':
            method = 'fapiPublicGetPremiumIndex'
        elif type == 'delivery':
            method = 'dapiPublicGetPremiumIndex'
        else:
            raise NotSupported(self.id + ' fetchFundingRates() supports linear and inverse contracts only')
        response = getattr(self, method)(query)
        result = []
        for i in range(0, len(response)):
            entry = response[i]
            parsed = self.parse_funding_rate(entry)
            result.append(parsed)
        return self.filter_by_array(result, 'symbol', symbols)

    def parse_funding_rate(self, premiumIndex, market=None):
        # ensure it matches with https://www.binance.com/en/futures/funding-history/0
        #
        #     {
        #         "symbol": "BTCUSDT",
        #         "markPrice": "45802.81129892",
        #         "indexPrice": "45745.47701915",
        #         "estimatedSettlePrice": "45133.91753671",
        #         "lastFundingRate": "0.00063521",
        #         "interestRate": "0.00010000",
        #         "nextFundingTime": "1621267200000",
        #         "time": "1621252344001"
        #     }
        #
        timestamp = self.safe_integer(premiumIndex, 'time')
        marketId = self.safe_string(premiumIndex, 'symbol')
        symbol = self.safe_symbol(marketId, market)
        markPrice = self.safe_number(premiumIndex, 'markPrice')
        indexPrice = self.safe_number(premiumIndex, 'indexPrice')
        interestRate = self.safe_number(premiumIndex, 'interestRate')
        estimatedSettlePrice = self.safe_number(premiumIndex, 'estimatedSettlePrice')
        nextFundingRate = self.safe_number(premiumIndex, 'lastFundingRate')
        nextFundingTime = self.safe_integer(premiumIndex, 'nextFundingTime')
        previousFundingTime = nextFundingTime - (8 * 3600000)
        return {
            'info': premiumIndex,
            'symbol': symbol,
            'markPrice': markPrice,
            'indexPrice': indexPrice,
            'interestRate': interestRate,
            'estimatedSettlePrice': estimatedSettlePrice,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'previousFundingRate': None,
            'nextFundingRate': nextFundingRate,
            'previousFundingTimestamp': previousFundingTime,  # subtract 8 hours
            'nextFundingTimestamp': nextFundingTime,
            'previousFundingDatetime': self.iso8601(previousFundingTime),
            'nextFundingDatetime': self.iso8601(nextFundingTime),
        }

    def parse_account_positions(self, account):
        positions = self.safe_value(account, 'positions')
        assets = self.safe_value(account, 'assets')
        balances = {}
        for i in range(0, len(assets)):
            entry = assets[i]
            currencyId = self.safe_string(entry, 'asset')
            code = self.safe_currency_code(currencyId)
            crossWalletBalance = self.safe_string(entry, 'crossWalletBalance')
            crossUnPnl = self.safe_string(entry, 'crossUnPnl')
            balances[code] = {
                'crossMargin': Precise.string_add(crossWalletBalance, crossUnPnl),
                'crossWalletBalance': crossWalletBalance,
            }
        result = []
        for i in range(0, len(positions)):
            position = positions[i]
            marketId = self.safe_string(position, 'symbol')
            market = self.safe_market(marketId)
            code = market['quote'] if (self.options['defaultType'] == 'future') else market['base']
            # sometimes not all the codes are correctly returned...
            if code in balances:
                parsed = self.parse_account_position(self.extend(position, {
                    'crossMargin': balances[code]['crossMargin'],
                    'crossWalletBalance': balances[code]['crossWalletBalance'],
                }), market)
                result.append(parsed)
        return result

    def parse_account_position(self, position, market=None):
        #
        # usdm
        #     {
        #         "symbol": "BTCBUSD",
        #         "initialMargin": "0",
        #         "maintMargin": "0",
        #         "unrealizedProfit": "0.00000000",
        #         "positionInitialMargin": "0",
        #         "openOrderInitialMargin": "0",
        #         "leverage": "20",
        #         "isolated": False,
        #         "entryPrice": "0.0000",
        #         "maxNotional": "100000",
        #         "positionSide": "BOTH",
        #         "positionAmt": "0.000",
        #         "notional": "0",
        #         "isolatedWallet": "0",
        #         "updateTime": "0",
        #         "crossMargin": "100.93634809",
        #     }
        #
        # coinm
        #     {
        #         "symbol": "BTCUSD_210625",
        #         "initialMargin": "0.00024393",
        #         "maintMargin": "0.00002439",
        #         "unrealizedProfit": "-0.00000163",
        #         "positionInitialMargin": "0.00024393",
        #         "openOrderInitialMargin": "0",
        #         "leverage": "10",
        #         "isolated": False,
        #         "positionSide": "BOTH",
        #         "entryPrice": "41021.20000069",
        #         "maxQty": "100",
        #         "notionalValue": "0.00243939",
        #         "isolatedWallet": "0",
        #         "crossMargin": "0.314"
        #         "crossWalletBalance": "34",
        #     }
        #
        marketId = self.safe_string(position, 'symbol')
        market = self.safe_market(marketId, market)
        symbol = market['symbol']
        leverageString = self.safe_string(position, 'leverage')
        leverage = int(leverageString)
        initialMarginString = self.safe_string(position, 'initialMargin')
        initialMargin = self.parse_number(initialMarginString)
        initialMarginPercentageString = Precise.string_div('1', leverageString, 8)
        rational = (1000 % leverage) == 0
        if not rational:
            initialMarginPercentageString = Precise.string_div(Precise.string_add(initialMarginPercentageString, '1e-8'), '1', 8)
        usdm = ('notional' in position)
        maintenanceMarginString = self.safe_string(position, 'maintMargin')
        maintenanceMargin = self.parse_number(maintenanceMarginString)
        entryPriceString = self.safe_string(position, 'entryPrice')
        entryPrice =
self.parse_number(entryPriceString) notionalString = self.safe_string_2(position, 'notional', 'notionalValue') notionalStringAbs = Precise.string_abs(notionalString) notionalFloat = float(notionalString) notionalFloatAbs = float(notionalStringAbs) notional = self.parse_number(Precise.string_abs(notionalString)) contractsString = self.safe_string(position, 'positionAmt') contractsStringAbs = Precise.string_abs(contractsString) if contractsString is None: entryNotional = Precise.string_mul(Precise.string_mul(leverageString, initialMarginString), entryPriceString) contractsString = Precise.string_div(entryNotional, market['contractSize']) contractsStringAbs = Precise.string_div(Precise.string_add(contractsString, '0.5'), '1', 0) contracts = self.parse_number(contractsStringAbs) leverageBrackets = self.safe_value(self.options, 'leverageBrackets', {}) leverageBracket = self.safe_value(leverageBrackets, symbol, []) maintenanceMarginPercentageString = None for i in range(0, len(leverageBracket)): bracket = leverageBracket[i] if notionalFloatAbs < bracket[0]: break maintenanceMarginPercentageString = bracket[1] maintenanceMarginPercentage = self.parse_number(maintenanceMarginPercentageString) unrealizedPnlString = self.safe_string(position, 'unrealizedProfit') unrealizedPnl = self.parse_number(unrealizedPnlString) timestamp = self.safe_integer(position, 'updateTime') if timestamp == 0: timestamp = None isolated = self.safe_value(position, 'isolated') marginType = None collateralString = None walletBalance = None if isolated: marginType = 'isolated' walletBalance = self.safe_string(position, 'isolatedWallet') collateralString = Precise.string_add(walletBalance, unrealizedPnlString) else: marginType = 'cross' walletBalance = self.safe_string(position, 'crossWalletBalance') collateralString = self.safe_string(position, 'crossMargin') collateral = self.parse_number(collateralString) marginRatio = None side = None percentage = None liquidationPriceStringRaw = None liquidationPrice = None if notionalFloat == 0.0: entryPrice = None else: side = 'short' if (notionalFloat < 0) else 'long' marginRatio = self.parse_number(Precise.string_div(Precise.string_add(Precise.string_div(maintenanceMarginString, collateralString), '5e-5'), '1', 4)) percentage = self.parse_number(Precise.string_mul(Precise.string_div(unrealizedPnlString, initialMarginString, 4), '100')) if usdm: # calculate liquidation price # # liquidationPrice = (walletBalance / (contracts * (±1 + mmp))) + (±entryPrice / (±1 + mmp)) # # mmp = maintenanceMarginPercentage # where ± is negative for long and positive for short # TODO: calculate liquidation price for coinm contracts onePlusMaintenanceMarginPercentageString = None entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_add('1', maintenanceMarginPercentageString) else: onePlusMaintenanceMarginPercentageString = Precise.string_add('-1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) leftSide = Precise.string_div(walletBalance, Precise.string_mul(contractsStringAbs, onePlusMaintenanceMarginPercentageString)) rightSide = Precise.string_div(entryPriceSignString, onePlusMaintenanceMarginPercentageString) liquidationPriceStringRaw = Precise.string_add(leftSide, rightSide) else: # calculate liquidation price # # liquidationPrice = (contracts * contractSize(±1 - mmp)) / (±1/entryPrice * contracts * contractSize - walletBalance) # onePlusMaintenanceMarginPercentageString = None 
entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_sub('1', maintenanceMarginPercentageString) else: onePlusMaintenanceMarginPercentageString = Precise.string_sub('-1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) size = Precise.string_mul(contractsStringAbs, market['contractSize']) leftSide = Precise.string_mul(size, onePlusMaintenanceMarginPercentageString) rightSide = Precise.string_sub(Precise.string_mul(Precise.string_div('1', entryPriceSignString), size), walletBalance) liquidationPriceStringRaw = Precise.string_div(leftSide, rightSide) pricePrecision = market['precision']['price'] pricePrecisionPlusOne = pricePrecision + 1 pricePrecisionPlusOneString = str(pricePrecisionPlusOne) # round half up rounder = Precise('5e-' + pricePrecisionPlusOneString) rounderString = str(rounder) liquidationPriceRoundedString = Precise.string_add(rounderString, liquidationPriceStringRaw) truncatedLiquidationPrice = Precise.string_div(liquidationPriceRoundedString, '1', pricePrecision) if truncatedLiquidationPrice[0] == '-': # user cannot be liquidated # since he has more collateral than the size of the position truncatedLiquidationPrice = None liquidationPrice = self.parse_number(truncatedLiquidationPrice) positionSide = self.safe_string(position, 'positionSide') hedged = positionSide != 'BOTH' return { 'info': position, 'symbol': symbol, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'initialMargin': initialMargin, 'initialMarginPercentage': self.parse_number(initialMarginPercentageString), 'maintenanceMargin': maintenanceMargin, 'maintenanceMarginPercentage': maintenanceMarginPercentage, 'entryPrice': entryPrice, 'notional': notional, 'leverage': self.parse_number(leverageString), 'unrealizedPnl': unrealizedPnl, 'contracts': contracts, 'contractSize': self.parse_number(market['contractSize']), 'marginRatio': marginRatio, 'liquidationPrice': liquidationPrice, 'markPrice': None, 'collateral': collateral, 'marginType': marginType, 'side': side, 'hedged': hedged, 'percentage': percentage, } def parse_position_risk(self, position, market=None): # # usdm # { # "symbol": "BTCUSDT", # "positionAmt": "0.001", # "entryPrice": "43578.07000", # "markPrice": "43532.30000000", # "unRealizedProfit": "-0.04577000", # "liquidationPrice": "21841.24993976", # "leverage": "2", # "maxNotionalValue": "300000000", # "marginType": "isolated", # "isolatedMargin": "21.77841506", # "isAutoAddMargin": "false", # "positionSide": "BOTH", # "notional": "43.53230000", # "isolatedWallet": "21.82418506", # "updateTime": "1621358023886" # } # # coinm # { # "symbol": "BTCUSD_PERP", # "positionAmt": "2", # "entryPrice": "37643.10000021", # "markPrice": "38103.05510455", # "unRealizedProfit": "0.00006413", # "liquidationPrice": "25119.97445760", # "leverage": "2", # "maxQty": "1500", # "marginType": "isolated", # "isolatedMargin": "0.00274471", # "isAutoAddMargin": "false", # "positionSide": "BOTH", # "notionalValue": "0.00524892", # "isolatedWallet": "0.00268058" # } # marketId = self.safe_string(position, 'symbol') market = self.safe_market(marketId, market) symbol = market['symbol'] leverageBrackets = self.safe_value(self.options, 'leverageBrackets', {}) leverageBracket = self.safe_value(leverageBrackets, symbol, []) notionalString = self.safe_string_2(position, 'notional', 'notionalValue') notionalStringAbs = Precise.string_abs(notionalString) notionalFloatAbs = float(notionalStringAbs) 
notionalFloat = float(notionalString) maintenanceMarginPercentageString = None for i in range(0, len(leverageBracket)): bracket = leverageBracket[i] if notionalFloatAbs < bracket[0]: break maintenanceMarginPercentageString = bracket[1] notional = self.parse_number(notionalStringAbs) contractsAbs = Precise.string_abs(self.safe_string(position, 'positionAmt')) contracts = self.parse_number(contractsAbs) unrealizedPnlString = self.safe_string(position, 'unRealizedProfit') unrealizedPnl = self.parse_number(unrealizedPnlString) leverageString = self.safe_string(position, 'leverage') leverage = int(leverageString) liquidationPriceString = self.omit_zero(self.safe_string(position, 'liquidationPrice')) liquidationPrice = self.parse_number(liquidationPriceString) collateralString = None marginType = self.safe_string(position, 'marginType') side = None if notionalFloat > 0: side = 'long' elif notionalFloat < 0: side = 'short' entryPriceString = self.safe_string(position, 'entryPrice') entryPrice = self.parse_number(entryPriceString) if marginType == 'cross': # calculate collateral if market['linear']: # walletBalance = (liquidationPrice * (±1 + mmp) ± entryPrice) * contracts onePlusMaintenanceMarginPercentageString = None entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_add('1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) else: onePlusMaintenanceMarginPercentageString = Precise.string_add('-1', maintenanceMarginPercentageString) inner = Precise.string_mul(liquidationPriceString, onePlusMaintenanceMarginPercentageString) leftSide = Precise.string_add(inner, entryPriceSignString) collateralString = Precise.string_div(Precise.string_mul(leftSide, contractsAbs), '1', market['precision']['quote']) else: # walletBalance = (contracts * contractSize) * (±1/entryPrice - (±1 - mmp) / liquidationPrice) onePlusMaintenanceMarginPercentageString = None entryPriceSignString = entryPriceString if side == 'short': onePlusMaintenanceMarginPercentageString = Precise.string_sub('1', maintenanceMarginPercentageString) else: onePlusMaintenanceMarginPercentageString = Precise.string_sub('-1', maintenanceMarginPercentageString) entryPriceSignString = Precise.string_mul('-1', entryPriceSignString) leftSide = Precise.string_mul(contractsAbs, market['contractSize']) rightSide = Precise.string_sub(Precise.string_div('1', entryPriceSignString), Precise.string_div(onePlusMaintenanceMarginPercentageString, liquidationPriceString)) collateralString = Precise.string_div(Precise.string_mul(leftSide, rightSide), '1', market['precision']['base']) else: collateralString = self.safe_string(position, 'isolatedMargin') collateralString = '0' if (collateralString is None) else collateralString collateralFloat = float(collateralString) collateral = self.parse_number(collateralString) markPrice = self.parse_number(self.omit_zero(self.safe_string(position, 'markPrice'))) timestamp = self.safe_integer(position, 'updateTime') if timestamp == 0: timestamp = None maintenanceMarginPercentage = self.parse_number(maintenanceMarginPercentageString) maintenanceMarginString = Precise.string_mul(maintenanceMarginPercentageString, notionalStringAbs) maintenanceMargin = self.parse_number(maintenanceMarginString) initialMarginPercentageString = Precise.string_div('1', leverageString, 8) rational = (1000 % leverage) == 0 if not rational: initialMarginPercentageString = Precise.string_add(initialMarginPercentageString, '1e-8') 
initialMarginString = Precise.string_div(Precise.string_mul(notionalStringAbs, initialMarginPercentageString), '1', 8) initialMargin = self.parse_number(initialMarginString) marginRatio = None percentage = None if collateralFloat != 0.0: marginRatio = self.parse_number(Precise.string_div(Precise.string_add(Precise.string_div(maintenanceMarginString, collateralString), '5e-5'), '1', 4)) percentage = self.parse_number(Precise.string_mul(Precise.string_div(unrealizedPnlString, initialMarginString, 4), '100')) positionSide = self.safe_string(position, 'positionSide') hedged = positionSide != 'BOTH' return { 'info': position, 'symbol': symbol, 'contracts': contracts, 'contractSize': self.parse_number(market['contractSize']), 'unrealizedPnl': unrealizedPnl, 'leverage': self.parse_number(leverageString), 'liquidationPrice': liquidationPrice, 'collateral': collateral, 'notional': notional, 'markPrice': markPrice, 'entryPrice': entryPrice, 'timestamp': timestamp, 'initialMargin': initialMargin, 'initialMarginPercentage': self.parse_number(initialMarginPercentageString), 'maintenanceMargin': maintenanceMargin, 'maintenanceMarginPercentage': maintenanceMarginPercentage, 'marginRatio': marginRatio, 'datetime': self.iso8601(timestamp), 'marginType': marginType, 'side': side, 'hedged': hedged, 'percentage': percentage, } def load_leverage_brackets(self, reload=False, params={}): self.load_markets() # by default cache the leverage bracket # it contains useful stuff like the maintenance margin and initial margin for positions leverageBrackets = self.safe_value(self.options, 'leverageBrackets') if (leverageBrackets is None) or (reload): method = None defaultType = self.safe_string(self.options, 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) query = self.omit(params, 'type') if type == 'future': method = 'fapiPrivateGetLeverageBracket' elif type == 'delivery': method = 'dapiPrivateV2GetLeverageBracket' else: raise NotSupported(self.id + ' loadLeverageBrackets() supports linear and inverse contracts only') response = getattr(self, method)(query) self.options['leverageBrackets'] = {} for i in range(0, len(response)): entry = response[i] marketId = self.safe_string(entry, 'symbol') symbol = self.safe_symbol(marketId) brackets = self.safe_value(entry, 'brackets') result = [] for j in range(0, len(brackets)): bracket = brackets[j] # we use floats here internally on purpose floorValue = self.safe_float_2(bracket, 'notionalFloor', 'qtyFloor') maintenanceMarginPercentage = self.safe_string(bracket, 'maintMarginRatio') result.append([floorValue, maintenanceMarginPercentage]) self.options['leverageBrackets'][symbol] = result return self.options['leverageBrackets'] def fetch_positions(self, symbols=None, params={}): defaultMethod = self.safe_string(self.options, 'fetchPositions', 'positionRisk') if defaultMethod == 'positionRisk': return self.fetch_positions_risk(symbols, params) elif defaultMethod == 'account': return self.fetch_account_positions(symbols, params) else: raise NotSupported(self.id + '.options["fetchPositions"] = "' + defaultMethod + '" is invalid, please choose between "account" and "positionRisk"') def fetch_account_positions(self, symbols=None, params={}): if symbols is not None: if not isinstance(symbols, list): raise ArgumentsRequired(self.id + ' fetchPositions requires an array argument for symbols') self.load_markets() self.load_leverage_brackets() method = None defaultType = self.safe_string(self.options, 'defaultType', 'future') type = self.safe_string(params, 
'type', defaultType) query = self.omit(params, 'type') if type == 'future': method = 'fapiPrivateGetAccount' elif type == 'delivery': method = 'dapiPrivateGetAccount' else: raise NotSupported(self.id + ' fetchPositions() supports linear and inverse contracts only') account = getattr(self, method)(query) result = self.parse_account_positions(account) return self.filter_by_array(result, 'symbol', symbols, False) def fetch_positions_risk(self, symbols=None, params={}): if symbols is not None: if not isinstance(symbols, list): raise ArgumentsRequired(self.id + ' fetchPositions requires an array argument for symbols') self.load_markets() self.load_leverage_brackets() request = {} method = None defaultType = 'future' defaultType = self.safe_string(self.options, 'defaultType', defaultType) type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') if (type == 'future') or (type == 'linear'): method = 'fapiPrivateGetPositionRisk' elif (type == 'delivery') or (type == 'inverse'): method = 'dapiPrivateGetPositionRisk' else: raise NotSupported(self.id + ' fetchIsolatedPositions() supports linear and inverse contracts only') response = getattr(self, method)(self.extend(request, params)) result = [] for i in range(0, len(response)): parsed = self.parse_position_risk(response[i]) result.append(parsed) return self.filter_by_array(result, 'symbol', symbols, False) def fetch_funding_history(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None method = None defaultType = 'future' request = { 'incomeType': 'FUNDING_FEE', # "TRANSFER","WELCOME_BONUS", "REALIZED_PNL","FUNDING_FEE", "COMMISSION" and "INSURANCE_CLEAR" } if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] if market['linear']: defaultType = 'future' elif market['inverse']: defaultType = 'delivery' else: raise NotSupported(self.id + ' fetchFundingHistory() supports linear and inverse contracts only') if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit defaultType = self.safe_string_2(self.options, 'fetchFundingHistory', 'defaultType', defaultType) type = self.safe_string(params, 'type', defaultType) params = self.omit(params, 'type') if (type == 'future') or (type == 'linear'): method = 'fapiPrivateGetIncome' elif (type == 'delivery') or (type == 'inverse'): method = 'dapiPrivateGetIncome' else: raise NotSupported(self.id + ' fetchFundingHistory() supports linear and inverse contracts only') response = getattr(self, method)(self.extend(request, params)) return self.parse_incomes(response, market, since, limit) def set_leverage(self, leverage, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' setLeverage() requires a symbol argument') # WARNING: THIS WILL INCREASE LIQUIDATION PRICE FOR OPEN ISOLATED LONG POSITIONS # AND DECREASE LIQUIDATION PRICE FOR OPEN ISOLATED SHORT POSITIONS if (leverage < 1) or (leverage > 125): raise BadRequest(self.id + ' leverage should be between 1 and 125') self.load_markets() market = self.market(symbol) method = None if market['linear']: method = 'fapiPrivatePostLeverage' elif market['inverse']: method = 'dapiPrivatePostLeverage' else: raise NotSupported(self.id + ' setLeverage() supports linear and inverse contracts only') request = { 'symbol': market['id'], 'leverage': leverage, } return getattr(self, method)(self.extend(request, params)) def set_margin_mode(self, marginType, symbol=None, params={}): # # {"code": -4048 , "msg": "Margin type cannot 
be changed if there exists position."} # # or # # {"code": 200, "msg": "success"} # marginType = marginType.upper() if (marginType != 'ISOLATED') and (marginType != 'CROSSED'): raise BadRequest(self.id + ' marginType must be either isolated or crossed') self.load_markets() market = self.market(symbol) method = None if market['linear']: method = 'fapiPrivatePostMarginType' elif market['inverse']: method = 'dapiPrivatePostMarginType' else: raise NotSupported(self.id + ' setMarginMode() supports linear and inverse contracts only') request = { 'symbol': market['id'], 'marginType': marginType, } return getattr(self, method)(self.extend(request, params)) def set_position_mode(self, hedged, symbol=None, params={}): defaultType = self.safe_string(self.options, 'defaultType', 'future') type = self.safe_string(params, 'type', defaultType) params = self.omit(params, ['type']) dualSidePosition = None if hedged: dualSidePosition = 'true' else: dualSidePosition = 'false' request = { 'dualSidePosition': dualSidePosition, } method = None if type == 'delivery': method = 'dapiPrivatePostPositionSideDual' else: # default to future method = 'fapiPrivatePostPositionSideDual' # # { # "code": 200, # "msg": "success" # } # return getattr(self, method)(self.extend(request, params)) def sign(self, path, api='public', method='GET', params={}, headers=None, body=None): if not (api in self.urls['api']): raise NotSupported(self.id + ' does not have a testnet/sandbox URL for ' + api + ' endpoints') url = self.urls['api'][api] url += '/' + path if api == 'wapi': url += '.html' if path == 'historicalTrades': if self.apiKey: headers = { 'X-MBX-APIKEY': self.apiKey, } else: raise AuthenticationError(self.id + ' historicalTrades endpoint requires `apiKey` credential') userDataStream = (path == 'userDataStream') or (path == 'listenKey') if userDataStream: if self.apiKey: # v1 special case for userDataStream headers = { 'X-MBX-APIKEY': self.apiKey, 'Content-Type': 'application/x-www-form-urlencoded', } if method != 'GET': body = self.urlencode(params) else: raise AuthenticationError(self.id + ' userDataStream endpoint requires `apiKey` credential') elif (api == 'private') or (api == 'sapi') or (api == 'wapi' and path != 'systemStatus') or (api == 'dapiPrivate') or (api == 'dapiPrivateV2') or (api == 'fapiPrivate') or (api == 'fapiPrivateV2'): self.check_required_credentials() query = None recvWindow = self.safe_integer(self.options, 'recvWindow', 5000) if (api == 'sapi') and (path == 'asset/dust'): query = self.urlencode_with_array_repeat(self.extend({ 'timestamp': self.nonce(), 'recvWindow': recvWindow, }, params)) elif (path == 'batchOrders') or (path.find('sub-account') >= 0): query = self.rawencode(self.extend({ 'timestamp': self.nonce(), 'recvWindow': recvWindow, }, params)) else: query = self.urlencode(self.extend({ 'timestamp': self.nonce(), 'recvWindow': recvWindow, }, params)) signature = self.hmac(self.encode(query), self.encode(self.secret)) query += '&' + 'signature=' + signature headers = { 'X-MBX-APIKEY': self.apiKey, } if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'): url += '?' + query else: body = query headers['Content-Type'] = 'application/x-www-form-urlencoded' else: if params: url += '?' 
+ self.urlencode(params) return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody): if (code == 418) or (code == 429): raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body) # error response in a form: {"code": -1013, "msg": "Invalid quantity."} # following block cointains legacy checks against message patterns in "msg" property # will switch "code" checks eventually, when we know all of them if code >= 400: if body.find('Price * QTY is zero or less') >= 0: raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body) if body.find('LOT_SIZE') >= 0: raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body) if body.find('PRICE_FILTER') >= 0: raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body) if response is None: return # fallback to default error handler # check success value for wapi endpoints # response in format {'msg': 'The coin does not exist.', 'success': True/false} success = self.safe_value(response, 'success', True) if not success: message = self.safe_string(response, 'msg') parsedMessage = None if message is not None: try: parsedMessage = json.loads(message) except Exception as e: # do nothing parsedMessage = None if parsedMessage is not None: response = parsedMessage message = self.safe_string(response, 'msg') if message is not None: self.throw_exactly_matched_exception(self.exceptions['exact'], message, self.id + ' ' + message) self.throw_broadly_matched_exception(self.exceptions['broad'], message, self.id + ' ' + message) # checks against error codes error = self.safe_string(response, 'code') if error is not None: # https://github.com/ccxt/ccxt/issues/6501 # https://github.com/ccxt/ccxt/issues/7742 if (error == '200') or Precise.string_equals(error, '0'): return # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} # despite that their message is very confusing, it is raised by Binance # on a temporary ban, the API key is valid, but disabled for a while if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']: raise DDoSProtection(self.id + ' temporary banned: ' + body) feedback = self.id + ' ' + body self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback) raise ExchangeError(feedback) if not success: raise ExchangeError(self.id + ' ' + body) def calculate_rate_limiter_cost(self, api, method, path, params, config={}, context={}): if ('noSymbol' in config) and not ('symbol' in params): return config['noSymbol'] elif ('noPoolId' in config) and not ('poolId' in params): return config['noPoolId'] elif ('byLimit' in config) and ('limit' in params): limit = params['limit'] byLimit = config['byLimit'] for i in range(0, len(byLimit)): entry = byLimit[i] if limit <= entry[0]: return entry[1] return self.safe_integer(config, 'cost', 1) def request(self, path, api='public', method='GET', params={}, headers=None, body=None, config={}, context={}): response = self.fetch2(path, api, method, params, headers, body, config, context) # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} if (api == 'private') or (api == 'wapi'): self.options['hasAlreadyAuthenticatedSuccessfully'] = True return response def 
modify_margin_helper(self, symbol, amount, addOrReduce, params={}): # used to modify isolated positions defaultType = self.safe_string(self.options, 'defaultType', 'future') if defaultType == 'spot': defaultType = 'future' type = self.safe_string(params, 'type', defaultType) if (type == 'margin') or (type == 'spot'): raise NotSupported(self.id + ' add / reduce margin only supported with type future or delivery') self.load_markets() market = self.market(symbol) request = { 'type': addOrReduce, 'symbol': market['id'], 'amount': amount, } method = None code = None if type == 'future': method = 'fapiPrivatePostPositionMargin' code = market['quote'] else: method = 'dapiPrivatePostPositionMargin' code = market['base'] response = getattr(self, method)(self.extend(request, params)) # # { # "code": 200, # "msg": "Successfully modify position margin.", # "amount": 0.001, # "type": 1 # } # rawType = self.safe_integer(response, 'type') resultType = 'add' if (rawType == 1) else 'reduce' resultAmount = self.safe_number(response, 'amount') errorCode = self.safe_string(response, 'code') status = 'ok' if (errorCode == '200') else 'failed' return { 'info': response, 'type': resultType, 'amount': resultAmount, 'code': code, 'symbol': market['symbol'], 'status': status, } def reduce_margin(self, symbol, amount, params={}): return self.modify_margin_helper(symbol, amount, 2, params) def add_margin(self, symbol, amount, params={}): return self.modify_margin_helper(symbol, amount, 1, params) def fetch_borrow_rate(self, code, params={}): self.load_markets() currency = self.currency(code) request = { 'asset': currency['id'], # 'vipLevel': self.safe_integer(params, 'vipLevel'), } response = self.sapiGetMarginInterestRateHistory(self.extend(request, params)) # # [ # { # "asset": "USDT", # "timestamp": 1638230400000, # "dailyInterestRate": "0.0006", # "vipLevel": 0 # }, # ... # ] # rate = self.safe_value(response, 0) timestamp = self.safe_number(rate, 'timestamp') return { 'currency': code, 'rate': self.safe_number(rate, 'dailyInterestRate'), 'period': 86400000, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'info': response, }
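The parse_account_position comments above quote the USD-M liquidation-price formula. As a plain-float sketch of that same relation (the library computes it with Precise string arithmetic; the mmp value and the sample numbers below are illustrative, loosely based on the positionRisk sample in the comments):

def usdm_liquidation_price(wallet_balance, contracts, entry_price, mmp, side):
    # liq = walletBalance / (contracts * (±1 + mmp)) + (±entryPrice) / (±1 + mmp),
    # with the sign positive for shorts and negative for longs
    sign = 1.0 if side == 'short' else -1.0
    denom = sign + mmp
    return wallet_balance / (contracts * denom) + (sign * entry_price) / denom

# 0.001 BTC long from 43578.07 with ~21.78 USDT isolated margin, mmp assumed 0.4%:
print(usdm_liquidation_price(21.77841506, 0.001, 43578.07, 0.004, 'long'))
# -> ~21887, in the same ballpark as the sample's liquidationPrice of 21841.25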
true
true
f700255c2c2445ca4d7fc8529c35e511ce7c0578
579
py
Python
blender/arm/logicnode/transform/LN_set_object_transform.py
Lykdraft/armory
da1cf33930ce9a8b1865d35c128fe4842bef2933
[ "Zlib" ]
null
null
null
blender/arm/logicnode/transform/LN_set_object_transform.py
Lykdraft/armory
da1cf33930ce9a8b1865d35c128fe4842bef2933
[ "Zlib" ]
null
null
null
blender/arm/logicnode/transform/LN_set_object_transform.py
Lykdraft/armory
da1cf33930ce9a8b1865d35c128fe4842bef2933
[ "Zlib" ]
null
null
null
from arm.logicnode.arm_nodes import * class SetTransformNode(ArmLogicTreeNode): """Sets the transform of the given object.""" bl_idname = 'LNSetTransformNode' bl_label = 'Set Object Transform' arm_version = 1 def init(self, context): super(SetTransformNode, self).init(context) self.add_input('ArmNodeSocketAction', 'In') self.add_input('ArmNodeSocketObject', 'Object') self.add_input('NodeSocketShader', 'Transform') self.add_output('ArmNodeSocketAction', 'Out') add_node(SetTransformNode, category=PKG_AS_CATEGORY)
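The record above shows Armory's logic-node registration pattern: declare bl_idname/bl_label, wire sockets in init, then register with add_node. A hypothetical sibling node following the same pattern (the names below are illustrative, not taken from the repository):

class GetTransformNode(ArmLogicTreeNode):
    """Hypothetical example: reads the transform of the given object."""
    bl_idname = 'LNExampleGetTransformNode'
    bl_label = 'Get Object Transform (Example)'
    arm_version = 1

    def init(self, context):
        super(GetTransformNode, self).init(context)
        self.add_input('ArmNodeSocketObject', 'Object')
        self.add_output('NodeSocketShader', 'Transform')

add_node(GetTransformNode, category=PKG_AS_CATEGORY)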
34.058824
55
0.708117
from arm.logicnode.arm_nodes import * class SetTransformNode(ArmLogicTreeNode): bl_idname = 'LNSetTransformNode' bl_label = 'Set Object Transform' arm_version = 1 def init(self, context): super(SetTransformNode, self).init(context) self.add_input('ArmNodeSocketAction', 'In') self.add_input('ArmNodeSocketObject', 'Object') self.add_input('NodeSocketShader', 'Transform') self.add_output('ArmNodeSocketAction', 'Out') add_node(SetTransformNode, category=PKG_AS_CATEGORY)
true
true
f70026be89262dcabc1e78f9a1ffaa22cafad438
2,272
py
Python
setup.py
tekktrik/CircuitPython_Org_DisplayIO_Cartesian
cef9af2765aa8cdfc8b6434e4ed29e4e040c003e
[ "MIT" ]
null
null
null
setup.py
tekktrik/CircuitPython_Org_DisplayIO_Cartesian
cef9af2765aa8cdfc8b6434e4ed29e4e040c003e
[ "MIT" ]
2
2022-02-18T19:12:08.000Z
2022-03-12T19:14:48.000Z
setup.py
tekktrik/CircuitPython_Org_DisplayIO_Cartesian
cef9af2765aa8cdfc8b6434e4ed29e4e040c003e
[ "MIT" ]
3
2021-05-26T10:57:13.000Z
2022-02-18T17:08:12.000Z
# SPDX-FileCopyrightText: 2017 Scott Shawcroft, written for Adafruit Industries # SPDX-FileCopyrightText: Copyright (c) 2021 Jose David M. for circuitpython # # SPDX-License-Identifier: MIT """A setuptools based setup module. See: https://packaging.python.org/en/latest/distributing.html https://github.com/pypa/sampleproject """ from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, "README.rst"), encoding="utf-8") as f: long_description = f.read() setup( # Community Bundle Information name="circuitpython-displayio-cartesian", use_scm_version=True, setup_requires=["setuptools_scm"], description="A cartesian plane widget for displaying graphical information.", long_description=long_description, long_description_content_type="text/x-rst", # The project's main homepage. url="https://github.com/circuitpython/CircuitPython_Org_DisplayIO_Cartesian.git", # Author details author="Jose David M.", author_email="", install_requires=[ "Adafruit-Blinka", "adafruit-circuitpython-display-text", "adafruit-circuitpython-displayio-layout", ], # Choose your license license="MIT", # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries", "Topic :: System :: Hardware", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", ], # What does your project relate to? keywords="adafruit blinka circuitpython micropython displayio_cartesian displayio widget " "graphics gui graph chart graphic", # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). # TODO: IF LIBRARY FILES ARE A PACKAGE FOLDER, # CHANGE `py_modules=['...']` TO `packages=['...']` py_modules=["displayio_cartesian"], )
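Because the setup declares a single module via py_modules, the installed distribution is imported directly as displayio_cartesian. A minimal usage sketch, assuming the widget API of the upstream displayio_layout Cartesian (the class name and constructor arguments are assumptions, and the board must have an onboard display):

import board
import displayio
from displayio_cartesian import Cartesian  # class name assumed from the upstream widget

display = board.DISPLAY  # assumes a board with a built-in screen
chart = Cartesian(x=15, y=2, width=100, height=100, xrange=(0, 10), yrange=(0, 10))
group = displayio.Group()
group.append(chart)
display.show(group)  # older displayio API; newer releases use display.root_group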
34.953846
94
0.695423
from setuptools import setup, find_packages from codecs import open from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, "README.rst"), encoding="utf-8") as f: long_description = f.read() setup( name="circuitpython-displayio-cartesian", use_scm_version=True, setup_requires=["setuptools_scm"], description="A cartesian plane widget for displaying graphical information.", long_description=long_description, long_description_content_type="text/x-rst", url="https://github.com/circuitpython/CircuitPython_Org_DisplayIO_Cartesian.git", # Author details author="Jose David M.", author_email="", install_requires=[ "Adafruit-Blinka", "adafruit-circuitpython-display-text", "adafruit-circuitpython-displayio-layout", ], # Choose your license license="MIT", # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries", "Topic :: System :: Hardware", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", ], # What does your project relate to? keywords="adafruit blinka circuitpython micropython displayio_cartesian displayio widget " "graphics gui graph chart graphic", # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). # TODO: IF LIBRARY FILES ARE A PACKAGE FOLDER, # CHANGE `py_modules=['...']` TO `packages=['...']` py_modules=["displayio_cartesian"], )
true
true
f7002809c326a21adb3489f8362fe6d0df39aa6a
2,482
py
Python
ssmpfwd/test/test_helpers.py
grizmin/ssm-port-forwarding
5894e906049f206aeca58320cd9bd02f5a2b6e7f
[ "MIT" ]
1
2021-08-05T06:45:17.000Z
2021-08-05T06:45:17.000Z
ssmpfwd/test/test_helpers.py
grizmin/ssm-port-forwarding
5894e906049f206aeca58320cd9bd02f5a2b6e7f
[ "MIT" ]
null
null
null
ssmpfwd/test/test_helpers.py
grizmin/ssm-port-forwarding
5894e906049f206aeca58320cd9bd02f5a2b6e7f
[ "MIT" ]
null
null
null
from ssmpfwd.helpers import verify_plugin_version, verbose_debug_quiet, time_decorator from unittest.mock import MagicMock, patch import unittest class TestVerifyPluginVersion(unittest.TestCase): @patch("ssmpfwd.helpers.subprocess") def test_verify_plugin_version_success(self, mock_subprocess): result = mock_subprocess.run() result.stdout = b"9.8.3" self.assertTrue(verify_plugin_version("9.8.3")) @patch("ssmpfwd.helpers.subprocess") def test_verify_plugin_version_fail(self, mock_subprocess): with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm: result = mock_subprocess.run() result.stdout = b"1.8.1" self.assertFalse(verify_plugin_version("9.2.3")) self.assertEqual(cm.output[0], "ERROR:ssmpfwd.helpers:session-manager-plugin version 1.8.1 is installed, 9.2.3 is required") class TestVerboseDebugQuiet(unittest.TestCase): import logging def setUp(self): @verbose_debug_quiet def test_func(): pass self.vdq = test_func self.vdq() def test_quiet(self): option_name = "quiet" self.assertTrue(any([p.name == option_name for p in self.vdq.__click_params__]), msg=f"Can not find {option_name} in option parameters") def test_debug(self): flag_value = self.logging.DEBUG self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values") def test_verbose(self): flag_value = self.logging.INFO self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values") def test_default_loglevel(self): flag_value = self.logging.WARN self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values") class TestTimeDecorator(unittest.TestCase): from time import sleep def setUp(self): @time_decorator def test_func(): self.sleep(0.5) self.time_decorated_method = test_func def test_time_decorator(self): with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm: self.time_decorated_method() self.assertEqual(cm.output[0], "INFO:ssmpfwd.helpers:[*] starting test_func")
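The suite runs under the stdlib test runner; the module path below is taken from this record's file path:

# From a shell:
#   python -m unittest ssmpfwd.test.test_helpers -v
# or programmatically:
import unittest
unittest.main(module="ssmpfwd.test.test_helpers", verbosity=2, exit=False)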
38.78125
150
0.670427
from ssmpfwd.helpers import verify_plugin_version, verbose_debug_quiet, time_decorator from unittest.mock import MagicMock, patch import unittest class TestVerifyPluginVersion(unittest.TestCase): @patch("ssmpfwd.helpers.subprocess") def test_verify_plugin_version_success(self, mock_subprocess): result = mock_subprocess.run() result.stdout = b"9.8.3" self.assertTrue(verify_plugin_version("9.8.3")) @patch("ssmpfwd.helpers.subprocess") def test_verify_plugin_version_fail(self, mock_subprocess): with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm: result = mock_subprocess.run() result.stdout = b"1.8.1" self.assertFalse(verify_plugin_version("9.2.3")) self.assertEqual(cm.output[0], "ERROR:ssmpfwd.helpers:session-manager-plugin version 1.8.1 is installed, 9.2.3 is required") class TestVerboseDebugQuiet(unittest.TestCase): import logging def setUp(self): @verbose_debug_quiet def test_func(): pass self.vdq = test_func self.vdq() def test_quiet(self): option_name = "quiet" self.assertTrue(any([p.name == option_name for p in self.vdq.__click_params__]), msg=f"Can not find {option_name} in option parameters") def test_debug(self): flag_value = self.logging.DEBUG self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values") def test_verbose(self): flag_value = self.logging.INFO self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values") def test_default_loglevel(self): flag_value = self.logging.WARN self.assertTrue(any([p.flag_value == flag_value for p in self.vdq.__click_params__]), msg=f"Can not find {flag_value} in option flag values") class TestTimeDecorator(unittest.TestCase): from time import sleep def setUp(self): @time_decorator def test_func(): self.sleep(0.5) self.time_decorated_method = test_func def test_time_decorator(self): with self.assertLogs("ssmpfwd.helpers", level="INFO") as cm: self.time_decorated_method() self.assertEqual(cm.output[0], "INFO:ssmpfwd.helpers:[*] starting test_func")
true
true
f70028f9fa4d86978ac4bf40e069c11a32974d6b
5,221
py
Python
x2.ESR/ESRB.py
doronbehar/lab4
90af5a8fd562ba6a35b6ba90611122573e7de485
[ "MIT" ]
null
null
null
x2.ESR/ESRB.py
doronbehar/lab4
90af5a8fd562ba6a35b6ba90611122573e7de485
[ "MIT" ]
null
null
null
x2.ESR/ESRB.py
doronbehar/lab4
90af5a8fd562ba6a35b6ba90611122573e7de485
[ "MIT" ]
null
null
null
import numpy as np import matplotlib.pyplot as plt import pint # Use the same registry from main import ureg ureg.setup_matplotlib(True) from uncertainties import ufloat, umath, unumpy import pandas as pd from scipy.signal import find_peaks from scipy.integrate import simpson from scipy.optimize import curve_fit plt.rcParams['text.usetex'] = True amp = 700*ureg.mV R=ufloat(0.82, 0.82*0.1)*ureg.ohm df = pd.read_csv("./ESRB.csv") # The I0_modulation signal is horrible, the system was too noisy, so instead: # # I0_modulation = (unumpy.uarray( # df['V_modulation_raw'].values, # df['V_modulation_err'].values # )*ureg.mV/R).to('ampere') # # we regnerate it, assuming it should be linear, just as V_DC is. I0_modulation = (unumpy.uarray(np.linspace( df['V_modulation_raw'].min(), df['V_modulation_raw'].max(), len(df) ), df['V_modulation_err'].mean())*ureg.mV/R).to('ampere') ptp_Y = unumpy.uarray( df['ptp_Y_raw'].values*df['phase_sign'].values, df['ptp_Y_err'].values )*ureg.mV ptp_X_modulation = ufloat(3.09, 0.01)*ureg.mV fig, ax = plt.subplots() I0_modulation_err = np.array([val.m.s for val in I0_modulation]) I0_modulation_raw = np.array([val.m.n for val in I0_modulation]) ptp_ratio = ptp_Y/ptp_X_modulation absorption_deriviative = ptp_ratio/max(ptp_ratio) absorption_deriviative_raw = np.array([val.m.n for val in absorption_deriviative]) absorption_deriviative_err = np.array([val.m.s for val in absorption_deriviative]) ax.errorbar( I0_modulation_raw*ureg.ampere, absorption_deriviative_raw, # Dimensionless fmt='.', yerr=absorption_deriviative_err, # TODO: Mention in report that error is too big to be drafted #xerr=I_modulation_err, # TODO: Is this the correct label? label='Absorption Deriviative' ) def lorentzian_dif_fit(I, I0, gamma, amplitude): return amplitude*(-2*(gamma**2)*(I - I0))/ \ (gamma**2 + (I - I0)**2)**2 def lorentzian_fit(I, I0, gamma, amplitude): return amplitude*gamma**2/\ (gamma**2 + (I - I0)**2)**2 ##### By MATLAB: # Goodness of fit: # SSE: 0.197 # R-square: 0.9845 # Adjusted R-square: 0.9838 # RMSE: 0.06769 # I0 gamma amplitude matlab_p0 = [0.5479, 0.03847, 0.05554] matlab_bounds=((0.547, 0.03672, 0.05304), (0.5488, 0.04021, 0.05805)) I_rf = ufloat(matlab_p0[0], abs(matlab_bounds[0][0] - matlab_p0[0]))*ureg.ampere I_hwhm = ufloat(matlab_p0[1], abs(matlab_bounds[0][1] - matlab_p0[1]))*ureg.ampere from main import g_times_bohr # TODO: Take this value from Itamar & Tomer H_RF = ufloat(34.914, 0.009)*ureg.gauss k = H_RF/I_rf # Converts current I To frequency f using all of the constants def I2f(I): return (I*k*g_times_bohr/ureg.planck_constant).to('megahertz') f0_modulation = I2f(I0_modulation) f_rf = I2f(I_rf) f_hwhm = I2f(I_hwhm) T2 = (1/f_hwhm).to('nanosecond') ##### A failing Python fit attempt - I consider it as a failure because it hits ##### the bounds :/ # popt, pcov = curve_fit( # lorentzian_dif_fit, absorption_deriviative_raw, I0_modulation_raw, # p0=matlab_p0, bounds=matlab_bounds # ) # lorentzian_dif_fit_points = lorentzian_dif_fit(I0_modulation_raw, *popt) # ax.plot( # I0_modulation_raw*ureg.ampere, # lorentzian_dif_fit_points, # label="Python fit" # ) I0_modulation_seq = np.linspace( I0_modulation.min().m.n, I0_modulation.max().m.n, len(I0_modulation)*100 ) ax.plot( I0_modulation_seq*ureg.ampere, lorentzian_dif_fit(I0_modulation_seq, I_rf.m.n, I_hwhm.m.n, matlab_p0[2]), label="Matlab fit" ) ax.set_yticks([]) axt = ax.twiny() axt.grid(linestyle='--') axt.set_yticks([]) f0_modulation_seq = np.linspace( f0_modulation.min().m.n, f0_modulation.max().m.n, 
len(f0_modulation)*100 ) def lorentzian_wrapper(f0): # For some reason this needs to be amplified by a factor of 800 so it will # look good. return lorentzian_fit(f0, f_rf.m.n, f_hwhm.m.n, matlab_p0[2]*800) axt.plot( f0_modulation_seq*ureg.megahertz, lorentzian_wrapper(f0_modulation_seq), label = "Lorentzian fit", color='green' ) axt.set_xticks( [(f_rf - f_hwhm).m.n, f_rf.m.n, (f_rf + f_hwhm).m.n], ['', '$f_{rf}$', ''] ) axt.set_xlabel('') axt.arrow( length_includes_head = True, x = (f_rf - f_hwhm).m.n*ureg.megahertz, y = lorentzian_wrapper((f_rf - f_hwhm).m.n), dx = 2*f_hwhm.m.n*ureg.megahertz, dy = 0, head_length = f_hwhm.m.n/10, head_width = matlab_p0[2], label="Full Width Half Max", ) axt.arrow( length_includes_head = True, x = (f_rf + f_hwhm).m.n*ureg.megahertz, y = lorentzian_wrapper((f_rf + f_hwhm).m.n), dx = -2*f_hwhm.m.n*ureg.megahertz, head_length = f_hwhm.m.n/10, head_width = matlab_p0[2], dy = 0, ) axt.text( 0.5, 0.63, # (f_hwhm.m.n/10), # lorentzian_wrapper((f0 - f_hwhm).m.n)*2, "FWHM", transform=ax.transAxes, # fontsize=00 ) ax.legend(loc='upper right') # axt.legend(loc='upper left') plt.show() fig.savefig("ESRB.pgf") fig.savefig("ESRB.png") # TODO: Integrate numerically / or fit to a Lorentzian's derivative # TODO: Scale the x axis to frequency and find the width of the Lorentzian in # frequency scale
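The script converts coil current to ESR frequency through f = I·k·g·μB/h and reads T2 off the Lorentzian half width. A plain-float sketch of that arithmetic, with the fit values copied from above and units handled by hand instead of pint (g is assumed to be the free-electron value, as in the lab's main module):

PLANCK = 6.62607015e-34            # J*s
G_TIMES_BOHR = 2.0023 * 9.274e-24  # g * mu_B in J/T, assuming a free-electron g
k = 34.914e-4 / 0.5479             # tesla per ampere, from H_RF / I_rf (gauss -> tesla)

def current_to_freq_hz(current_a):
    return current_a * k * G_TIMES_BOHR / PLANCK

print(current_to_freq_hz(0.5479) / 1e6, "MHz at resonance")    # ~97.9 MHz
f_hwhm = current_to_freq_hz(0.03847)                           # half width, Hz
print(1.0 / f_hwhm * 1e9, "ns (T2 as defined in the script)")  # ~146 ns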
30.532164
82
0.684735
import numpy as np import matplotlib.pyplot as plt import pint from main import ureg ureg.setup_matplotlib(True) from uncertainties import ufloat, umath, unumpy import pandas as pd from scipy.signal import find_peaks from scipy.integrate import simpson from scipy.optimize import curve_fit plt.rcParams['text.usetex'] = True amp = 700*ureg.mV R=ufloat(0.82, 0.82*0.1)*ureg.ohm df = pd.read_csv("./ESRB.csv") I0_modulation = (unumpy.uarray(np.linspace( df['V_modulation_raw'].min(), df['V_modulation_raw'].max(), len(df) ), df['V_modulation_err'].mean())*ureg.mV/R).to('ampere') ptp_Y = unumpy.uarray( df['ptp_Y_raw'].values*df['phase_sign'].values, df['ptp_Y_err'].values )*ureg.mV ptp_X_modulation = ufloat(3.09, 0.01)*ureg.mV fig, ax = plt.subplots() I0_modulation_err = np.array([val.m.s for val in I0_modulation]) I0_modulation_raw = np.array([val.m.n for val in I0_modulation]) ptp_ratio = ptp_Y/ptp_X_modulation absorption_deriviative = ptp_ratio/max(ptp_ratio) absorption_deriviative_raw = np.array([val.m.n for val in absorption_deriviative]) absorption_deriviative_err = np.array([val.m.s for val in absorption_deriviative]) ax.errorbar( I0_modulation_raw*ureg.ampere, absorption_deriviative_raw, fmt='.', yerr=absorption_deriviative_err, label='Absorption Deriviative' ) def lorentzian_dif_fit(I, I0, gamma, amplitude): return amplitude*(-2*(gamma**2)*(I - I0))/ \ (gamma**2 + (I - I0)**2)**2 def lorentzian_fit(I, I0, gamma, amplitude): return amplitude*gamma**2/\ (gamma**2 + (I - I0)**2)**2 matlab_p0 = [0.5479, 0.03847, 0.05554] matlab_bounds=((0.547, 0.03672, 0.05304), (0.5488, 0.04021, 0.05805)) I_rf = ufloat(matlab_p0[0], abs(matlab_bounds[0][0] - matlab_p0[0]))*ureg.ampere I_hwhm = ufloat(matlab_p0[1], abs(matlab_bounds[0][1] - matlab_p0[1]))*ureg.ampere from main import g_times_bohr H_RF = ufloat(34.914, 0.009)*ureg.gauss k = H_RF/I_rf def I2f(I): return (I*k*g_times_bohr/ureg.planck_constant).to('megahertz') f0_modulation = I2f(I0_modulation) f_rf = I2f(I_rf) f_hwhm = I2f(I_hwhm) T2 = (1/f_hwhm).to('nanosecond') I0_modulation_seq = np.linspace( I0_modulation.min().m.n, I0_modulation.max().m.n, len(I0_modulation)*100 ) ax.plot( I0_modulation_seq*ureg.ampere, lorentzian_dif_fit(I0_modulation_seq, I_rf.m.n, I_hwhm.m.n, matlab_p0[2]), label="Matlab fit" ) ax.set_yticks([]) axt = ax.twiny() axt.grid(linestyle='--') axt.set_yticks([]) f0_modulation_seq = np.linspace( f0_modulation.min().m.n, f0_modulation.max().m.n, len(f0_modulation)*100 ) def lorentzian_wrapper(f0): return lorentzian_fit(f0, f_rf.m.n, f_hwhm.m.n, matlab_p0[2]*800) axt.plot( f0_modulation_seq*ureg.megahertz, lorentzian_wrapper(f0_modulation_seq), label = "Lorentzian fit", color='green' ) axt.set_xticks( [(f_rf - f_hwhm).m.n, f_rf.m.n, (f_rf + f_hwhm).m.n], ['', '$f_{rf}$', ''] ) axt.set_xlabel('') axt.arrow( length_includes_head = True, x = (f_rf - f_hwhm).m.n*ureg.megahertz, y = lorentzian_wrapper((f_rf - f_hwhm).m.n), dx = 2*f_hwhm.m.n*ureg.megahertz, dy = 0, head_length = f_hwhm.m.n/10, head_width = matlab_p0[2], label="Full Width Half Max", ) axt.arrow( length_includes_head = True, x = (f_rf + f_hwhm).m.n*ureg.megahertz, y = lorentzian_wrapper((f_rf + f_hwhm).m.n), dx = -2*f_hwhm.m.n*ureg.megahertz, head_length = f_hwhm.m.n/10, head_width = matlab_p0[2], dy = 0, ) axt.text( 0.5, 0.63, "FWHM", transform=ax.transAxes, ) ax.legend(loc='upper right') plt.show() fig.savefig("ESRB.pgf") fig.savefig("ESRB.png") # TODO: Scale the x axis to frequency and find the width of the Lorentzian in # frequency scale
true
true
f70029a1b9d5fe7dd248a9f3e0e32a8709368d7c
315
py
Python
receive_sms.py
jatinchowdhury18/DogBot
8f0487c68a203f518d5b110107acda227aa6f112
[ "MIT" ]
null
null
null
receive_sms.py
jatinchowdhury18/DogBot
8f0487c68a203f518d5b110107acda227aa6f112
[ "MIT" ]
null
null
null
receive_sms.py
jatinchowdhury18/DogBot
8f0487c68a203f518d5b110107acda227aa6f112
[ "MIT" ]
null
null
null
from flask import Flask, request, redirect from twilio.twiml.messaging_response import MessagingResponse from get_secrets import * app = Flask(__name__) # Twilio posts incoming SMS to this webhook and expects TwiML back @app.route("/sms", methods=["GET", "POST"]) def main(): resp = MessagingResponse() resp.message("You have reached the DogBot. Thanks for contacting us :)") return str(resp) if __name__ == "__main__": app.run()
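A quick local check of the webhook with Flask's test client (route path as defined above):

client = app.test_client()
reply = client.post("/sms", data={"Body": "hi", "From": "+15005550006"})
print(reply.data.decode())  # TwiML: <Response><Message>You have reached the DogBot ...</Message></Response>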
22.5
77
0.730159
from flask import Flask, request, redirect from twilio.twiml.messaging_response import MessagingResponse from get_secrets import * app = Flask(__name__) @app.route("/sms", methods=["GET", "POST"]) def main(): resp = MessagingResponse() resp.message("You have reached the DogBot. Thanks for contacting us :)") return str(resp) if __name__ == "__main__": app.run()
true
true
f7002a41f654a061211357f2e1d7380fd76aa69c
4,738
py
Python
src/aioyhsm/constants.py
joernheissler/aioyhsm
598837034ec1d094a4bb544d018cc79036d55dd3
[ "MIT" ]
null
null
null
src/aioyhsm/constants.py
joernheissler/aioyhsm
598837034ec1d094a4bb544d018cc79036d55dd3
[ "MIT" ]
null
null
null
src/aioyhsm/constants.py
joernheissler/aioyhsm
598837034ec1d094a4bb544d018cc79036d55dd3
[ "MIT" ]
null
null
null
from __future__ import annotations

from enum import IntEnum


class Algorithm(IntEnum):
    """
    https://developers.yubico.com/YubiHSM2/Concepts/Algorithms.html
    """

    RSA_PKCS1_SHA1 = 1
    RSA_PKCS1_SHA256 = 2
    RSA_PKCS1_SHA384 = 3
    RSA_PKCS1_SHA512 = 4
    RSA_PSS_SHA1 = 5
    RSA_PSS_SHA256 = 6
    RSA_PSS_SHA384 = 7
    RSA_PSS_SHA512 = 8
    RSA_2048 = 9
    RSA_3072 = 10
    RSA_4096 = 11
    EC_P256 = 12
    EC_P384 = 13
    EC_P521 = 14
    EC_K256 = 15
    EC_BP256 = 16
    EC_BP384 = 17
    EC_BP512 = 18
    HMAC_SHA1 = 19
    HMAC_SHA256 = 20
    HMAC_SHA384 = 21
    HMAC_SHA512 = 22
    ECDSA_SHA1 = 23
    EC_ECDH = 24
    RSA_OAEP_SHA1 = 25
    RSA_OAEP_SHA256 = 26
    RSA_OAEP_SHA384 = 27
    RSA_OAEP_SHA512 = 28
    AES128_CCM_WRAP = 29
    Opaque_Data = 30
    Opaque_X509_Certificate = 31
    MGF1_SHA1 = 32
    MGF1_SHA256 = 33
    MGF1_SHA384 = 34
    MGF1_SHA512 = 35
    SSH_Template = 36
    Yubico_OTP_AES128 = 37
    Yubico_AES_Authentication = 38
    Yubico_OTP_AES192 = 39
    Yubico_OTP_AES256 = 40
    AES192_CCM_WRAP = 41
    AES256_CCM_WRAP = 42
    ECDSA_SHA256 = 43
    ECDSA_SHA384 = 44
    ECDSA_SHA512 = 45
    ED25519 = 46
    EC_P224 = 47


class Capability(IntEnum):
    """
    https://developers.yubico.com/YubiHSM2/Concepts/Capability.html
    """

    GetOpaque = 0
    PutOpaque = 1
    PutAuthenticationKey = 2
    PutAsymmetricKey = 3
    GenerateAsymmetricKey = 4
    SignPkcs = 5
    SignPss = 6
    SignEcdsa = 7
    SignEddsa = 8
    DecryptPkcs = 9
    DecryptOaep = 10
    DeriveEcdh = 11
    ExportWrapped = 12
    ImportWrapped = 13
    PutWrapKey = 14
    GenerateWrapKey = 15
    ExportableUnderWrap = 16
    SetOption = 17
    GetOption = 18
    GetPseudoRandom = 19
    PutMacKey = 20
    GenerateHmacKey = 21
    SignHmac = 22
    VerifyHmac = 23
    GetLogEntries = 24
    SignSshCertificate = 25
    GetTemplate = 26
    PutTemplate = 27
    ResetDevice = 28
    DecryptOtp = 29
    CreateOtpAead = 30
    RandomizeOtpAead = 31
    RewrapFromOtpAeadKey = 32
    RewrapToOtpAeadKey = 33
    SignAttestationCertificate = 34
    PutOtpAeadKey = 35
    GenerateOtpAeadKey = 36
    WrapData = 37
    UnwrapData = 38
    DeleteOpaque = 39
    DeleteAuthenticationKey = 40
    DeleteAsymmetricKey = 41
    DeleteWrapKey = 42
    DeleteHmacKey = 43
    DeleteTemplate = 44
    DeleteOtpAeadKey = 45
    ChangeAuthenticationKey = 46


class Command(IntEnum):
    """
    https://developers.yubico.com/YubiHSM2/Commands/
    """

    Echo = 0x01
    CreateSession = 0x03
    AuthenticateSession = 0x04
    SessionMessage = 0x05
    GetDeviceInfo = 0x06
    ResetDevice = 0x08
    CloseSession = 0x40
    GetStorageInfo = 0x41
    PutOpaque = 0x42
    GetOpaque = 0x43
    PutAuthenticationKey = 0x44
    PutAsymmetricKey = 0x45
    GenerateAsymmetricKey = 0x46
    SignPkcs1 = 0x47
    ListObjects = 0x48
    DecryptPkcs1 = 0x49
    ExportWrapped = 0x4A
    ImportWrapped = 0x4B
    PutWrapKey = 0x4C
    GetLogEntries = 0x4D
    GetObjectInfo = 0x4E
    SetOption = 0x4F
    GetOption = 0x50
    GetPseudoRandom = 0x51
    PutHmacKey = 0x52
    SignHmac = 0x53
    GetPublicKey = 0x54
    SignPss = 0x55
    SignEcdsa = 0x56
    DeriveEcdh = 0x57
    DeleteObject = 0x58
    DecryptOaep = 0x59
    GenerateHmacKey = 0x5A
    GenerateWrapKey = 0x5B
    VerifyHmac = 0x5C
    SignSshCertificate = 0x5D
    PutTemplate = 0x5E
    GetTemplate = 0x5F
    DecryptOtp = 0x60
    CreateOtpAead = 0x61
    RandomizeOtpAead = 0x62
    RewrapOtpAead = 0x63
    SignAttestationCertificate = 0x64
    PutOtpAeadKey = 0x65
    GenerateOtpAeadKey = 0x66
    SetLogIndex = 0x67
    WrapData = 0x68
    UnwrapData = 0x69
    SignEddsa = 0x6A
    BlinkDevice = 0x6B
    ChangeAuthenticationKey = 0x6C
    Error = 0x7F


class Error(IntEnum):
    """
    https://developers.yubico.com/YubiHSM2/Concepts/Errors.html
    """

    OK = 0x00
    INVALID_COMMAND = 0x01
    INVALID_DATA = 0x02
    INVALID_SESSION = 0x03
    AUTHENTICATION_FAILED = 0x04
    SESSIONS_FULL = 0x05
    SESSION_FAILED = 0x06
    STORAGE_FAILED = 0x07
    WRONG_LENGTH = 0x08
    INSUFFICIENT_PERMISSIONS = 0x09
    LOG_FULL = 0x0A
    OBJECT_NOT_FOUND = 0x0B
    INVALID_ID = 0x0C
    SSH_CA_CONSTRAINT_VIOLATION = 0x0E
    INVALID_OTP = 0x0F
    DEMO_MODE = 0x10
    OBJECT_EXISTS = 0x11


class ObjectType(IntEnum):
    """
    https://developers.yubico.com/YubiHSM2/Concepts/Object.html
    """

    Opaque = 0x01
    AuthenticationKey = 0x02
    AsymmetricKey = 0x03
    WrapKey = 0x04
    HmacKey = 0x05
    Template = 0x06
    OtpAeadKey = 0x07


class Option(IntEnum):
    """
    https://developers.yubico.com/YubiHSM2/Concepts/Options.html
    """

    ForceAudit = 0x01
    CommandAudit = 0x03
21.733945
67
0.659772
from __future__ import annotations

from enum import IntEnum


class Algorithm(IntEnum):
    RSA_PKCS1_SHA1 = 1
    RSA_PKCS1_SHA256 = 2
    RSA_PKCS1_SHA384 = 3
    RSA_PKCS1_SHA512 = 4
    RSA_PSS_SHA1 = 5
    RSA_PSS_SHA256 = 6
    RSA_PSS_SHA384 = 7
    RSA_PSS_SHA512 = 8
    RSA_2048 = 9
    RSA_3072 = 10
    RSA_4096 = 11
    EC_P256 = 12
    EC_P384 = 13
    EC_P521 = 14
    EC_K256 = 15
    EC_BP256 = 16
    EC_BP384 = 17
    EC_BP512 = 18
    HMAC_SHA1 = 19
    HMAC_SHA256 = 20
    HMAC_SHA384 = 21
    HMAC_SHA512 = 22
    ECDSA_SHA1 = 23
    EC_ECDH = 24
    RSA_OAEP_SHA1 = 25
    RSA_OAEP_SHA256 = 26
    RSA_OAEP_SHA384 = 27
    RSA_OAEP_SHA512 = 28
    AES128_CCM_WRAP = 29
    Opaque_Data = 30
    Opaque_X509_Certificate = 31
    MGF1_SHA1 = 32
    MGF1_SHA256 = 33
    MGF1_SHA384 = 34
    MGF1_SHA512 = 35
    SSH_Template = 36
    Yubico_OTP_AES128 = 37
    Yubico_AES_Authentication = 38
    Yubico_OTP_AES192 = 39
    Yubico_OTP_AES256 = 40
    AES192_CCM_WRAP = 41
    AES256_CCM_WRAP = 42
    ECDSA_SHA256 = 43
    ECDSA_SHA384 = 44
    ECDSA_SHA512 = 45
    ED25519 = 46
    EC_P224 = 47


class Capability(IntEnum):
    GetOpaque = 0
    PutOpaque = 1
    PutAuthenticationKey = 2
    PutAsymmetricKey = 3
    GenerateAsymmetricKey = 4
    SignPkcs = 5
    SignPss = 6
    SignEcdsa = 7
    SignEddsa = 8
    DecryptPkcs = 9
    DecryptOaep = 10
    DeriveEcdh = 11
    ExportWrapped = 12
    ImportWrapped = 13
    PutWrapKey = 14
    GenerateWrapKey = 15
    ExportableUnderWrap = 16
    SetOption = 17
    GetOption = 18
    GetPseudoRandom = 19
    PutMacKey = 20
    GenerateHmacKey = 21
    SignHmac = 22
    VerifyHmac = 23
    GetLogEntries = 24
    SignSshCertificate = 25
    GetTemplate = 26
    PutTemplate = 27
    ResetDevice = 28
    DecryptOtp = 29
    CreateOtpAead = 30
    RandomizeOtpAead = 31
    RewrapFromOtpAeadKey = 32
    RewrapToOtpAeadKey = 33
    SignAttestationCertificate = 34
    PutOtpAeadKey = 35
    GenerateOtpAeadKey = 36
    WrapData = 37
    UnwrapData = 38
    DeleteOpaque = 39
    DeleteAuthenticationKey = 40
    DeleteAsymmetricKey = 41
    DeleteWrapKey = 42
    DeleteHmacKey = 43
    DeleteTemplate = 44
    DeleteOtpAeadKey = 45
    ChangeAuthenticationKey = 46


class Command(IntEnum):
    Echo = 0x01
    CreateSession = 0x03
    AuthenticateSession = 0x04
    SessionMessage = 0x05
    GetDeviceInfo = 0x06
    ResetDevice = 0x08
    CloseSession = 0x40
    GetStorageInfo = 0x41
    PutOpaque = 0x42
    GetOpaque = 0x43
    PutAuthenticationKey = 0x44
    PutAsymmetricKey = 0x45
    GenerateAsymmetricKey = 0x46
    SignPkcs1 = 0x47
    ListObjects = 0x48
    DecryptPkcs1 = 0x49
    ExportWrapped = 0x4A
    ImportWrapped = 0x4B
    PutWrapKey = 0x4C
    GetLogEntries = 0x4D
    GetObjectInfo = 0x4E
    SetOption = 0x4F
    GetOption = 0x50
    GetPseudoRandom = 0x51
    PutHmacKey = 0x52
    SignHmac = 0x53
    GetPublicKey = 0x54
    SignPss = 0x55
    SignEcdsa = 0x56
    DeriveEcdh = 0x57
    DeleteObject = 0x58
    DecryptOaep = 0x59
    GenerateHmacKey = 0x5A
    GenerateWrapKey = 0x5B
    VerifyHmac = 0x5C
    SignSshCertificate = 0x5D
    PutTemplate = 0x5E
    GetTemplate = 0x5F
    DecryptOtp = 0x60
    CreateOtpAead = 0x61
    RandomizeOtpAead = 0x62
    RewrapOtpAead = 0x63
    SignAttestationCertificate = 0x64
    PutOtpAeadKey = 0x65
    GenerateOtpAeadKey = 0x66
    SetLogIndex = 0x67
    WrapData = 0x68
    UnwrapData = 0x69
    SignEddsa = 0x6A
    BlinkDevice = 0x6B
    ChangeAuthenticationKey = 0x6C
    Error = 0x7F


class Error(IntEnum):
    OK = 0x00
    INVALID_COMMAND = 0x01
    INVALID_DATA = 0x02
    INVALID_SESSION = 0x03
    AUTHENTICATION_FAILED = 0x04
    SESSIONS_FULL = 0x05
    SESSION_FAILED = 0x06
    STORAGE_FAILED = 0x07
    WRONG_LENGTH = 0x08
    INSUFFICIENT_PERMISSIONS = 0x09
    LOG_FULL = 0x0A
    OBJECT_NOT_FOUND = 0x0B
    INVALID_ID = 0x0C
    SSH_CA_CONSTRAINT_VIOLATION = 0x0E
    INVALID_OTP = 0x0F
    DEMO_MODE = 0x10
    OBJECT_EXISTS = 0x11


class ObjectType(IntEnum):
    Opaque = 0x01
    AuthenticationKey = 0x02
    AsymmetricKey = 0x03
    WrapKey = 0x04
    HmacKey = 0x05
    Template = 0x06
    OtpAeadKey = 0x07


class Option(IntEnum):
    ForceAudit = 0x01
    CommandAudit = 0x03
true
true
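The record above is a constants-only module: plain IntEnum catalogues of YubiHSM2 algorithm, capability, command, error, object-type and option codes. As a hedged illustration of how such IntEnum tables are typically consumed, the following minimal sketch frames a command and decodes a reply; the one-byte-command / two-byte-length framing and the frame_command/parse_reply helpers are assumptions made for illustration, not part of the record and not a statement of the actual YubiHSM2 wire protocol.

import struct
from enum import IntEnum

class Command(IntEnum):
    Echo = 0x01
    Error = 0x7F

class Error(IntEnum):
    OK = 0x00
    INVALID_COMMAND = 0x01

def frame_command(cmd: Command, payload: bytes) -> bytes:
    # Assumed framing for illustration: one command byte followed by a
    # big-endian 16-bit payload length, then the payload itself.
    return struct.pack('>BH', cmd, len(payload)) + payload

def parse_reply(data: bytes) -> bytes:
    # Mirror of frame_command; raises if the device reported an error code.
    cmd, length = struct.unpack('>BH', data[:3])
    body = data[3:3 + length]
    if cmd == Command.Error:
        raise RuntimeError('device error: %s' % Error(body[0]).name)
    return body

print(frame_command(Command.Echo, b'hello').hex())  # 01000568656c6c6f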
f7002af16052213b7cd652135616cee9b3b5d62d
13,675
py
Python
panel/tests/test_reactive.py
govinda18/panel
d2b70d9a0a4433d427c627e70328d0bc8621d78b
[ "BSD-3-Clause" ]
2
2018-08-23T16:50:40.000Z
2018-08-23T20:01:45.000Z
panel/tests/test_reactive.py
pyviz/pyviz_panels
120019e4318ac51bc2b9d0a1b2eb2239c8a0c9ad
[ "BSD-3-Clause" ]
null
null
null
panel/tests/test_reactive.py
pyviz/pyviz_panels
120019e4318ac51bc2b9d0a1b2eb2239c8a0c9ad
[ "BSD-3-Clause" ]
null
null
null
import unittest.mock

from functools import partial

import bokeh.core.properties as bp
import param
import pytest

from bokeh.document import Document
from bokeh.io.doc import patch_curdoc
from bokeh.models import Div

from panel.layout import Tabs, WidgetBox
from panel.reactive import Reactive, ReactiveHTML
from panel.viewable import Viewable
from panel.widgets import (
    Checkbox, IntInput, StaticText, TextInput,
)


def test_reactive_default_title():
    doc = ReactiveHTML().server_doc()
    assert doc.title == 'Panel Application'


def test_reactive_servable_title():
    doc = Document()
    session_context = unittest.mock.Mock()
    with patch_curdoc(doc):
        doc._session_context = lambda: session_context
        ReactiveHTML().servable(title='A')
        ReactiveHTML().servable(title='B')
    assert doc.title == 'B'


def test_link():
    "Link two Reactive objects"

    class ReactiveLink(Reactive):
        a = param.Parameter()

    obj = ReactiveLink()
    obj2 = ReactiveLink()
    obj.link(obj2, a='a')
    obj.a = 1

    assert obj.a == 1
    assert obj2.a == 1


def test_param_rename():
    "Test that Reactive renames params and properties"

    class ReactiveRename(Reactive):
        a = param.Parameter()

        _rename = {'a': 'b'}

    obj = ReactiveRename()

    params = obj._process_property_change({'b': 1})
    assert params == {'a': 1}

    properties = obj._process_param_change({'a': 1})
    assert properties == {'b': 1}


def test_link_properties_nb(document, comm):

    class ReactiveLink(Reactive):
        text = param.String(default='A')

    obj = ReactiveLink()
    div = Div()

    # Link property and check bokeh js property callback is defined
    obj._link_props(div, ['text'], document, div, comm)
    assert 'text' in div._callbacks

    # Assert callback is set up correctly
    cb = div._callbacks['text'][0]
    assert isinstance(cb, partial)
    assert cb.args == (document, div.ref['id'], comm, None)
    assert cb.func == obj._comm_change


def test_link_properties_server(document):

    class ReactiveLink(Reactive):
        text = param.String(default='A')

    obj = ReactiveLink()
    div = Div()

    # Link property and check bokeh callback is defined
    obj._link_props(div, ['text'], document, div)
    assert 'text' in div._callbacks

    # Assert callback is set up correctly
    cb = div._callbacks['text'][0]
    assert isinstance(cb, partial)
    assert cb.args == (document, div.ref['id'], None)
    assert cb.func == obj._server_change


def test_text_input_controls():
    text_input = TextInput()

    controls = text_input.controls()

    assert isinstance(controls, Tabs)
    assert len(controls) == 2
    wb1, wb2 = controls
    assert isinstance(wb1, WidgetBox)
    assert len(wb1) == 6
    name, disabled, *(ws) = wb1

    assert isinstance(name, StaticText)
    assert isinstance(disabled, Checkbox)

    not_checked = []
    for w in ws:
        if w.name == 'Value':
            assert isinstance(w, TextInput)
            text_input.value = "New value"
            assert w.value == "New value"
        elif w.name == 'Value input':
            assert isinstance(w, TextInput)
        elif w.name == 'Placeholder':
            assert isinstance(w, TextInput)
            text_input.placeholder = "Test placeholder..."
            assert w.value == "Test placeholder..."
        elif w.name == 'Max length':
            assert isinstance(w, IntInput)
        else:
            not_checked.append(w)

    assert not not_checked

    assert isinstance(wb2, WidgetBox)
    assert len(wb2) == len(list(Viewable.param)) + 1


def test_text_input_controls_explicit():
    text_input = TextInput()

    controls = text_input.controls(['placeholder', 'disabled'])

    assert isinstance(controls, WidgetBox)
    assert len(controls) == 3
    name, disabled, placeholder = controls

    assert isinstance(name, StaticText)
    assert isinstance(disabled, Checkbox)
    assert isinstance(placeholder, TextInput)

    text_input.disabled = True
    assert disabled.value

    text_input.placeholder = "Test placeholder..."
    assert placeholder.value == "Test placeholder..."


def test_reactive_html_basic():

    class Test(ReactiveHTML):

        int = param.Integer(default=3, doc='An integer')

        float = param.Number(default=3.14, doc='A float')

        _template = '<div id="div" width=${int}></div>'

    data_model = Test._data_model
    assert data_model.__name__ == 'Test1'

    properties = data_model.properties()
    assert 'int' in properties
    assert 'float' in properties

    int_prop = data_model.lookup('int')
    assert isinstance(int_prop.property, bp.Int)
    assert int_prop.class_default(data_model) == 3

    float_prop = data_model.lookup('float')
    assert isinstance(float_prop.property, bp.Float)
    assert float_prop.class_default(data_model) == 3.14

    assert Test._node_callbacks == {}

    test = Test()
    root = test.get_root()
    assert test._attrs == {'div': [('width', ['int'], '{int}')]}
    assert root.callbacks == {}
    assert root.events == {}


def test_reactive_html_no_id_param_error():

    with pytest.raises(ValueError) as excinfo:
        class Test(ReactiveHTML):
            width = param.Number(default=200)

            _template = '<div width=${width}></div>'

    assert "Found <div> node with the `width` attribute referencing the `width` parameter." in str(excinfo.value)


def test_reactive_html_no_id_method_error():

    with pytest.raises(ValueError) as excinfo:
        class Test(ReactiveHTML):
            _template = '<div onclick=${_onclick}></div>'

            def _onclick(self):
                pass

    assert "Found <div> node with the `onclick` callback referencing the `_onclick` method." in str(excinfo.value)


def test_reactive_html_dom_events():

    class TestDOMEvents(ReactiveHTML):

        int = param.Integer(default=3, doc='An integer')

        float = param.Number(default=3.14, doc='A float')

        _template = '<div id="div" width=${int}></div>'

        _dom_events = {'div': ['change']}

    data_model = TestDOMEvents._data_model
    assert data_model.__name__ == 'TestDOMEvents1'

    properties = data_model.properties()
    assert 'int' in properties
    assert 'float' in properties

    int_prop = data_model.lookup('int')
    assert isinstance(int_prop.property, bp.Int)
    assert int_prop.class_default(data_model) == 3

    float_prop = data_model.lookup('float')
    assert isinstance(float_prop.property, bp.Float)
    assert float_prop.class_default(data_model) == 3.14

    assert TestDOMEvents._node_callbacks == {}

    test = TestDOMEvents()
    root = test.get_root()
    assert test._attrs == {'div': [('width', ['int'], '{int}')]}
    assert root.callbacks == {}
    assert root.events == {'div': {'change': True}}


def test_reactive_html_inline():

    class TestInline(ReactiveHTML):

        int = param.Integer(default=3, doc='An integer')

        _template = '<div id="div" onchange=${_div_change} width=${int}></div>'

        def _div_change(self, event):
            pass

    data_model = TestInline._data_model
    assert data_model.__name__ == 'TestInline1'

    properties = data_model.properties()
    assert 'int' in properties

    int_prop = data_model.lookup('int')
    assert isinstance(int_prop.property, bp.Int)
    assert int_prop.class_default(data_model) == 3

    assert TestInline._node_callbacks == {'div': [('onchange', '_div_change')]}
    assert TestInline._inline_callbacks == [('div', 'onchange', '_div_change')]

    test = TestInline()
    root = test.get_root()
    assert test._attrs == {
        'div': [
            ('onchange', [], '{_div_change}'),
            ('width', ['int'], '{int}')
        ]
    }
    assert root.callbacks == {'div': [('onchange', '_div_change')]}
    assert root.events == {}

    test.on_event('div', 'click', print)
    assert root.events == {'div': {'click': False}}


def test_reactive_html_children():

    class TestChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = '<div id="div">${children}</div>'

    assert TestChildren._node_callbacks == {}
    assert TestChildren._inline_callbacks == []
    assert TestChildren._parser.children == {'div': 'children'}

    widget = TextInput()
    test = TestChildren(children=[widget])
    root = test.get_root()
    assert test._attrs == {}
    assert root.children == {'div': [widget._models[root.ref['id']][0]]}
    assert len(widget._models) == 1
    assert test._panes == {'children': [widget]}

    widget_new = TextInput()
    test.children = [widget_new]
    assert len(widget._models) == 0
    assert root.children == {'div': [widget_new._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget_new]}

    test._cleanup(root)
    assert len(test._models) == 0
    assert len(widget_new._models) == 0


def test_reactive_html_templated_children():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = """
        <select id="select">
        {% for option in children %}
        <option id="option-{{ loop.index0 }}">${children[{{ loop.index0 }}]}</option>
        {% endfor %}
        </div>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    widget = TextInput()
    test = TestTemplatedChildren(children=[widget])
    root = test.get_root()
    assert test._attrs == {}
    assert root.looped == ['option']
    assert root.children == {'option': [widget._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget]}

    widget_new = TextInput()
    test.children = [widget_new]
    assert len(widget._models) == 0
    assert root.children == {'option': [widget_new._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget_new]}


def test_reactive_html_templated_dict_children():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.Dict(default={})

        _template = """
        <select id="select">
        {% for key, option in children.items() %}
        <option id="option-{{ loop.index0 }}">${children[{{ key }}]}</option>
        {% endfor %}
        </div>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    widget = TextInput()
    test = TestTemplatedChildren(children={'test': widget})
    root = test.get_root()
    assert test._attrs == {}
    assert root.looped == ['option']
    assert root.children == {'option': [widget._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget]}

    widget_model = widget._models[root.ref['id']][0]
    widget_new = TextInput()
    test.children = {'test': widget_new, 'test2': widget}
    assert len(widget._models) == 1
    assert root.children == {
        'option': [
            widget_new._models[root.ref['id']][0],
            widget_model
        ]
    }
    assert test._panes == {'children': [widget_new, widget]}


def test_reactive_html_templated_children_add_loop_id():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = """
        <select id="select">
        {%- for option in children %}
        <option id="option">${children[{{ loop.index0 }}]}</option>
        {%- endfor %}
        </select>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    test = TestTemplatedChildren(children=['A', 'B', 'C'])

    assert test._get_template()[0] == """
        <select id="select-${id}">
        <option id="option-0-${id}"></option>
        <option id="option-1-${id}"></option>
        <option id="option-2-${id}"></option>
        </select>
        """

    model = test.get_root()
    assert test._attrs == {}
    assert model.looped == ['option']


def test_reactive_html_templated_children_add_loop_id_and_for_loop_var():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = """
        <select id="select">
        {%- for option in children %}
        <option id="option">${option}</option>
        {%- endfor %}
        </select>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    test = TestTemplatedChildren(children=['A', 'B', 'C'])

    assert test._get_template()[0] == """
        <select id="select-${id}">
        <option id="option-0-${id}"></option>
        <option id="option-1-${id}"></option>
        <option id="option-2-${id}"></option>
        </select>
        """

    model = test.get_root()
    assert test._attrs == {}
    assert model.looped == ['option']


@pytest.mark.parametrize('operator', ['', '+', '-', '*', '\\', '%', '**', '>>', '<<', '>>>', '&', '^', '&&', '||', '??'])
@pytest.mark.parametrize('sep', [' ', ''])
def test_reactive_html_scripts_linked_properties_assignment_operator(operator, sep):

    class TestScripts(ReactiveHTML):

        clicks = param.Integer()

        _template = "<div id='test'></div>"

        _scripts = {'render': f'test.onclick = () => {{ data.clicks{sep}{operator}= 1 }}'}

    assert TestScripts()._linked_properties() == ['clicks']
28.312629
121
0.631079
import unittest.mock

from functools import partial

import bokeh.core.properties as bp
import param
import pytest

from bokeh.document import Document
from bokeh.io.doc import patch_curdoc
from bokeh.models import Div

from panel.layout import Tabs, WidgetBox
from panel.reactive import Reactive, ReactiveHTML
from panel.viewable import Viewable
from panel.widgets import (
    Checkbox, IntInput, StaticText, TextInput,
)


def test_reactive_default_title():
    doc = ReactiveHTML().server_doc()
    assert doc.title == 'Panel Application'


def test_reactive_servable_title():
    doc = Document()
    session_context = unittest.mock.Mock()
    with patch_curdoc(doc):
        doc._session_context = lambda: session_context
        ReactiveHTML().servable(title='A')
        ReactiveHTML().servable(title='B')
    assert doc.title == 'B'


def test_link():

    class ReactiveLink(Reactive):
        a = param.Parameter()

    obj = ReactiveLink()
    obj2 = ReactiveLink()
    obj.link(obj2, a='a')
    obj.a = 1

    assert obj.a == 1
    assert obj2.a == 1


def test_param_rename():

    class ReactiveRename(Reactive):
        a = param.Parameter()

        _rename = {'a': 'b'}

    obj = ReactiveRename()

    params = obj._process_property_change({'b': 1})
    assert params == {'a': 1}

    properties = obj._process_param_change({'a': 1})
    assert properties == {'b': 1}


def test_link_properties_nb(document, comm):

    class ReactiveLink(Reactive):
        text = param.String(default='A')

    obj = ReactiveLink()
    div = Div()

    obj._link_props(div, ['text'], document, div, comm)
    assert 'text' in div._callbacks

    cb = div._callbacks['text'][0]
    assert isinstance(cb, partial)
    assert cb.args == (document, div.ref['id'], comm, None)
    assert cb.func == obj._comm_change


def test_link_properties_server(document):

    class ReactiveLink(Reactive):
        text = param.String(default='A')

    obj = ReactiveLink()
    div = Div()

    obj._link_props(div, ['text'], document, div)
    assert 'text' in div._callbacks

    cb = div._callbacks['text'][0]
    assert isinstance(cb, partial)
    assert cb.args == (document, div.ref['id'], None)
    assert cb.func == obj._server_change


def test_text_input_controls():
    text_input = TextInput()

    controls = text_input.controls()

    assert isinstance(controls, Tabs)
    assert len(controls) == 2
    wb1, wb2 = controls
    assert isinstance(wb1, WidgetBox)
    assert len(wb1) == 6
    name, disabled, *(ws) = wb1

    assert isinstance(name, StaticText)
    assert isinstance(disabled, Checkbox)

    not_checked = []
    for w in ws:
        if w.name == 'Value':
            assert isinstance(w, TextInput)
            text_input.value = "New value"
            assert w.value == "New value"
        elif w.name == 'Value input':
            assert isinstance(w, TextInput)
        elif w.name == 'Placeholder':
            assert isinstance(w, TextInput)
            text_input.placeholder = "Test placeholder..."
            assert w.value == "Test placeholder..."
        elif w.name == 'Max length':
            assert isinstance(w, IntInput)
        else:
            not_checked.append(w)

    assert not not_checked

    assert isinstance(wb2, WidgetBox)
    assert len(wb2) == len(list(Viewable.param)) + 1


def test_text_input_controls_explicit():
    text_input = TextInput()

    controls = text_input.controls(['placeholder', 'disabled'])

    assert isinstance(controls, WidgetBox)
    assert len(controls) == 3
    name, disabled, placeholder = controls

    assert isinstance(name, StaticText)
    assert isinstance(disabled, Checkbox)
    assert isinstance(placeholder, TextInput)

    text_input.disabled = True
    assert disabled.value

    text_input.placeholder = "Test placeholder..."
    assert placeholder.value == "Test placeholder..."


def test_reactive_html_basic():

    class Test(ReactiveHTML):

        int = param.Integer(default=3, doc='An integer')

        float = param.Number(default=3.14, doc='A float')

        _template = '<div id="div" width=${int}></div>'

    data_model = Test._data_model
    assert data_model.__name__ == 'Test1'

    properties = data_model.properties()
    assert 'int' in properties
    assert 'float' in properties

    int_prop = data_model.lookup('int')
    assert isinstance(int_prop.property, bp.Int)
    assert int_prop.class_default(data_model) == 3

    float_prop = data_model.lookup('float')
    assert isinstance(float_prop.property, bp.Float)
    assert float_prop.class_default(data_model) == 3.14

    assert Test._node_callbacks == {}

    test = Test()
    root = test.get_root()
    assert test._attrs == {'div': [('width', ['int'], '{int}')]}
    assert root.callbacks == {}
    assert root.events == {}


def test_reactive_html_no_id_param_error():

    with pytest.raises(ValueError) as excinfo:
        class Test(ReactiveHTML):
            width = param.Number(default=200)

            _template = '<div width=${width}></div>'

    assert "Found <div> node with the `width` attribute referencing the `width` parameter." in str(excinfo.value)


def test_reactive_html_no_id_method_error():

    with pytest.raises(ValueError) as excinfo:
        class Test(ReactiveHTML):
            _template = '<div onclick=${_onclick}></div>'

            def _onclick(self):
                pass

    assert "Found <div> node with the `onclick` callback referencing the `_onclick` method." in str(excinfo.value)


def test_reactive_html_dom_events():

    class TestDOMEvents(ReactiveHTML):

        int = param.Integer(default=3, doc='An integer')

        float = param.Number(default=3.14, doc='A float')

        _template = '<div id="div" width=${int}></div>'

        _dom_events = {'div': ['change']}

    data_model = TestDOMEvents._data_model
    assert data_model.__name__ == 'TestDOMEvents1'

    properties = data_model.properties()
    assert 'int' in properties
    assert 'float' in properties

    int_prop = data_model.lookup('int')
    assert isinstance(int_prop.property, bp.Int)
    assert int_prop.class_default(data_model) == 3

    float_prop = data_model.lookup('float')
    assert isinstance(float_prop.property, bp.Float)
    assert float_prop.class_default(data_model) == 3.14

    assert TestDOMEvents._node_callbacks == {}

    test = TestDOMEvents()
    root = test.get_root()
    assert test._attrs == {'div': [('width', ['int'], '{int}')]}
    assert root.callbacks == {}
    assert root.events == {'div': {'change': True}}


def test_reactive_html_inline():

    class TestInline(ReactiveHTML):

        int = param.Integer(default=3, doc='An integer')

        _template = '<div id="div" onchange=${_div_change} width=${int}></div>'

        def _div_change(self, event):
            pass

    data_model = TestInline._data_model
    assert data_model.__name__ == 'TestInline1'

    properties = data_model.properties()
    assert 'int' in properties

    int_prop = data_model.lookup('int')
    assert isinstance(int_prop.property, bp.Int)
    assert int_prop.class_default(data_model) == 3

    assert TestInline._node_callbacks == {'div': [('onchange', '_div_change')]}
    assert TestInline._inline_callbacks == [('div', 'onchange', '_div_change')]

    test = TestInline()
    root = test.get_root()
    assert test._attrs == {
        'div': [
            ('onchange', [], '{_div_change}'),
            ('width', ['int'], '{int}')
        ]
    }
    assert root.callbacks == {'div': [('onchange', '_div_change')]}
    assert root.events == {}

    test.on_event('div', 'click', print)
    assert root.events == {'div': {'click': False}}


def test_reactive_html_children():

    class TestChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = '<div id="div">${children}</div>'

    assert TestChildren._node_callbacks == {}
    assert TestChildren._inline_callbacks == []
    assert TestChildren._parser.children == {'div': 'children'}

    widget = TextInput()
    test = TestChildren(children=[widget])
    root = test.get_root()
    assert test._attrs == {}
    assert root.children == {'div': [widget._models[root.ref['id']][0]]}
    assert len(widget._models) == 1
    assert test._panes == {'children': [widget]}

    widget_new = TextInput()
    test.children = [widget_new]
    assert len(widget._models) == 0
    assert root.children == {'div': [widget_new._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget_new]}

    test._cleanup(root)
    assert len(test._models) == 0
    assert len(widget_new._models) == 0


def test_reactive_html_templated_children():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = """
        <select id="select">
        {% for option in children %}
        <option id="option-{{ loop.index0 }}">${children[{{ loop.index0 }}]}</option>
        {% endfor %}
        </div>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    widget = TextInput()
    test = TestTemplatedChildren(children=[widget])
    root = test.get_root()
    assert test._attrs == {}
    assert root.looped == ['option']
    assert root.children == {'option': [widget._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget]}

    widget_new = TextInput()
    test.children = [widget_new]
    assert len(widget._models) == 0
    assert root.children == {'option': [widget_new._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget_new]}


def test_reactive_html_templated_dict_children():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.Dict(default={})

        _template = """
        <select id="select">
        {% for key, option in children.items() %}
        <option id="option-{{ loop.index0 }}">${children[{{ key }}]}</option>
        {% endfor %}
        </div>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    widget = TextInput()
    test = TestTemplatedChildren(children={'test': widget})
    root = test.get_root()
    assert test._attrs == {}
    assert root.looped == ['option']
    assert root.children == {'option': [widget._models[root.ref['id']][0]]}
    assert test._panes == {'children': [widget]}

    widget_model = widget._models[root.ref['id']][0]
    widget_new = TextInput()
    test.children = {'test': widget_new, 'test2': widget}
    assert len(widget._models) == 1
    assert root.children == {
        'option': [
            widget_new._models[root.ref['id']][0],
            widget_model
        ]
    }
    assert test._panes == {'children': [widget_new, widget]}


def test_reactive_html_templated_children_add_loop_id():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = """
        <select id="select">
        {%- for option in children %}
        <option id="option">${children[{{ loop.index0 }}]}</option>
        {%- endfor %}
        </select>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    test = TestTemplatedChildren(children=['A', 'B', 'C'])

    assert test._get_template()[0] == """
        <select id="select-${id}">
        <option id="option-0-${id}"></option>
        <option id="option-1-${id}"></option>
        <option id="option-2-${id}"></option>
        </select>
        """

    model = test.get_root()
    assert test._attrs == {}
    assert model.looped == ['option']


def test_reactive_html_templated_children_add_loop_id_and_for_loop_var():

    class TestTemplatedChildren(ReactiveHTML):

        children = param.List(default=[])

        _template = """
        <select id="select">
        {%- for option in children %}
        <option id="option">${option}</option>
        {%- endfor %}
        </select>
        """

    assert TestTemplatedChildren._node_callbacks == {}
    assert TestTemplatedChildren._inline_callbacks == []
    assert TestTemplatedChildren._parser.children == {'option': 'children'}

    test = TestTemplatedChildren(children=['A', 'B', 'C'])

    assert test._get_template()[0] == """
        <select id="select-${id}">
        <option id="option-0-${id}"></option>
        <option id="option-1-${id}"></option>
        <option id="option-2-${id}"></option>
        </select>
        """

    model = test.get_root()
    assert test._attrs == {}
    assert model.looped == ['option']


@pytest.mark.parametrize('operator', ['', '+', '-', '*', '\\', '%', '**', '>>', '<<', '>>>', '&', '^', '&&', '||', '??'])
@pytest.mark.parametrize('sep', [' ', ''])
def test_reactive_html_scripts_linked_properties_assignment_operator(operator, sep):

    class TestScripts(ReactiveHTML):

        clicks = param.Integer()

        _template = "<div id='test'></div>"

        _scripts = {'render': f'test.onclick = () => {{ data.clicks{sep}{operator}= 1 }}'}

    assert TestScripts()._linked_properties() == ['clicks']
true
true
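The test suite in the record above pins down panel's ReactiveHTML contract: parameters referenced as ${name} in _template become bokeh data-model properties, nodes must carry an id before they may bind parameters or callbacks, and inline callbacks such as onchange=${_method} are collected into _node_callbacks. A minimal component in that style is sketched below; the Counter class is invented for illustration and follows the conventions the tests assert, not any class contained in the record.

import param
from panel.reactive import ReactiveHTML

class Counter(ReactiveHTML):
    """Hypothetical example component, not part of the test suite above."""

    clicks = param.Integer(default=0)

    # The node carries an id so the template parser accepts the bindings;
    # data-clicks=${clicks} links the parameter into the rendered attribute
    # and onclick=${_increment} registers an inline callback.
    _template = '<button id="btn" data-clicks=${clicks} onclick=${_increment}>Click me</button>'

    def _increment(self, event):
        self.clicks += 1

# Counter().servable() would render the button in a Panel application.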
f7002b2400bc2b06679586a2df52f97e215dcc65
4,748
py
Python
mhr_api/src/mhr_api/models/event_tracking.py
cameron-freshworks/ppr
01d6f5d300c791aebad5e58bb4601e9be2ccfc46
[ "Apache-2.0" ]
null
null
null
mhr_api/src/mhr_api/models/event_tracking.py
cameron-freshworks/ppr
01d6f5d300c791aebad5e58bb4601e9be2ccfc46
[ "Apache-2.0" ]
null
null
null
mhr_api/src/mhr_api/models/event_tracking.py
cameron-freshworks/ppr
01d6f5d300c791aebad5e58bb4601e9be2ccfc46
[ "Apache-2.0" ]
null
null
null
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds data for ppr queue event tracking."""
from __future__ import annotations

from mhr_api.models import utils as model_utils
from mhr_api.utils.base import BaseEnum

from .db import db


class EventTracking(db.Model):  # pylint: disable=too-many-instance-attributes
    """This class manages all of the event tracking information."""

    class EventTrackingTypes(BaseEnum):
        """Render an Enum of the event tracking types."""

        SEARCH_REPORT = 'SEARCH_REPORT'
        API_NOTIFICATION = 'API_NOTIFICATION'
        EMAIL = 'EMAIL'
        SURFACE_MAIL = 'SURFACE_MAIL'
        EMAIL_REPORT = 'EMAIL_REPORT'
        REGISTRATION_REPORT = 'REGISTRATION_REPORT'

    __tablename__ = 'event_tracking'

    id = db.Column('id', db.Integer, db.Sequence('event_tracking_id_seq'), primary_key=True)
    key_id = db.Column('key_id', db.Integer, nullable=False, index=True)
    event_ts = db.Column('event_ts', db.DateTime, nullable=False, index=True)
    event_tracking_type = db.Column('event_tracking_type', db.String(20),
                                    db.ForeignKey('event_tracking_types.event_tracking_type'),
                                    nullable=False, index=True)
    status = db.Column('status', db.Integer, nullable=True)
    message = db.Column('message', db.String(2000), nullable=True)
    email_id = db.Column('email_address', db.String(250), nullable=True)

    # Relationships - SerialType
    tracking_type = db.relationship('EventTrackingType', foreign_keys=[event_tracking_type],
                                    back_populates='event_tracking', cascade='all, delete', uselist=False)

    def save(self):
        """Save the object to the database immediately."""
        db.session.add(self)
        db.session.commit()

    @property
    def json(self) -> dict:
        """Return the event tracking record as a json object."""
        event_tracking = {
            'eventTrackingId': self.id,
            'keyId': self.key_id,
            'type': self.event_tracking_type,
            'createDateTime': model_utils.format_ts(self.event_ts)
        }
        if self.status:
            event_tracking['status'] = self.status
        if self.message:
            event_tracking['message'] = self.message
        if self.email_id:
            event_tracking['emailAddress'] = self.email_id
        return event_tracking

    @classmethod
    def find_by_id(cls, event_id: int):
        """Return a tracking object by ID."""
        if event_id:
            return cls.query.get(event_id)
        return None

    @classmethod
    def find_by_key_id(cls, key_id: int):
        """Return a list of event tracking objects by key id."""
        event_tracking = None
        if key_id:
            event_tracking = cls.query.filter(EventTracking.key_id == key_id) \
                                      .order_by(EventTracking.id).all()
        return event_tracking

    @classmethod
    def find_by_key_id_type(cls, key_id: int, event_tracking_type: str, extra_key: str = None):
        """Return a list of event tracking objects by key id and event tracking type."""
        event_tracking = None
        if key_id and event_tracking_type:
            event_tracking = cls.query.filter(EventTracking.key_id == key_id,
                                              EventTracking.event_tracking_type == event_tracking_type) \
                                      .order_by(EventTracking.id).all()
        if event_tracking is not None and extra_key:
            events = []
            for event in event_tracking:
                if event.message and event.message.find(extra_key) > 0:
                    events.append(event)
            return events
        return event_tracking

    @staticmethod
    def create(key_id: int, event_type: str, status: int = None, message: str = None):
        """Create an EventTracking record."""
        event_tracking = EventTracking(key_id=key_id,
                                       event_tracking_type=event_type,
                                       status=status,
                                       message=message)
        event_tracking.event_ts = model_utils.now_ts()
        event_tracking.save()
        return event_tracking
40.237288
117
0.64385
from __future__ import annotations

from mhr_api.models import utils as model_utils
from mhr_api.utils.base import BaseEnum

from .db import db


class EventTracking(db.Model):

    class EventTrackingTypes(BaseEnum):
        SEARCH_REPORT = 'SEARCH_REPORT'
        API_NOTIFICATION = 'API_NOTIFICATION'
        EMAIL = 'EMAIL'
        SURFACE_MAIL = 'SURFACE_MAIL'
        EMAIL_REPORT = 'EMAIL_REPORT'
        REGISTRATION_REPORT = 'REGISTRATION_REPORT'

    __tablename__ = 'event_tracking'

    id = db.Column('id', db.Integer, db.Sequence('event_tracking_id_seq'), primary_key=True)
    key_id = db.Column('key_id', db.Integer, nullable=False, index=True)
    event_ts = db.Column('event_ts', db.DateTime, nullable=False, index=True)
    event_tracking_type = db.Column('event_tracking_type', db.String(20),
                                    db.ForeignKey('event_tracking_types.event_tracking_type'),
                                    nullable=False, index=True)
    status = db.Column('status', db.Integer, nullable=True)
    message = db.Column('message', db.String(2000), nullable=True)
    email_id = db.Column('email_address', db.String(250), nullable=True)

    tracking_type = db.relationship('EventTrackingType', foreign_keys=[event_tracking_type],
                                    back_populates='event_tracking', cascade='all, delete', uselist=False)

    def save(self):
        db.session.add(self)
        db.session.commit()

    @property
    def json(self) -> dict:
        event_tracking = {
            'eventTrackingId': self.id,
            'keyId': self.key_id,
            'type': self.event_tracking_type,
            'createDateTime': model_utils.format_ts(self.event_ts)
        }
        if self.status:
            event_tracking['status'] = self.status
        if self.message:
            event_tracking['message'] = self.message
        if self.email_id:
            event_tracking['emailAddress'] = self.email_id
        return event_tracking

    @classmethod
    def find_by_id(cls, event_id: int):
        if event_id:
            return cls.query.get(event_id)
        return None

    @classmethod
    def find_by_key_id(cls, key_id: int):
        event_tracking = None
        if key_id:
            event_tracking = cls.query.filter(EventTracking.key_id == key_id) \
                                      .order_by(EventTracking.id).all()
        return event_tracking

    @classmethod
    def find_by_key_id_type(cls, key_id: int, event_tracking_type: str, extra_key: str = None):
        event_tracking = None
        if key_id and event_tracking_type:
            event_tracking = cls.query.filter(EventTracking.key_id == key_id,
                                              EventTracking.event_tracking_type == event_tracking_type) \
                                      .order_by(EventTracking.id).all()
        if event_tracking is not None and extra_key:
            events = []
            for event in event_tracking:
                if event.message and event.message.find(extra_key) > 0:
                    events.append(event)
            return events
        return event_tracking

    @staticmethod
    def create(key_id: int, event_type: str, status: int = None, message: str = None):
        event_tracking = EventTracking(key_id=key_id,
                                       event_tracking_type=event_type,
                                       status=status,
                                       message=message)
        event_tracking.event_ts = model_utils.now_ts()
        event_tracking.save()
        return event_tracking
true
true
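The model in the record above is a thin audit-log table with finder helpers layered on Flask-SQLAlchemy. A short usage sketch follows; it assumes an active application context, and the record_report_delivery/has_report_event helpers, the registration_id key and the message text are invented for illustration rather than taken from the record.

from mhr_api.models.event_tracking import EventTracking

def record_report_delivery(registration_id: int) -> None:
    # Persist one success event; create() stamps event_ts and commits.
    EventTracking.create(
        key_id=registration_id,
        event_type=EventTracking.EventTrackingTypes.REGISTRATION_REPORT.value,
        status=200,
        message='Report generated and stored.')

def has_report_event(registration_id: int) -> bool:
    # find_by_key_id_type returns a list (possibly empty) or None.
    events = EventTracking.find_by_key_id_type(
        registration_id, EventTracking.EventTrackingTypes.REGISTRATION_REPORT.value)
    return bool(events)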
f7002c90467435a91e99ee2aae11e1a594ffba14
36,375
py
Python
hydrus/test/TestHydrusSerialisable.py
baibhavvishalpani/hydrus
1117ebbde9e5d5ec17e469dba6bd8086838cfb89
[ "WTFPL" ]
null
null
null
hydrus/test/TestHydrusSerialisable.py
baibhavvishalpani/hydrus
1117ebbde9e5d5ec17e469dba6bd8086838cfb89
[ "WTFPL" ]
null
null
null
hydrus/test/TestHydrusSerialisable.py
baibhavvishalpani/hydrus
1117ebbde9e5d5ec17e469dba6bd8086838cfb89
[ "WTFPL" ]
null
null
null
import unittest

from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusSerialisable

from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientData
from hydrus.client import ClientDefaults
from hydrus.client import ClientDuplicates
from hydrus.client import ClientSearch
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.importing import ClientImportOptions
from hydrus.client.importing import ClientImportSubscriptions
from hydrus.client.importing import ClientImportSubscriptionQuery
from hydrus.client.media import ClientMedia
from hydrus.client.media import ClientMediaManagers
from hydrus.client.media import ClientMediaResult
from hydrus.client.metadata import ClientTags

from hydrus.test import TestController as TC

class TestSerialisables( unittest.TestCase ):

    def _dump_and_load_and_test( self, obj, test_func ):

        serialisable_tuple = obj.GetSerialisableTuple()

        self.assertIsInstance( serialisable_tuple, tuple )

        if isinstance( obj, HydrusSerialisable.SerialisableBaseNamed ):

            ( serialisable_type, name, version, serialisable_info ) = serialisable_tuple

        elif isinstance( obj, HydrusSerialisable.SerialisableBase ):

            ( serialisable_type, version, serialisable_info ) = serialisable_tuple

        self.assertEqual( serialisable_type, obj.SERIALISABLE_TYPE )
        self.assertEqual( version, obj.SERIALISABLE_VERSION )

        dupe_obj = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_tuple )

        self.assertIsNot( obj, dupe_obj )

        test_func( obj, dupe_obj )

        #

        json_string = obj.DumpToString()

        self.assertIsInstance( json_string, str )

        dupe_obj = HydrusSerialisable.CreateFromString( json_string )

        self.assertIsNot( obj, dupe_obj )

        test_func( obj, dupe_obj )

        #

        network_bytes = obj.DumpToNetworkBytes()

        self.assertIsInstance( network_bytes, bytes )

        dupe_obj = HydrusSerialisable.CreateFromNetworkBytes( network_bytes )

        self.assertIsNot( obj, dupe_obj )

        test_func( obj, dupe_obj )

    def test_basics( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( len( list(obj.items()) ), len( list(dupe_obj.items()) ) )

            for ( key, value ) in list(obj.items()):

                self.assertEqual( value, dupe_obj[ key ] )

        #

        d = HydrusSerialisable.SerialisableDictionary()

        d[ 1 ] = 2
        d[ 3 ] = 'test1'
        d[ 'test2' ] = 4
        d[ 'test3' ] = 5
        d[ 6 ] = HydrusSerialisable.SerialisableDictionary( { i : 'test' + str( i ) for i in range( 20 ) } )
        d[ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test pred 1' ) ] = 56
        d[ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test pred 2' ) ] = HydrusSerialisable.SerialisableList( [ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test' + str( i ) ) for i in range( 10 ) ] )

        self.assertEqual( len( list(d.keys()) ), 7 )

        for ( key, value ) in list(d.items()):

            self.assertEqual( d[ key ], value )

        self._dump_and_load_and_test( d, test )

        #

        db = HydrusSerialisable.SerialisableBytesDictionary()

        db[ HydrusData.GenerateKey() ] = HydrusData.GenerateKey()
        db[ HydrusData.GenerateKey() ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]
        db[ 1 ] = HydrusData.GenerateKey()
        db[ 2 ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]

        self.assertEqual( len( list(db.keys()) ), 4 )

        for ( key, value ) in list(db.items()):

            self.assertEqual( db[ key ], value )

        self._dump_and_load_and_test( db, test )

    def test_SERIALISABLE_TYPE_APPLICATION_COMMAND( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj.GetCommandType(), dupe_obj.GetCommandType() )
            self.assertEqual( obj.GetData(), dupe_obj.GetData() )

        acs = []

        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_SIMPLE, CAC.SIMPLE_ARCHIVE_FILE ), 'archive file' ) )
        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for unknown service!' ) )
        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for my tags' ) )
        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_SET, 0.4 ) ), 'set ratings uncertain rating, "0.4" for unknown service!' ) )

        for ( ac, s ) in acs:

            self._dump_and_load_and_test( ac, test )

            self.assertEqual( ac.ToString(), s )

    def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj.ToTuple(), dupe_obj.ToTuple() )

        duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ) ] )
        duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ) ] )
        duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ) ] )

        inbox = True
        size = 40960
        mime = HC.IMAGE_JPEG
        width = 640
        height = 480
        duration = None
        num_frames = None
        has_audio = False
        num_words = None

        local_locations_manager = ClientMediaManagers.LocationsManager( { CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
        trash_locations_manager = ClientMediaManagers.LocationsManager( { CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
        deleted_locations_manager = ClientMediaManagers.LocationsManager( set(), { CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), inbox )

        # duplicate to generate proper dicts

        one_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } } ).Duplicate()
        two_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } } ).Duplicate()
        substantial_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } } ).Duplicate()
        empty_tags_manager = ClientMediaManagers.TagsManager( {}, {} ).Duplicate()

        one_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
        two_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 0.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.6 } )
        substantial_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
        empty_ratings_manager = ClientMediaManagers.RatingsManager( {} )

        notes_manager = ClientMediaManagers.NotesManager( {} )

        file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager()

        #

        local_hash_has_values = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 1, local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )

        local_media_has_values = ClientMedia.MediaSingleton( media_result )

        #

        other_local_hash_has_values = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 2, other_local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )

        other_local_media_has_values = ClientMedia.MediaSingleton( media_result )

        #

        local_hash_empty = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 3, local_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, local_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )

        local_media_empty = ClientMedia.MediaSingleton( media_result )

        #

        trashed_hash_empty = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 4, trashed_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, trash_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )

        trashed_media_empty = ClientMedia.MediaSingleton( media_result )

        #

        deleted_hash_empty = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 5, deleted_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, deleted_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )

        deleted_media_empty = ClientMedia.MediaSingleton( media_result )

        #

        one_hash = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 6, one_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, one_tags_manager, local_locations_manager, one_ratings_manager, notes_manager, file_viewing_stats_manager )

        one_media = ClientMedia.MediaSingleton( media_result )

        #

        two_hash = HydrusData.GenerateKey()

        file_info_manager = ClientMediaManagers.FileInfoManager( 7, two_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )

        media_result = ClientMediaResult.MediaResult( file_info_manager, two_tags_manager, local_locations_manager, two_ratings_manager, notes_manager, file_viewing_stats_manager )

        two_media = ClientMedia.MediaSingleton( media_result )

        #

        self._dump_and_load_and_test( duplicate_action_options_delete_and_move, test )
        self._dump_and_load_and_test( duplicate_action_options_copy, test )
        self._dump_and_load_and_test( duplicate_action_options_merge, test )

        #

        def assertSCUEqual( one, two ):

            self.assertEqual( TC.ConvertServiceKeysToContentUpdatesToComparable( one ), TC.ConvertServiceKeysToContentUpdatesToComparable( two ) )

        file_deletion_reason = 'test delete'

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { local_hash_empty }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, trashed_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.TRASH_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { trashed_hash_empty }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, deleted_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )

        self.assertEqual( result, {} )

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        #

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        #
        #

        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )

        self.assertEqual( result, {} )

        #

        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]

        assertSCUEqual( result, scu )

        #
        #

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]

        assertSCUEqual( result, scu )

        #

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]

        assertSCUEqual( result, scu )

        #

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( one_media, two_media, file_deletion_reason = file_deletion_reason )

        scu = {}

        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'one', { two_hash } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'two', { one_hash } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { two_hash } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { two_hash } ) ) ]

        assertSCUEqual( result, scu )

    def test_SERIALISABLE_TYPE_SHORTCUT( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( dupe_obj.__hash__(), ( dupe_obj.shortcut_type, dupe_obj.shortcut_key, dupe_obj.shortcut_press_type, tuple( dupe_obj.modifiers ) ).__hash__() )
            self.assertEqual( obj, dupe_obj )

        shortcuts = []

        shortcuts.append( ( ClientGUIShortcuts.Shortcut(), 'f7' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_SPACE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] ), 'space' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'a' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'A' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_HOME, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+alt+home' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_LEFT, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] ), 'left-click' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_MIDDLE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+middle-click' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_SCROLL_DOWN, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_SHIFT ] ), 'alt+shift+scroll down' ) )

        for ( shortcut, s ) in shortcuts:

            self._dump_and_load_and_test( shortcut, test )

            self.assertEqual( shortcut.ToString(), s )

    def test_SERIALISABLE_TYPE_SHORTCUT_SET( self ):

        def test( obj, dupe_obj ):

            for ( shortcut, command ) in obj:

                self.assertEqual( dupe_obj.GetCommand( shortcut ).GetData(), command.GetData() )

        default_shortcuts = ClientDefaults.GetDefaultShortcuts()

        for shortcuts in default_shortcuts:

            self._dump_and_load_and_test( shortcuts, test )

        command_1 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_SIMPLE, CAC.SIMPLE_ARCHIVE_FILE )
        command_2 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )
        command_3 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )

        k_shortcut_1 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_SPACE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] )
        k_shortcut_2 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'a' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
        k_shortcut_3 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'A' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
        k_shortcut_4 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_HOME, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )

        m_shortcut_1 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_LEFT, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] )
        m_shortcut_2 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_MIDDLE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
        m_shortcut_3 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_SCROLL_DOWN, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_SHIFT ] )

        shortcut_set = ClientGUIShortcuts.ShortcutSet( 'test' )

        shortcut_set.SetCommand( k_shortcut_1, command_1 )
        shortcut_set.SetCommand( k_shortcut_2, command_2 )
        shortcut_set.SetCommand( k_shortcut_3, command_2 )
        shortcut_set.SetCommand( k_shortcut_4, command_3 )
        shortcut_set.SetCommand( m_shortcut_1, command_1 )
        shortcut_set.SetCommand( m_shortcut_2, command_2 )
        shortcut_set.SetCommand( m_shortcut_3, command_3 )

        self._dump_and_load_and_test( shortcut_set, test )

        self.assertEqual( shortcut_set.GetCommand( k_shortcut_1 ).GetData(), command_1.GetData() )

        shortcut_set.SetCommand( k_shortcut_1, command_3 )

        self.assertEqual( shortcut_set.GetCommand( k_shortcut_1 ).GetData(), command_3.GetData() )

    def test_SERIALISABLE_TYPE_SUBSCRIPTION( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj.GetName(), dupe_obj.GetName() )
            self.assertEqual( obj._gug_key_and_name, dupe_obj._gug_key_and_name )
            self.assertEqual( len( obj._query_headers ), len( dupe_obj._query_headers ) )
            self.assertEqual( obj._initial_file_limit, dupe_obj._initial_file_limit )
            self.assertEqual( obj._periodic_file_limit, dupe_obj._periodic_file_limit )
            self.assertEqual( obj._paused, dupe_obj._paused )
            self.assertEqual( obj._file_import_options.GetSerialisableTuple(), dupe_obj._file_import_options.GetSerialisableTuple() )
            self.assertEqual( obj._tag_import_options.GetSerialisableTuple(), dupe_obj._tag_import_options.GetSerialisableTuple() )
            self.assertEqual( obj._no_work_until, dupe_obj._no_work_until )

        sub = ClientImportSubscriptions.Subscription( 'test sub' )

        self._dump_and_load_and_test( sub, test )

        gug_key_and_name = ( HydrusData.GenerateKey(), 'muh test gug' )

        query_headers = []

        q = ClientImportSubscriptionQuery.SubscriptionQueryHeader()
        q.SetQueryText( 'test query' )
        query_headers.append( q )

        q = ClientImportSubscriptionQuery.SubscriptionQueryHeader()
        q.SetQueryText( 'test query 2' )
        query_headers.append( q )

        checker_options = ClientImportOptions.CheckerOptions()
        initial_file_limit = 100
        periodic_file_limit = 50
        paused = False

        file_import_options = ClientImportOptions.FileImportOptions()

        service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_tags = False, additional_tags = { 'test additional tag', 'and another' } )

        tag_import_options = ClientImportOptions.TagImportOptions( service_keys_to_service_tag_import_options = { HydrusData.GenerateKey() : service_tag_import_options } )

        no_work_until = HydrusData.GetNow() - 86400 * 20

        sub.SetTuple( gug_key_and_name, checker_options, initial_file_limit, periodic_file_limit, paused, file_import_options, tag_import_options, no_work_until )

        sub.SetQueryHeaders( query_headers )

        self.assertEqual( sub.GetGUGKeyAndName(), gug_key_and_name )
        self.assertEqual( sub.GetTagImportOptions(), tag_import_options )
        self.assertEqual( sub.GetQueryHeaders(), query_headers )

        self.assertEqual( sub._paused, False )
        sub.PauseResume()
        self.assertEqual( sub._paused, True )
        sub.PauseResume()
        self.assertEqual( sub._paused, False )

        self._dump_and_load_and_test( sub, test )

    def test_SERIALISABLE_TYPE_TAG_FILTER( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj._tag_slices_to_rules, dupe_obj._tag_slices_to_rules )

        tags = set()

        tags.add( 'title:test title' )
        tags.add( 'series:neon genesis evangelion' )
        tags.add( 'series:kill la kill' )
        tags.add( 'smile' )
        tags.add( 'blue eyes' )

        #

        tag_filter = ClientTags.TagFilter()

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), set() )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'series:neon genesis evangelion', 'series:kill la kill' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:kill la kill', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'series:kill la kill' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'smile', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'series:neon genesis evangelion', 'series:kill la kill' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:kill la kill', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'series:kill la kill' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( 'series:', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( 'series:', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:neon genesis evangelion', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'blue eyes', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill', 'blue eyes' } )

        # blacklist namespace test

        blacklist_tags = { 'nintendo', 'studio:nintendo' }

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( blacklist_tags ), { 'studio:nintendo' } )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), set() )

        #

        tag_filter = ClientTags.TagFilter()

        tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'studio:nintendo', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), { 'studio:nintendo' } )
51.01683
576
0.674639
import unittest

from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusSerialisable
from hydrus.client import ClientApplicationCommand as CAC
from hydrus.client import ClientConstants as CC
from hydrus.client import ClientData
from hydrus.client import ClientDefaults
from hydrus.client import ClientDuplicates
from hydrus.client import ClientSearch
from hydrus.client.gui import ClientGUIShortcuts
from hydrus.client.importing import ClientImportOptions
from hydrus.client.importing import ClientImportSubscriptions
from hydrus.client.importing import ClientImportSubscriptionQuery
from hydrus.client.media import ClientMedia
from hydrus.client.media import ClientMediaManagers
from hydrus.client.media import ClientMediaResult
from hydrus.client.metadata import ClientTags
from hydrus.test import TestController as TC

class TestSerialisables( unittest.TestCase ):

    def _dump_and_load_and_test( self, obj, test_func ):

        serialisable_tuple = obj.GetSerialisableTuple()

        self.assertIsInstance( serialisable_tuple, tuple )

        if isinstance( obj, HydrusSerialisable.SerialisableBaseNamed ):
            ( serialisable_type, name, version, serialisable_info ) = serialisable_tuple
        elif isinstance( obj, HydrusSerialisable.SerialisableBase ):
            ( serialisable_type, version, serialisable_info ) = serialisable_tuple

        self.assertEqual( serialisable_type, obj.SERIALISABLE_TYPE )
        self.assertEqual( version, obj.SERIALISABLE_VERSION )

        dupe_obj = HydrusSerialisable.CreateFromSerialisableTuple( serialisable_tuple )

        self.assertIsNot( obj, dupe_obj )

        test_func( obj, dupe_obj )

        json_string = obj.DumpToString()

        self.assertIsInstance( json_string, str )

        dupe_obj = HydrusSerialisable.CreateFromString( json_string )

        self.assertIsNot( obj, dupe_obj )

        test_func( obj, dupe_obj )

        network_bytes = obj.DumpToNetworkBytes()

        self.assertIsInstance( network_bytes, bytes )

        dupe_obj = HydrusSerialisable.CreateFromNetworkBytes( network_bytes )

        self.assertIsNot( obj, dupe_obj )

        test_func( obj, dupe_obj )

    def test_basics( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( len( list(obj.items()) ), len( list(dupe_obj.items()) ) )

            for ( key, value ) in list(obj.items()):
                self.assertEqual( value, dupe_obj[ key ] )

        d = HydrusSerialisable.SerialisableDictionary()

        d[ 1 ] = 2
        d[ 3 ] = 'test1'
        d[ 'test2' ] = 4
        d[ 'test3' ] = 5
        d[ 6 ] = HydrusSerialisable.SerialisableDictionary( { i : 'test' + str( i ) for i in range( 20 ) } )
        d[ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test pred 1' ) ] = 56
        d[ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test pred 2' ) ] = HydrusSerialisable.SerialisableList( [ ClientSearch.Predicate( ClientSearch.PREDICATE_TYPE_TAG, 'test' + str( i ) ) for i in range( 10 ) ] )

        self.assertEqual( len( list(d.keys()) ), 7 )

        for ( key, value ) in list(d.items()):
            self.assertEqual( d[ key ], value )

        self._dump_and_load_and_test( d, test )

        db = HydrusSerialisable.SerialisableBytesDictionary()

        db[ HydrusData.GenerateKey() ] = HydrusData.GenerateKey()
        db[ HydrusData.GenerateKey() ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]
        db[ 1 ] = HydrusData.GenerateKey()
        db[ 2 ] = [ HydrusData.GenerateKey() for i in range( 10 ) ]

        self.assertEqual( len( list(db.keys()) ), 4 )

        for ( key, value ) in list(db.items()):
            self.assertEqual( db[ key ], value )

        self._dump_and_load_and_test( db, test )

    def test_SERIALISABLE_TYPE_APPLICATION_COMMAND( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj.GetCommandType(), dupe_obj.GetCommandType() )
            self.assertEqual( obj.GetData(), dupe_obj.GetData() )

        acs = []

        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_SIMPLE, CAC.SIMPLE_ARCHIVE_FILE ), 'archive file' ) )
        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for unknown service!' ) )
        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) ), 'flip on/off mappings "test" for my tags' ) )
        acs.append( ( CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_SET, 0.4 ) ), 'set ratings uncertain rating, "0.4" for unknown service!' ) )

        for ( ac, s ) in acs:
            self._dump_and_load_and_test( ac, test )
            self.assertEqual( ac.ToString(), s )

    def test_SERIALISABLE_TYPE_DUPLICATE_ACTION_OPTIONS( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj.ToTuple(), dupe_obj.ToTuple() )

        duplicate_action_options_delete_and_move = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_MOVE ) ] )
        duplicate_action_options_copy = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_COPY ) ] )
        duplicate_action_options_merge = ClientDuplicates.DuplicateActionOptions( [ ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE, ClientTags.TagFilter() ) ], [ ( TC.LOCAL_RATING_LIKE_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ), ( TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY, HC.CONTENT_MERGE_ACTION_TWO_WAY_MERGE ) ] )

        inbox = True
        size = 40960
        mime = HC.IMAGE_JPEG
        width = 640
        height = 480
        duration = None
        num_frames = None
        has_audio = False
        num_words = None

        local_locations_manager = ClientMediaManagers.LocationsManager( { CC.LOCAL_FILE_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
        trash_locations_manager = ClientMediaManagers.LocationsManager( { CC.TRASH_SERVICE_KEY, CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), set(), inbox )
        deleted_locations_manager = ClientMediaManagers.LocationsManager( set(), { CC.COMBINED_LOCAL_FILE_SERVICE_KEY }, set(), set(), inbox )

        one_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'one' } } } ).Duplicate()
        two_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'two' } } } ).Duplicate()
        substantial_tags_manager = ClientMediaManagers.TagsManager( { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } }, { CC.DEFAULT_LOCAL_TAG_SERVICE_KEY : { HC.CONTENT_STATUS_CURRENT : { 'test tag', 'series:namespaced test tag' } } } ).Duplicate()
        empty_tags_manager = ClientMediaManagers.TagsManager( {}, {} ).Duplicate()

        one_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
        two_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 0.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.6 } )
        substantial_ratings_manager = ClientMediaManagers.RatingsManager( { TC.LOCAL_RATING_LIKE_SERVICE_KEY : 1.0, TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY : 0.8 } )
        empty_ratings_manager = ClientMediaManagers.RatingsManager( {} )

        notes_manager = ClientMediaManagers.NotesManager( {} )

        file_viewing_stats_manager = ClientMediaManagers.FileViewingStatsManager.STATICGenerateEmptyManager()

        local_hash_has_values = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 1, local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )
        local_media_has_values = ClientMedia.MediaSingleton( media_result )

        other_local_hash_has_values = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 2, other_local_hash_has_values, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, substantial_tags_manager, local_locations_manager, substantial_ratings_manager, notes_manager, file_viewing_stats_manager )
        other_local_media_has_values = ClientMedia.MediaSingleton( media_result )

        local_hash_empty = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 3, local_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, local_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
        local_media_empty = ClientMedia.MediaSingleton( media_result )

        trashed_hash_empty = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 4, trashed_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, trash_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
        trashed_media_empty = ClientMedia.MediaSingleton( media_result )

        deleted_hash_empty = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 5, deleted_hash_empty, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, empty_tags_manager, deleted_locations_manager, empty_ratings_manager, notes_manager, file_viewing_stats_manager )
        deleted_media_empty = ClientMedia.MediaSingleton( media_result )

        one_hash = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 6, one_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, one_tags_manager, local_locations_manager, one_ratings_manager, notes_manager, file_viewing_stats_manager )
        one_media = ClientMedia.MediaSingleton( media_result )

        two_hash = HydrusData.GenerateKey()
        file_info_manager = ClientMediaManagers.FileInfoManager( 7, two_hash, size, mime, width, height, duration, num_frames, has_audio, num_words )
        media_result = ClientMediaResult.MediaResult( file_info_manager, two_tags_manager, local_locations_manager, two_ratings_manager, notes_manager, file_viewing_stats_manager )
        two_media = ClientMedia.MediaSingleton( media_result )

        self._dump_and_load_and_test( duplicate_action_options_delete_and_move, test )
        self._dump_and_load_and_test( duplicate_action_options_copy, test )
        self._dump_and_load_and_test( duplicate_action_options_merge, test )

        def assertSCUEqual( one, two ):

            self.assertEqual( TC.ConvertServiceKeysToContentUpdatesToComparable( one ), TC.ConvertServiceKeysToContentUpdatesToComparable( two ) )

        file_deletion_reason = 'test delete'

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { local_hash_empty }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, trashed_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.TRASH_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { trashed_hash_empty }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, deleted_media_empty, delete_second = True, file_deletion_reason = file_deletion_reason )

        self.assertEqual( result, {} )

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_has_values, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_delete_and_move.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, delete_second = True, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'test tag', { other_local_hash_has_values } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_DELETE, ( 'series:namespaced test tag', { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( None, { other_local_hash_has_values } ) ) ]
        scu[ CC.LOCAL_FILE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_FILES, HC.CONTENT_UPDATE_DELETE, { other_local_hash_has_values }, reason = file_deletion_reason ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )

        self.assertEqual( result, {} )

        result = duplicate_action_options_copy.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_has_values, local_media_empty, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( local_media_empty, other_local_media_has_values, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'test tag', { local_hash_empty } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'series:namespaced test tag', { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { local_hash_empty } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { local_hash_empty } ) ) ]

        assertSCUEqual( result, scu )

        result = duplicate_action_options_merge.ProcessPairIntoContentUpdates( one_media, two_media, file_deletion_reason = file_deletion_reason )

        scu = {}
        scu[ CC.DEFAULT_LOCAL_TAG_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'one', { two_hash } ) ), HydrusData.ContentUpdate( HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_ADD, ( 'two', { one_hash } ) ) ]
        scu[ TC.LOCAL_RATING_LIKE_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 1.0, { two_hash } ) ) ]
        scu[ TC.LOCAL_RATING_NUMERICAL_SERVICE_KEY ] = [ HydrusData.ContentUpdate( HC.CONTENT_TYPE_RATINGS, HC.CONTENT_UPDATE_ADD, ( 0.8, { two_hash } ) ) ]

        assertSCUEqual( result, scu )

    def test_SERIALISABLE_TYPE_SHORTCUT( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( dupe_obj.__hash__(), ( dupe_obj.shortcut_type, dupe_obj.shortcut_key, dupe_obj.shortcut_press_type, tuple( dupe_obj.modifiers ) ).__hash__() )
            self.assertEqual( obj, dupe_obj )

        shortcuts = []

        shortcuts.append( ( ClientGUIShortcuts.Shortcut(), 'f7' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_SPACE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] ), 'space' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'a' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'A' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+a' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_HOME, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+alt+home' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_LEFT, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] ), 'left-click' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_MIDDLE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] ), 'ctrl+middle-click' ) )
        shortcuts.append( ( ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_SCROLL_DOWN, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_SHIFT ] ), 'alt+shift+scroll down' ) )

        for ( shortcut, s ) in shortcuts:
            self._dump_and_load_and_test( shortcut, test )
            self.assertEqual( shortcut.ToString(), s )

    def test_SERIALISABLE_TYPE_SHORTCUT_SET( self ):

        def test( obj, dupe_obj ):

            for ( shortcut, command ) in obj:
                self.assertEqual( dupe_obj.GetCommand( shortcut ).GetData(), command.GetData() )

        default_shortcuts = ClientDefaults.GetDefaultShortcuts()

        for shortcuts in default_shortcuts:
            self._dump_and_load_and_test( shortcuts, test )

        command_1 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_SIMPLE, CAC.SIMPLE_ARCHIVE_FILE )
        command_2 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( HydrusData.GenerateKey(), HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )
        command_3 = CAC.ApplicationCommand( CAC.APPLICATION_COMMAND_TYPE_CONTENT, ( CC.DEFAULT_LOCAL_TAG_SERVICE_KEY, HC.CONTENT_TYPE_MAPPINGS, HC.CONTENT_UPDATE_FLIP, 'test' ) )

        k_shortcut_1 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_SPACE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] )
        k_shortcut_2 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'a' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
        k_shortcut_3 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_CHARACTER, ord( 'A' ), ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
        k_shortcut_4 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_KEYBOARD_SPECIAL, ClientGUIShortcuts.SHORTCUT_KEY_SPECIAL_HOME, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )

        m_shortcut_1 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_LEFT, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [] )
        m_shortcut_2 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_MIDDLE, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_CTRL ] )
        m_shortcut_3 = ClientGUIShortcuts.Shortcut( ClientGUIShortcuts.SHORTCUT_TYPE_MOUSE, ClientGUIShortcuts.SHORTCUT_MOUSE_SCROLL_DOWN, ClientGUIShortcuts.SHORTCUT_PRESS_TYPE_PRESS, [ ClientGUIShortcuts.SHORTCUT_MODIFIER_ALT, ClientGUIShortcuts.SHORTCUT_MODIFIER_SHIFT ] )

        shortcut_set = ClientGUIShortcuts.ShortcutSet( 'test' )

        shortcut_set.SetCommand( k_shortcut_1, command_1 )
        shortcut_set.SetCommand( k_shortcut_2, command_2 )
        shortcut_set.SetCommand( k_shortcut_3, command_2 )
        shortcut_set.SetCommand( k_shortcut_4, command_3 )
        shortcut_set.SetCommand( m_shortcut_1, command_1 )
        shortcut_set.SetCommand( m_shortcut_2, command_2 )
        shortcut_set.SetCommand( m_shortcut_3, command_3 )

        self._dump_and_load_and_test( shortcut_set, test )

        self.assertEqual( shortcut_set.GetCommand( k_shortcut_1 ).GetData(), command_1.GetData() )

        shortcut_set.SetCommand( k_shortcut_1, command_3 )

        self.assertEqual( shortcut_set.GetCommand( k_shortcut_1 ).GetData(), command_3.GetData() )

    def test_SERIALISABLE_TYPE_SUBSCRIPTION( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj.GetName(), dupe_obj.GetName() )
            self.assertEqual( obj._gug_key_and_name, dupe_obj._gug_key_and_name )
            self.assertEqual( len( obj._query_headers ), len( dupe_obj._query_headers ) )
            self.assertEqual( obj._initial_file_limit, dupe_obj._initial_file_limit )
            self.assertEqual( obj._periodic_file_limit, dupe_obj._periodic_file_limit )
            self.assertEqual( obj._paused, dupe_obj._paused )
            self.assertEqual( obj._file_import_options.GetSerialisableTuple(), dupe_obj._file_import_options.GetSerialisableTuple() )
            self.assertEqual( obj._tag_import_options.GetSerialisableTuple(), dupe_obj._tag_import_options.GetSerialisableTuple() )
            self.assertEqual( obj._no_work_until, dupe_obj._no_work_until )

        sub = ClientImportSubscriptions.Subscription( 'test sub' )

        self._dump_and_load_and_test( sub, test )

        gug_key_and_name = ( HydrusData.GenerateKey(), 'muh test gug' )

        query_headers = []

        q = ClientImportSubscriptionQuery.SubscriptionQueryHeader()
        q.SetQueryText( 'test query' )
        query_headers.append( q )

        q = ClientImportSubscriptionQuery.SubscriptionQueryHeader()
        q.SetQueryText( 'test query 2' )
        query_headers.append( q )

        checker_options = ClientImportOptions.CheckerOptions()
        initial_file_limit = 100
        periodic_file_limit = 50
        paused = False

        file_import_options = ClientImportOptions.FileImportOptions()
        service_tag_import_options = ClientImportOptions.ServiceTagImportOptions( get_tags = False, additional_tags = { 'test additional tag', 'and another' } )
        tag_import_options = ClientImportOptions.TagImportOptions( service_keys_to_service_tag_import_options = { HydrusData.GenerateKey() : service_tag_import_options } )

        no_work_until = HydrusData.GetNow() - 86400 * 20

        sub.SetTuple( gug_key_and_name, checker_options, initial_file_limit, periodic_file_limit, paused, file_import_options, tag_import_options, no_work_until )
        sub.SetQueryHeaders( query_headers )

        self.assertEqual( sub.GetGUGKeyAndName(), gug_key_and_name )
        self.assertEqual( sub.GetTagImportOptions(), tag_import_options )
        self.assertEqual( sub.GetQueryHeaders(), query_headers )

        self.assertEqual( sub._paused, False )
        sub.PauseResume()
        self.assertEqual( sub._paused, True )
        sub.PauseResume()
        self.assertEqual( sub._paused, False )

        self._dump_and_load_and_test( sub, test )

    def test_SERIALISABLE_TYPE_TAG_FILTER( self ):

        def test( obj, dupe_obj ):

            self.assertEqual( obj._tag_slices_to_rules, dupe_obj._tag_slices_to_rules )

        tags = set()

        tags.add( 'title:test title' )
        tags.add( 'series:neon genesis evangelion' )
        tags.add( 'series:kill la kill' )
        tags.add( 'smile' )
        tags.add( 'blue eyes' )

        tag_filter = ClientTags.TagFilter()

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), set() )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'series:neon genesis evangelion', 'series:kill la kill' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:kill la kill', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'series:kill la kill' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'smile', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'series:neon genesis evangelion', 'series:kill la kill' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( ':', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:kill la kill', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'series:kill la kill' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( 'series:', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( 'series:', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'series:neon genesis evangelion', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'smile', 'blue eyes', 'title:test title', 'series:neon genesis evangelion' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( '', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'blue eyes', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( tags ), { 'title:test title', 'series:neon genesis evangelion', 'series:kill la kill', 'blue eyes' } )

        blacklist_tags = { 'nintendo', 'studio:nintendo' }

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( blacklist_tags ), { 'studio:nintendo' } )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), set() )

        tag_filter = ClientTags.TagFilter()
        tag_filter.SetRule( 'nintendo', CC.FILTER_BLACKLIST )
        tag_filter.SetRule( 'studio:nintendo', CC.FILTER_WHITELIST )

        self._dump_and_load_and_test( tag_filter, test )

        self.assertEqual( tag_filter.Filter( blacklist_tags, apply_unnamespaced_rules_to_namespaced_tags = True ), { 'studio:nintendo' } )
true
true
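All of the hydrus tests in this record funnel through the same _dump_and_load_and_test round trip: serialise the object, rebuild a fresh instance, then re-run a comparison callback on the pair. A minimal self-contained sketch of that pattern, using json from the standard library as a stand-in for HydrusSerialisable (the names here are illustrative, not hydrus's API):

import json
import unittest

class RoundTripExample( unittest.TestCase ):

    def _dump_and_load_and_test( self, obj, test_func ):
        # dump to a string, rebuild a distinct object, then let the
        # caller's callback compare original and duplicate
        json_string = json.dumps( obj )
        dupe_obj = json.loads( json_string )
        self.assertIsNot( obj, dupe_obj )
        test_func( obj, dupe_obj )

    def test_dict( self ):
        def test( obj, dupe_obj ):
            self.assertEqual( obj, dupe_obj )
        self._dump_and_load_and_test( { 'rp': 42, 'tags': [ 'one', 'two' ] }, test )

if __name__ == '__main__':
    unittest.main()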
f7002cf455ba55020c201c52fcb49a79f2741227
1,567
py
Python
src/plugins/sign/req.py
inuEbisu/inuBot
70bc139100a49983418a9fd7709f2f4899f0c6ea
[ "MIT" ]
null
null
null
src/plugins/sign/req.py
inuEbisu/inuBot
70bc139100a49983418a9fd7709f2f4899f0c6ea
[ "MIT" ]
null
null
null
src/plugins/sign/req.py
inuEbisu/inuBot
70bc139100a49983418a9fd7709f2f4899f0c6ea
[ "MIT" ]
null
null
null
import requests
import json
import time
import random

from . import conf, data, lang
from inukit.timestamp import natural_date, natural_time, timestamp_now

def is_same_day(ts1, ts2) -> bool:
    def d(ts):
        return natural_date(ts, '%Y-%m-%d')
    return d(ts1) == d(ts2)

def handle_morning(qq):
    last_morning = data.get(qq, 'last_morning')
    last_night = data.get(qq, 'last_night')
    now = timestamp_now()
    if last_morning > last_night:
        msg = lang.no_sleep
    else:
        msg = lang.morning_success % (
            natural_time(now - last_night)
        )
        data.set(qq, 'last_morning', now)
    return msg

def handle_night(qq):
    last_morning = data.get(qq, 'last_morning')
    last_night = data.get(qq, 'last_night')
    now = timestamp_now()
    if last_night > last_morning:
        msg = lang.no_getup
    else:
        data.set(qq, 'last_night', now)
        msg = lang.night_success % (
            natural_time(now - last_morning)
        )
    return msg

def gen_sign_info():
    rp = random.randint(1,100)
    return {
        "rp": rp
    }

def handle_sign(qq):
    last_sign = data.get(qq, 'last_sign')
    now = timestamp_now()
    msg = ''
    if is_same_day(last_sign, now):
        info = data.get(qq, 'last_sign_info')
        msg = lang.already_sign
    else:
        msg = lang.sign_success
        info = gen_sign_info()
        data.set(qq, 'last_sign', now)
        data.set(qq, 'last_sign_info', info)
    msg += lang.sign % (
        natural_date(last_sign),
        info['rp']
    )
    return msg
25.688525
70
0.603701
import requests
import json
import time
import random

from . import conf, data, lang
from inukit.timestamp import natural_date, natural_time, timestamp_now

def is_same_day(ts1, ts2) -> bool:
    def d(ts):
        return natural_date(ts, '%Y-%m-%d')
    return d(ts1) == d(ts2)

def handle_morning(qq):
    last_morning = data.get(qq, 'last_morning')
    last_night = data.get(qq, 'last_night')
    now = timestamp_now()
    if last_morning > last_night:
        msg = lang.no_sleep
    else:
        msg = lang.morning_success % (
            natural_time(now - last_night)
        )
        data.set(qq, 'last_morning', now)
    return msg

def handle_night(qq):
    last_morning = data.get(qq, 'last_morning')
    last_night = data.get(qq, 'last_night')
    now = timestamp_now()
    if last_night > last_morning:
        msg = lang.no_getup
    else:
        data.set(qq, 'last_night', now)
        msg = lang.night_success % (
            natural_time(now - last_morning)
        )
    return msg

def gen_sign_info():
    rp = random.randint(1,100)
    return {
        "rp": rp
    }

def handle_sign(qq):
    last_sign = data.get(qq, 'last_sign')
    now = timestamp_now()
    msg = ''
    if is_same_day(last_sign, now):
        info = data.get(qq, 'last_sign_info')
        msg = lang.already_sign
    else:
        msg = lang.sign_success
        info = gen_sign_info()
        data.set(qq, 'last_sign', now)
        data.set(qq, 'last_sign_info', info)
    msg += lang.sign % (
        natural_date(last_sign),
        info['rp']
    )
    return msg
true
true
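The sign plugin's daily dedupe hinges on is_same_day comparing formatted calendar dates rather than 86400-second windows. A standalone sketch of that check, assuming inukit's natural_date simply formats a Unix timestamp (the helper below is a stand-in for illustration, not the real inukit code):

import time
from datetime import datetime

def natural_date(ts, fmt='%Y-%m-%d'):
    # assumed behaviour of inukit.timestamp.natural_date
    return datetime.fromtimestamp(ts).strftime(fmt)

def is_same_day(ts1, ts2) -> bool:
    return natural_date(ts1) == natural_date(ts2)

now = time.time()
print(is_same_day(now, now))          # True: same calendar date
print(is_same_day(now, now - 86400))  # False: exactly one day earlier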
f7002cfaaf2541347b111dee46c214adbb5b841c
3,413
py
Python
app/app/settings.py
hrusfandi/recipe-app-api
342ec9c30dc327218476a48ce4b65ba042dfe42b
[ "MIT" ]
null
null
null
app/app/settings.py
hrusfandi/recipe-app-api
342ec9c30dc327218476a48ce4b65ba042dfe42b
[ "MIT" ]
null
null
null
app/app/settings.py
hrusfandi/recipe-app-api
342ec9c30dc327218476a48ce4b65ba042dfe42b
[ "MIT" ]
null
null
null
""" Django settings for app project. Generated by 'django-admin startproject' using Django 2.1.15. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'lxb!(o00)qtw0p+6q_vs$01&wtsw(m*s!ol0_6^v*flo^!&ek&' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'rest_framework.authtoken', 'core', 'user', 'recipe', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'app.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'app.wsgi.application' # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'HOST': os.environ.get('DB_HOST'), 'NAME': os.environ.get('DB_NAME'), 'USER': os.environ.get('DB_USER'), 'PASSWORD': os.environ.get('DB_PASS'), } } # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' MEDIA_URL = '/media/' MEDIA_ROOT = '/vol/web/media' STATIC_ROOT = '/vol/web/static' AUTH_USER_MODEL = 'core.User'
25.281481
91
0.685028
import os

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SECRET_KEY = 'lxb!(o00)qtw0p+6q_vs$01&wtsw(m*s!ol0_6^v*flo^!&ek&'

DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'core',
    'user',
    'recipe',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'app.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'app.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'HOST': os.environ.get('DB_HOST'),
        'NAME': os.environ.get('DB_NAME'),
        'USER': os.environ.get('DB_USER'),
        'PASSWORD': os.environ.get('DB_PASS'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/

STATIC_URL = '/static/'
MEDIA_URL = '/media/'

MEDIA_ROOT = '/vol/web/media'
STATIC_ROOT = '/vol/web/static'

AUTH_USER_MODEL = 'core.User'
true
true
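Because these settings pull every Postgres credential from os.environ, the same settings module runs unchanged against any database; a quick sketch of what the container environment has to provide before Django starts (the values below are made up for illustration):

import os

# hypothetical values; in practice docker-compose or the deployment
# environment exports these, they are never hard-coded
os.environ.setdefault('DB_HOST', 'db')
os.environ.setdefault('DB_NAME', 'app')
os.environ.setdefault('DB_USER', 'postgres')
os.environ.setdefault('DB_PASS', 'supersecretpassword')

# settings.py resolves them at import time; a missing variable silently
# becomes None, which is why all four must be exported
print(os.environ.get('DB_HOST'), os.environ.get('DB_NAME'))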
f7002dc942023defc41282a811fecb1719c2a9bf
25,241
py
Python
tests/blackbox/scopes/test_bb_scopes_updates.py
airencracken/waflz
00eef42258dd98d0cbc71102061e4951f456803f
[ "Apache-2.0" ]
null
null
null
tests/blackbox/scopes/test_bb_scopes_updates.py
airencracken/waflz
00eef42258dd98d0cbc71102061e4951f456803f
[ "Apache-2.0" ]
null
null
null
tests/blackbox/scopes/test_bb_scopes_updates.py
airencracken/waflz
00eef42258dd98d0cbc71102061e4951f456803f
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3
'''Test config updates
'''
# ------------------------------------------------------------------------------
# Imports
# ------------------------------------------------------------------------------
import subprocess
import os
import json
import time
import datetime
import requests
import pytest
# ------------------------------------------------------------------------------
# Constants
# ------------------------------------------------------------------------------
G_TEST_HOST = 'http://127.0.0.1:12345'
# ------------------------------------------------------------------------------
# run_command
# ------------------------------------------------------------------------------
def run_command(command):
    p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = p.communicate()
    return (p.returncode, stdout, stderr)
# ------------------------------------------------------------------------------
# setup scopez server in action mode
# ------------------------------------------------------------------------------
@pytest.fixture()
def setup_scopez_server_action():
    # ------------------------------------------------------
    # setup
    # ------------------------------------------------------
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_geoip2city_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-City.mmdb'))
    l_geoip2ISP_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-ASN.mmdb'))
    l_conf_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf'))
    l_ruleset_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/ruleset'))
    l_scopez_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes'))
    l_an_list = os.path.realpath(os.path.join(l_file_path, '../../data/an/an-scopes.json'))
    l_scopez_server_path = os.path.abspath(os.path.join(l_file_path, '../../../build/util/scopez_server/scopez_server'))
    l_bot_challenge = os.path.realpath(os.path.join(l_file_path, '../../data/bot/bot-challenges.json'))
    l_subproc = subprocess.Popen([l_scopez_server_path,
                                  '-d', l_conf_dir,
                                  '-S', l_scopez_dir,
                                  '-l', l_an_list,
                                  '-r', l_ruleset_path,
                                  '-g', l_geoip2city_path,
                                  '-i', l_geoip2ISP_path,
                                  '-c', l_bot_challenge,
                                  '-a'
                                  ])
    print('cmd: {}'.format(' '.join([l_scopez_server_path,
                                     '-d', l_conf_dir,
                                     '-S', l_scopez_dir,
                                     '-l', l_an_list,
                                     '-r', l_ruleset_path,
                                     '-g', l_geoip2city_path,
                                     '-i', l_geoip2ISP_path,
                                     '-c', l_bot_challenge,
                                     '-a'])))
                                     # '-b'])))
    time.sleep(1)
    # ------------------------------------------------------
    # yield...
    # ------------------------------------------------------
    yield setup_scopez_server_action
    # ------------------------------------------------------
    # tear down
    # ------------------------------------------------------
    _, _, _ = run_command('kill -9 %d'%(l_subproc.pid))
    time.sleep(0.5)

def test_acl_config_update(setup_scopez_server_action):
    '''
    update acl config 0050-ZrLf2KkQ -
    remove gizoogle from user agent black list
    and test if request returns 200
    '''
    # ------------------------------------------------------
    # test an 0050 with user-agent acl 'gizoogle' in the
    # request
    # ------------------------------------------------------
    l_uri = G_TEST_HOST
    l_headers = {'host': 'monkeez.com',
                 'user-agent': 'gizoogle',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is acl custom response\n'
    #-------------------------------------------------------
    # load acl config and remove gizoogle from blacklist
    # ------------------------------------------------------
    l_conf = {}
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_acl_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/acl/0050-ZrLf2KkQ.acl.json'))
    try:
        with open(l_acl_conf_path) as l_f:
            l_conf = json.load(l_f)
    except Exception as l_e:
        print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
            l_acl_conf_path, type(l_e), l_e, l_e.__doc__))
        assert False
    l_conf['user_agent']['blacklist'] = []
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # ------------------------------------------------------
    # post/update acl conf
    # ------------------------------------------------------
    l_url = '%s/update_acl'%(G_TEST_HOST)
    l_headers = {'Content-Type': 'application/json',
                 'waf-scopes-id': '0050'}
    l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf))
    assert l_r.status_code == 200
    # ------------------------------------------------------
    # blacklist should have been updated and should get 200
    #-------------------------------------------------------
    l_uri = G_TEST_HOST
    l_headers = {'host': 'monkeez.com',
                 'user-agent': 'gizoogle',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 200

def test_rules_config_update(setup_scopez_server_action):
    '''
    update rules config 0050-ZrLf3KKq.rules.json -
    change user agent to Donkeez from Monkeez
    '''
    # ------------------------------------------------------
    # test an 0050 with user-agent 'Monkeez' in the
    # request
    # ------------------------------------------------------
    l_uri = G_TEST_HOST
    l_headers = {'host': 'monkeez.com',
                 'user-agent': 'monkeez',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is rules custom response\n'
    #-------------------------------------------------------
    # load rules config and changes monkeez to donkeez in
    # custom rules
    # ------------------------------------------------------
    l_conf = {}
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_rules_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/rules/0050-ZrLf3KkQ.rules.json'))
    try:
        with open(l_rules_conf_path) as l_f:
            l_conf = json.load(l_f)
    except Exception as l_e:
        print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
            l_file_path, type(l_e), l_e, l_e.__doc__))
        assert False
    l_conf['directive'][1]['sec_rule']['operator']['value'] = 'donkeez'
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # ------------------------------------------------------
    # post/update rules conf
    # ------------------------------------------------------
    l_url = '%s/update_rules'%(G_TEST_HOST)
    l_headers = {'Content-Type': 'application/json',
                 'waf-scopes-id': '0050'}
    l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf))
    assert l_r.status_code == 200
    # ------------------------------------------------------
    # test again with user-agent 'Monkeez' in the
    # request. It should pass
    # ------------------------------------------------------
    l_uri = G_TEST_HOST
    l_headers = {'host': 'monkeez.com',
                 'user-agent': 'monkeez',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 200
    # ------------------------------------------------------
    # test with user-agent 'donkeez' in the
    # request. should be blocked
    # ------------------------------------------------------
    l_uri = G_TEST_HOST
    l_headers = {'host': 'monkeez.com',
                 'user-agent': 'donkeez',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is rules custom response\n'

def test_profile_config_update(setup_scopez_server_action):
    '''
    update profile config 0050-YrLf3KkQ.wafprof.json -
    change ignore_query_args to test from ignore
    '''
    # ------------------------------------------------------
    # test an 0050 with sql injection
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/profile.html?a=%27select%20*%20from%20testing%27'
    l_headers = {'host': 'monkeez.com',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is profile custom response\n'
    # ------------------------------------------------------
    # test an 0050 with sql injection and query_args "ignore"
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/profile.html?ignore=%27select%20*%20from%20testing%27'
    l_headers = {'host': 'monkeez.com',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 200
    #-------------------------------------------------------
    # load profile config and change "ignore_query_args"
    # to "test"
    # ------------------------------------------------------
    l_conf = {}
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_profile_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/profile/0050-YrLf3KkQ.wafprof.json'))
    try:
        with open(l_profile_conf_path) as l_f:
            l_conf = json.load(l_f)
    except Exception as l_e:
        print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
            l_profile_conf_path, type(l_e), l_e, l_e.__doc__))
        assert False
    l_conf["general_settings"]["ignore_query_args"] = ["test"]
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    # ------------------------------------------------------
    # post/update profile conf
    # ------------------------------------------------------
    l_url = '%s/update_profile'%(G_TEST_HOST)
    l_headers = {'Content-Type': 'application/json',
                 'waf-scopes-id': '0050'}
    l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf))
    assert l_r.status_code == 200
    # ------------------------------------------------------
    # test an 0050 with sql injection and query_args "ignore"
    # should get 403
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/profile.html?ignore=%27select%20*%20from%20testing%27'
    l_headers = {'host': 'monkeez.com',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is profile custom response\n'
    # ------------------------------------------------------
    # test an 0050 with sql injection and query_args "test"
    # sql injection should be ignored and get 200
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/profile.html?test=%27select%20*%20from%20testing%27'
    l_headers = {'host': 'monkeez.com',
                 'waf-scopes-id': '0050'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 200

def test_limit_config_update(setup_scopez_server_action):
    # ------------------------------------------------------
    # Make 3 request in 2 sec for 3rd and
    # 4th scope. Third request should get rate limited
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/test.html'
    l_headers = {'host': 'limit.com',
                 'waf-scopes-id': '0050'}
    for _ in range(2):
        l_r = requests.get(l_uri, headers=l_headers)
        assert l_r.status_code == 200
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is ddos custom response\n'
    l_uri = G_TEST_HOST+'/test.html'
    l_headers = {'host': 'test.limit.com',
                 'waf-scopes-id': '0050'}
    for _ in range(2):
        l_r = requests.get(l_uri, headers=l_headers)
        assert l_r.status_code == 200
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'custom response for limits from limit_id_2\n'
    # ------------------------------------------------------
    # sleep for 2 seconds. Enforcements should expire
    # ------------------------------------------------------
    time.sleep(2)
    #-------------------------------------------------------
    # load limit config and change duration_sec to 3
    # ------------------------------------------------------
    l_conf = {}
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_limit_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/limit/0050-MjMhNXMR.limit.json'))
    try:
        with open(l_limit_conf_path) as l_f:
            l_conf = json.load(l_f)
    except Exception as l_e:
        print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
            l_limit_conf_path, type(l_e), l_e, l_e.__doc__))
        assert False
    l_conf["num"] = 3
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    #-------------------------------------------------------
    # POST conf
    # ------------------------------------------------------
    l_url = '%s/update_limit'%(G_TEST_HOST)
    l_headers = {'Content-Type': 'application/json',
                 'waf-scopes-id': '0050'}
    l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf))
    assert l_r.status_code == 200
    # ------------------------------------------------------
    # Make 4 request in 2 sec. fourth request should get
    # rate limited. Third request shouldn't be blocked
    # because of the update
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/test.html'
    l_headers = {'host': 'limit.com',
                 'waf-scopes-id': '0050'}
    for _ in range(3):
        l_r = requests.get(l_uri, headers=l_headers)
        assert l_r.status_code == 200
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is ddos custom response\n'
    # ------------------------------------------------------
    # Make 4 request in 2 sec for fourth scope.
    # verify if 4th scope was also updated
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/test.html'
    l_headers = {'host': 'test.limit.com',
                 'waf-scopes-id': '0050'}
    for _ in range(3):
        l_r = requests.get(l_uri, headers=l_headers)
        assert l_r.status_code == 200
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'custom response for limits from limit_id_2\n'

def test_scopes_update(setup_scopez_server_action):
    #-------------------------------------------------------
    # check second scope for AN 0051 working correctly
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/path.html'
    l_headers = {'host': 'www.regexhost.com',
                 'waf-scopes-id':'0051',
                 'User-Agent': 'bananas'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is from RX scope\n'
    #-------------------------------------------------------
    # change the 'path' value for scope and update.
    # check if update was successful
    # ------------------------------------------------------
    l_conf = {}
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_scopes_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes/0051.scopes.json'))
    try:
        with open(l_scopes_conf_path) as l_f:
            l_conf = json.load(l_f)
    except Exception as l_e:
        print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
            l_scopes_conf_path, type(l_e), l_e, l_e.__doc__))
        assert False
    l_conf['scopes'][1]['path']['value'] = ".*/test.html"
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    #-------------------------------------------------------
    # POST conf
    # ------------------------------------------------------
    l_url = '%s/update_scopes'%(G_TEST_HOST)
    l_headers = {'Content-Type': 'application/json'}
    l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf))
    assert l_r.status_code == 200
    #-------------------------------------------------------
    # make a request with same path '/path.html',
    # should match GLOB scope
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/path.html'
    l_headers = {'host': 'www.regexhost.com',
                 'waf-scopes-id':'0051',
                 'User-Agent': 'bananas'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is from GLOB scope\n'
    #-------------------------------------------------------
    # make a request with updated path '/test.html',
    # should get 403 with custom response
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/test.html'
    l_headers = {'host': 'www.regexhost.com',
                 'waf-scopes-id':'0051',
                 'User-Agent': 'bananas'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is from RX scope\n'

def test_scopes_linkage_update(setup_scopez_server_action):
    """
    Test linkage update.
    Update rules config in second scope (0050-scopes.json)
    to 0050-0gG8osWJ.rules.json from 0050-ZrLf3KkQ.rules.json
    check if update worked
    """
    #-------------------------------------------------------
    # check second scope for AN 0050 working correctly
    # ------------------------------------------------------
    l_uri = G_TEST_HOST+'/path.html'
    l_headers = {'host': 'test.com',
                 'waf-scopes-id':'0050',
                 'User-Agent': 'monkeez'}
    l_r = requests.get(l_uri, headers=l_headers)
    assert l_r.status_code == 403
    assert l_r.text == 'This is rules custom response\n'
    #-------------------------------------------------------
    # change the 'rules_prod_id' value for second scope
    # and update.
    # check if update was successful
    # ------------------------------------------------------
    l_conf = {}
    l_file_path = os.path.dirname(os.path.abspath(__file__))
    l_scopes_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes/0050.scopes.json'))
    try:
        with open(l_scopes_conf_path) as l_f:
            l_conf = json.load(l_f)
    except Exception as l_e:
        print('error opening config file: %s. Reason: %s error: %s, doc: %s' % (
            l_scopes_conf_path, type(l_e), l_e, l_e.__doc__))
        assert False
    l_conf['scopes'][1]['rules_prod_id'] = "0gG8osWJ"
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
    #-------------------------------------------------------
    # POST conf
    # ------------------------------------------------------
    l_url = '%s/update_scopes'%(G_TEST_HOST)
    l_headers = {'Content-Type': 'application/json'}
    l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf))
    assert l_r.status_code == 200
    #-------------------------------------------------------
    # make the same request.
should get 200 # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'test.com', 'waf-scopes-id':'0050', 'User-Agent': 'monkeez'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 #assert l_r.text == 'This is from GLOB scope\n' #------------------------------------------------------- # make a request with user-agent bananas # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'test.com', 'waf-scopes-id':'0050', 'User-Agent': 'bananas'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is rules custom response\n' # ------------------------------------------------------------------------------ # test /update_bots endpoint # ------------------------------------------------------------------------------ def test_update_bots_endpoint(setup_scopez_server_action): l_url = G_TEST_HOST + '/update_bots' l_file_path = os.path.dirname(os.path.abspath(__file__)) l_test_file = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/bots/0052-wHyMHxV7.bots.json')) l_test_payload = '' # ------------------------------------------------------ # check setup # ------------------------------------------------------ assert os.path.exists(l_test_file), 'test file not found!' # ------------------------------------------------------ # slurp test file # ------------------------------------------------------ with open(l_test_file) as l_tf: l_test_payload = l_tf.read() # ------------------------------------------------------ # check setup # ------------------------------------------------------ assert l_test_payload, 'payload is empty!' l_json_payload = json.loads(l_test_payload) # ------------------------------------------------------ # Check that challenge works # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'mybot.com', 'user-agent': 'bot-testing', 'waf-scopes-id': '0052'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 401 # ------------------------------------------------------ # Update the bot config # ------------------------------------------------------ l_json_payload['directive'][0]['sec_rule']['operator']['value'] = 'chowdah' # ------------------------------------------------------ # update the timestamp, else it will silently do nothing and return 200 # ref: scopes.cc:load_bots (compare time) # ------------------------------------------------------ l_json_payload['last_modified_date'] = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') l_result = requests.post(l_url, timeout=3, json=l_json_payload) assert l_result.status_code == 200 assert l_result.json()['status'] == 'success' # ------------------------------------------------------ # Expect 200 # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'mybot.com', 'user-agent': 'bot-testing', 'waf-scopes-id': '0052'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200,\ "expecting 200, got {resp_code} since user-agent changed to chowdah".format(resp_code=l_r.status_code) # ------------------------------------------------------ # Expect 401 due to new UA # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'mybot.com', 'user-agent': 'chowdah', 'waf-scopes-id': '0052'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 
401,\ "expecting 401, got {resp_code} since user-agent changed to chowdah".format(resp_code=l_r.status_code) # ------------------------------------------------------ # check negative test - missing customer_id field # ------------------------------------------------------ l_cust_id = l_json_payload.pop('customer_id') l_n2_result = requests.post(l_url, json=l_json_payload) assert l_n2_result.status_code == 500,\ 'expected 500 since customer_id {} is removed'.format(l_cust_id)
47.804924
127
0.465909
import subprocess import os import json import time import datetime import requests import pytest G_TEST_HOST = 'http://127.0.0.1:12345' def run_command(command): p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() return (p.returncode, stdout, stderr) @pytest.fixture() def setup_scopez_server_action(): l_file_path = os.path.dirname(os.path.abspath(__file__)) l_geoip2city_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-City.mmdb')) l_geoip2ISP_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/db/GeoLite2-ASN.mmdb')) l_conf_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf')) l_ruleset_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/ruleset')) l_scopez_dir = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes')) l_an_list = os.path.realpath(os.path.join(l_file_path, '../../data/an/an-scopes.json')) l_scopez_server_path = os.path.abspath(os.path.join(l_file_path, '../../../build/util/scopez_server/scopez_server')) l_bot_challenge = os.path.realpath(os.path.join(l_file_path, '../../data/bot/bot-challenges.json')) l_subproc = subprocess.Popen([l_scopez_server_path, '-d', l_conf_dir, '-S', l_scopez_dir, '-l', l_an_list, '-r', l_ruleset_path, '-g', l_geoip2city_path, '-i', l_geoip2ISP_path, '-c', l_bot_challenge, '-a' ]) print('cmd: {}'.format(' '.join([l_scopez_server_path, '-d', l_conf_dir, '-S', l_scopez_dir, '-l', l_an_list, '-r', l_ruleset_path, '-g', l_geoip2city_path, '-i', l_geoip2ISP_path, '-c', l_bot_challenge, '-a']))) time.sleep(1) yield setup_scopez_server_action _, _, _ = run_command('kill -9 %d'%(l_subproc.pid)) time.sleep(0.5) def test_acl_config_update(setup_scopez_server_action): l_uri = G_TEST_HOST l_headers = {'host': 'monkeez.com', 'user-agent': 'gizoogle', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is acl custom response\n' l_conf = {} l_file_path = os.path.dirname(os.path.abspath(__file__)) l_acl_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/acl/0050-ZrLf2KkQ.acl.json')) try: with open(l_acl_conf_path) as l_f: l_conf = json.load(l_f) except Exception as l_e: print('error opening config file: %s. 
Reason: %s error: %s, doc: %s' % ( l_acl_conf_path, type(l_e), l_e, l_e.__doc__)) assert False l_conf['user_agent']['blacklist'] = [] l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') l_url = '%s/update_acl'%(G_TEST_HOST) l_headers = {'Content-Type': 'application/json', 'waf-scopes-id': '0050'} l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf)) assert l_r.status_code == 200 l_uri = G_TEST_HOST l_headers = {'host': 'monkeez.com', 'user-agent': 'gizoogle', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 def test_rules_config_update(setup_scopez_server_action): l_uri = G_TEST_HOST l_headers = {'host': 'monkeez.com', 'user-agent': 'monkeez', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is rules custom response\n' l_conf = {} l_file_path = os.path.dirname(os.path.abspath(__file__)) l_rules_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/rules/0050-ZrLf3KkQ.rules.json')) try: with open(l_rules_conf_path) as l_f: l_conf = json.load(l_f) except Exception as l_e: print('error opening config file: %s. Reason: %s error: %s, doc: %s' % ( l_rules_conf_path, type(l_e), l_e, l_e.__doc__)) assert False l_conf['directive'][1]['sec_rule']['operator']['value'] = 'donkeez' l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') l_url = '%s/update_rules'%(G_TEST_HOST) l_headers = {'Content-Type': 'application/json', 'waf-scopes-id': '0050'} l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf)) assert l_r.status_code == 200 l_uri = G_TEST_HOST l_headers = {'host': 'monkeez.com', 'user-agent': 'monkeez', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 l_uri = G_TEST_HOST l_headers = {'host': 'monkeez.com', 'user-agent': 'donkeez', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is rules custom response\n' def test_profile_config_update(setup_scopez_server_action): l_uri = G_TEST_HOST+'/profile.html?a=%27select%20*%20from%20testing%27' l_headers = {'host': 'monkeez.com', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is profile custom response\n' l_uri = G_TEST_HOST+'/profile.html?ignore=%27select%20*%20from%20testing%27' l_headers = {'host': 'monkeez.com', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 l_conf = {} l_file_path = os.path.dirname(os.path.abspath(__file__)) l_profile_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/profile/0050-YrLf3KkQ.wafprof.json')) try: with open(l_profile_conf_path) as l_f: l_conf = json.load(l_f) except Exception as l_e: print('error opening config file: %s. 
Reason: %s error: %s, doc: %s' % ( l_profile_conf_path, type(l_e), l_e, l_e.__doc__)) assert False l_conf["general_settings"]["ignore_query_args"] = ["test"] l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') l_url = '%s/update_profile'%(G_TEST_HOST) l_headers = {'Content-Type': 'application/json', 'waf-scopes-id': '0050'} l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf)) assert l_r.status_code == 200 l_uri = G_TEST_HOST+'/profile.html?ignore=%27select%20*%20from%20testing%27' l_headers = {'host': 'monkeez.com', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is profile custom response\n' l_uri = G_TEST_HOST+'/profile.html?test=%27select%20*%20from%20testing%27' l_headers = {'host': 'monkeez.com', 'waf-scopes-id': '0050'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 def test_limit_config_update(setup_scopez_server_action): l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'limit.com', 'waf-scopes-id': '0050'} for _ in range(2): l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is ddos custom response\n' l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'test.limit.com', 'waf-scopes-id': '0050'} for _ in range(2): l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'custom response for limits from limit_id_2\n' time.sleep(2) l_conf = {} l_file_path = os.path.dirname(os.path.abspath(__file__)) l_limit_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/limit/0050-MjMhNXMR.limit.json')) try: with open(l_limit_conf_path) as l_f: l_conf = json.load(l_f) except Exception as l_e: print('error opening config file: %s. Reason: %s error: %s, doc: %s' % ( l_limit_conf_path, type(l_e), l_e, l_e.__doc__)) assert False l_conf["num"] = 3 l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') l_url = '%s/update_limit'%(G_TEST_HOST) l_headers = {'Content-Type': 'application/json', 'waf-scopes-id': '0050'} l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf)) assert l_r.status_code == 200 # because of the update # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'limit.com', 'waf-scopes-id': '0050'} for _ in range(3): l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is ddos custom response\n' # ------------------------------------------------------ # Make 4 request in 2 sec for fourth scope. 
# verify if 4th scope was also updated # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'test.limit.com', 'waf-scopes-id': '0050'} for _ in range(3): l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'custom response for limits from limit_id_2\n' def test_scopes_update(setup_scopez_server_action): #------------------------------------------------------- # check second scope for AN 0051 working correctly # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'www.regexhost.com', 'waf-scopes-id':'0051', 'User-Agent': 'bananas'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is from RX scope\n' #------------------------------------------------------- # change the 'path' value for scope and update. # check if update was successful # ------------------------------------------------------ l_conf = {} l_file_path = os.path.dirname(os.path.abspath(__file__)) l_scopes_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes/0051.scopes.json')) try: with open(l_scopes_conf_path) as l_f: l_conf = json.load(l_f) except Exception as l_e: print('error opening config file: %s. Reason: %s error: %s, doc: %s' % ( l_scopes_conf_path, type(l_e), l_e, l_e.__doc__)) assert False l_conf['scopes'][1]['path']['value'] = ".*/test.html" l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') #------------------------------------------------------- # POST conf # ------------------------------------------------------ l_url = '%s/update_scopes'%(G_TEST_HOST) l_headers = {'Content-Type': 'application/json'} l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf)) assert l_r.status_code == 200 #------------------------------------------------------- # make a request with same path '/path.html', # should match GLOB scope # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'www.regexhost.com', 'waf-scopes-id':'0051', 'User-Agent': 'bananas'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is from GLOB scope\n' #------------------------------------------------------- # make a request with updated path '/test.html', # should get 403 with custom response # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'www.regexhost.com', 'waf-scopes-id':'0051', 'User-Agent': 'bananas'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is from RX scope\n' def test_scopes_linkage_update(setup_scopez_server_action): #------------------------------------------------------- # check second scope for AN 0050 working correctly # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'test.com', 'waf-scopes-id':'0050', 'User-Agent': 'monkeez'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is rules custom response\n' #------------------------------------------------------- # change the 'rules_prod_id' value for second scope # and update. 
# check if update was successful # ------------------------------------------------------ l_conf = {} l_file_path = os.path.dirname(os.path.abspath(__file__)) l_scopes_conf_path = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/scopes/0050.scopes.json')) try: with open(l_scopes_conf_path) as l_f: l_conf = json.load(l_f) except Exception as l_e: print('error opening config file: %s. Reason: %s error: %s, doc: %s' % ( l_scopes_conf_path, type(l_e), l_e, l_e.__doc__)) assert False l_conf['scopes'][1]['rules_prod_id'] = "0gG8osWJ" l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%fZ') #------------------------------------------------------- # POST conf # ------------------------------------------------------ l_url = '%s/update_scopes'%(G_TEST_HOST) l_headers = {'Content-Type': 'application/json'} l_r = requests.post(l_url, headers=l_headers, data=json.dumps(l_conf)) assert l_r.status_code == 200 #------------------------------------------------------- # make the same request. should get 200 # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'test.com', 'waf-scopes-id':'0050', 'User-Agent': 'monkeez'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200 #assert l_r.text == 'This is from GLOB scope\n' #------------------------------------------------------- # make a request with user-agent bananas # ------------------------------------------------------ l_uri = G_TEST_HOST+'/path.html' l_headers = {'host': 'test.com', 'waf-scopes-id':'0050', 'User-Agent': 'bananas'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 403 assert l_r.text == 'This is rules custom response\n' # ------------------------------------------------------------------------------ # test /update_bots endpoint # ------------------------------------------------------------------------------ def test_update_bots_endpoint(setup_scopez_server_action): l_url = G_TEST_HOST + '/update_bots' l_file_path = os.path.dirname(os.path.abspath(__file__)) l_test_file = os.path.realpath(os.path.join(l_file_path, '../../data/waf/conf/bots/0052-wHyMHxV7.bots.json')) l_test_payload = '' # ------------------------------------------------------ # check setup # ------------------------------------------------------ assert os.path.exists(l_test_file), 'test file not found!' # ------------------------------------------------------ # slurp test file # ------------------------------------------------------ with open(l_test_file) as l_tf: l_test_payload = l_tf.read() # ------------------------------------------------------ # check setup # ------------------------------------------------------ assert l_test_payload, 'payload is empty!' 
l_json_payload = json.loads(l_test_payload) # ------------------------------------------------------ # Check that challenge works # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'mybot.com', 'user-agent': 'bot-testing', 'waf-scopes-id': '0052'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 401 # ------------------------------------------------------ # Update the bot config # ------------------------------------------------------ l_json_payload['directive'][0]['sec_rule']['operator']['value'] = 'chowdah' # ------------------------------------------------------ # update the timestamp, else it will silently do nothing and return 200 # ref: scopes.cc:load_bots (compare time) # ------------------------------------------------------ l_json_payload['last_modified_date'] = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') l_result = requests.post(l_url, timeout=3, json=l_json_payload) assert l_result.status_code == 200 assert l_result.json()['status'] == 'success' # ------------------------------------------------------ # Expect 200 # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'mybot.com', 'user-agent': 'bot-testing', 'waf-scopes-id': '0052'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 200,\ "expecting 200, got {resp_code} since user-agent changed to chowdah".format(resp_code=l_r.status_code) # ------------------------------------------------------ # Expect 401 due to new UA # ------------------------------------------------------ l_uri = G_TEST_HOST+'/test.html' l_headers = {'host': 'mybot.com', 'user-agent': 'chowdah', 'waf-scopes-id': '0052'} l_r = requests.get(l_uri, headers=l_headers) assert l_r.status_code == 401,\ "expecting 401, got {resp_code} since user-agent changed to chowdah".format(resp_code=l_r.status_code) # ------------------------------------------------------ # check negative test - missing customer_id field # ------------------------------------------------------ l_cust_id = l_json_payload.pop('customer_id') l_n2_result = requests.post(l_url, json=l_json_payload) assert l_n2_result.status_code == 500,\ 'expected 500 since customer_id {} is removed'.format(l_cust_id)
true
true
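The config-update tests in the record above all follow one protocol: read the JSON config from disk, mutate a field, bump last_modified_date (scopez_server compares timestamps and silently ignores stale configs, as the scopes.cc:load_bots comment in the bot test notes), and POST the result back. A minimal sketch of that pattern, assuming the same scopez_server endpoints; the update_config helper name is ours, not part of the suite:

import datetime
import json

import requests

def update_config(host, endpoint, conf_path, mutate):
    # Load a JSON config, apply the mutation, and push it to the server.
    with open(conf_path) as l_f:
        l_conf = json.load(l_f)
    mutate(l_conf)
    # scopez_server ignores configs whose timestamp is not newer, so bump it.
    l_conf['last_modified_date'] = datetime.datetime.utcnow().strftime(
        '%Y-%m-%dT%H:%M:%S.%fZ')
    l_r = requests.post('%s/%s' % (host, endpoint),
                        headers={'Content-Type': 'application/json'},
                        data=json.dumps(l_conf))
    assert l_r.status_code == 200

For example, update_config(G_TEST_HOST, 'update_acl', l_acl_conf_path, lambda c: c['user_agent'].update({'blacklist': []})) would reproduce the update step of test_acl_config_update.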
f7002eb930b1638b61045d541b7e5cad48ca6680
1,408
py
Python
python/setup.py
tohuynh/king-county
50147256f73bd42b1967c8e9ce15cba10a08a72a
[ "MIT" ]
null
null
null
python/setup.py
tohuynh/king-county
50147256f73bd42b1967c8e9ce15cba10a08a72a
[ "MIT" ]
7
2022-01-06T04:34:59.000Z
2022-03-10T03:48:06.000Z
python/setup.py
tohuynh/king-county
50147256f73bd42b1967c8e9ce15cba10a08a72a
[ "MIT" ]
1
2022-01-07T21:40:24.000Z
2022-01-07T21:40:24.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """The setup script.""" from setuptools import find_packages, setup test_requirements = [ "black>=19.10b0", "flake8>=3.8.3", "flake8-debugger>=3.2.1", ] dev_requirements = [ *test_requirements, "wheel>=0.34.2", ] requirements = [ "cdp-backend[pipeline]==3.0.2", "cdp-scrapers[king_county]>=0.3.2", ] extra_requirements = { "test": test_requirements, "dev": dev_requirements, "all": [ *requirements, *dev_requirements, ], } setup( author="JacksonMaxfield", classifiers=[ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Programming Language :: Python :: 3.9", ], description="Package containing the gather functions for Example.", install_requires=requirements, license="MIT license", long_description_content_type="text/markdown", include_package_data=True, keywords="civic technology, open government", name="cdp-king_county-backend", packages=find_packages(exclude=["tests", "*.tests", "*.tests.*"]), python_requires=">=3.9", tests_require=test_requirements, extras_require=extra_requirements, url="https://github.com/CouncilDataProject/king-county", version="1.0.0", zip_safe=False, )
24.701754
71
0.642045
from setuptools import find_packages, setup test_requirements = [ "black>=19.10b0", "flake8>=3.8.3", "flake8-debugger>=3.2.1", ] dev_requirements = [ *test_requirements, "wheel>=0.34.2", ] requirements = [ "cdp-backend[pipeline]==3.0.2", "cdp-scrapers[king_county]>=0.3.2", ] extra_requirements = { "test": test_requirements, "dev": dev_requirements, "all": [ *requirements, *dev_requirements, ], } setup( author="JacksonMaxfield", classifiers=[ "Development Status :: 2 - Pre-Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Programming Language :: Python :: 3.9", ], description="Package containing the gather functions for Example.", install_requires=requirements, license="MIT license", long_description_content_type="text/markdown", include_package_data=True, keywords="civic technology, open government", name="cdp-king_county-backend", packages=find_packages(exclude=["tests", "*.tests", "*.tests.*"]), python_requires=">=3.9", tests_require=test_requirements, extras_require=extra_requirements, url="https://github.com/CouncilDataProject/king-county", version="1.0.0", zip_safe=False, )
true
true
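The extras_require block in this setup.py builds three extras from two base lists; a quick sketch of how they compose (the lists are copied from the record, the assertions are ours):

test_requirements = ["black>=19.10b0", "flake8>=3.8.3", "flake8-debugger>=3.2.1"]
dev_requirements = [*test_requirements, "wheel>=0.34.2"]
requirements = ["cdp-backend[pipeline]==3.0.2", "cdp-scrapers[king_county]>=0.3.2"]
extras = {"test": test_requirements, "dev": dev_requirements,
          "all": [*requirements, *dev_requirements]}

assert set(extras["dev"]) > set(extras["test"])                      # dev = test + wheel
assert set(extras["all"]) == set(requirements) | set(extras["dev"])  # all = runtime + dev

So, assuming the package is published under the name in the record, pip install "cdp-king_county-backend[all]" pulls runtime and development dependencies in one shot, while a plain install gets only install_requires.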
f7002fa28c4f96c4ce9de895ed3dc6923730e7d5
705
py
Python
scrapy_proj/openrecipes/spiders/elanaspantry_feedspider.py
fictivekin/openrecipes
82b5c080168439b328f76a115aa2011fa4601384
[ "Apache-2.0" ]
300
2015-01-05T05:37:34.000Z
2022-03-05T16:24:37.000Z
scrapy_proj/openrecipes/spiders/elanaspantry_feedspider.py
fictivekin/openrecipes
82b5c080168439b328f76a115aa2011fa4601384
[ "Apache-2.0" ]
11
2015-05-14T04:15:22.000Z
2018-01-27T17:22:32.000Z
scrapy_proj/openrecipes/spiders/elanaspantry_feedspider.py
fictivekin/openrecipes
82b5c080168439b328f76a115aa2011fa4601384
[ "Apache-2.0" ]
100
2015-01-11T23:14:29.000Z
2022-03-25T06:03:48.000Z
from scrapy.spider import BaseSpider from scrapy.http import Request from scrapy.selector import XmlXPathSelector from openrecipes.spiders.elanaspantry_spider import ElanaspantryMixin class ElanaspantryfeedSpider(BaseSpider, ElanaspantryMixin): name = "elanaspantry.feed" allowed_domains = [ "www.elanaspantry.com", "feeds.feedburner.com", "feedproxy.google.com", ] start_urls = [ "http://feeds.feedburner.com/elanaspantry", ] def parse(self, response): xxs = XmlXPathSelector(response) links = xxs.select("//item/*[local-name()='origLink']/text()").extract() return [Request(x, callback=self.parse_item) for x in links]
30.652174
80
0.695035
from scrapy.spider import BaseSpider from scrapy.http import Request from scrapy.selector import XmlXPathSelector from openrecipes.spiders.elanaspantry_spider import ElanaspantryMixin class ElanaspantryfeedSpider(BaseSpider, ElanaspantryMixin): name = "elanaspantry.feed" allowed_domains = [ "www.elanaspantry.com", "feeds.feedburner.com", "feedproxy.google.com", ] start_urls = [ "http://feeds.feedburner.com/elanaspantry", ] def parse(self, response): xxs = XmlXPathSelector(response) links = xxs.select("//item/*[local-name()='origLink']/text()").extract() return [Request(x, callback=self.parse_item) for x in links]
true
true
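The feed spider's one non-obvious move is the local-name() trick in its XPath: feedburner wraps the real article URL in a namespaced <feedburner:origLink> element, and matching on local-name() avoids having to register that namespace with the selector. A standalone illustration with lxml (lxml and the sample feed are ours, not part of the project):

from lxml import etree

rss = b"""<rss xmlns:feedburner="http://rssnamespace.org/feedburner/ext/1.0">
  <channel><item>
    <link>http://feedproxy.google.com/~r/elanaspantry/abc</link>
    <feedburner:origLink>http://www.elanaspantry.com/some-recipe/</feedburner:origLink>
  </item></channel></rss>"""

tree = etree.fromstring(rss)
# Same XPath the spider uses: match the element by local name only.
print(tree.xpath("//item/*[local-name()='origLink']/text()"))
# ['http://www.elanaspantry.com/some-recipe/']

Each extracted origLink is then dispatched as a fresh Request with callback=self.parse_item, so the recipe-parsing logic can live in ElanaspantryMixin and be shared with a crawl spider.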
f70030098f7ff150f5b43cdb085cc72adcf3e29e
3,676
py
Python
AlyMoly/venta/models.py
CreceLibre/alymoly
1420e305f41301b8548dfbbabfc64330b74403be
[ "MIT" ]
null
null
null
AlyMoly/venta/models.py
CreceLibre/alymoly
1420e305f41301b8548dfbbabfc64330b74403be
[ "MIT" ]
null
null
null
AlyMoly/venta/models.py
CreceLibre/alymoly
1420e305f41301b8548dfbbabfc64330b74403be
[ "MIT" ]
null
null
null
#!/usr/bin/env python #-*- encoding: UTF-8 -*- ############################################### # All rights reserved to: # # CreceLibre Consultores en Tecnologías Ltda. # # # # ©Milton Inostroza Aguilera # # [email protected] # # 2009 # ############################################### from django.db import models from AlyMoly.mantenedor.models import Producto, Promocion, Trabajador class Turno(models.Model): """ estado: 1 --> open 2 --> closed """ fecha_apertura_sistema = models.DateTimeField() fecha_cierre_sistema = models.DateTimeField(null=True, blank=True) estado = models.IntegerField(default=1, blank=True) trabajador = models.ForeignKey(Trabajador, blank=True) monto_apertura_caja = models.IntegerField(default=0) monto_cierre_calculado = models.IntegerField(default=0, blank=True) monto_afecto = models.IntegerField(default=0, blank=True) monto_exento = models.IntegerField(default=0, blank=True) def monto_cierre_informado(self): return self.boletadeposito.total def estado_turno(self): if self.estado == 1: return "Abierto" else: return "Cerrado" def save(self, force_insert=False, force_update=False): """ When saving an open shift, check that the worker does not already have another open shift. """ if self.estado == 1 and len(Turno.objects.exclude(id=self.id).filter(trabajador__id=self.trabajador.id).filter(estado=1)) > 0: raise Exception(u"Usted ya cuenta con un turno abierto.") super(Turno, self).save(force_insert, force_update) class BoletaDeposito(models.Model): turno = models.OneToOneField(Turno, blank=True) veintemil = models.PositiveIntegerField(default=0, blank=True) diezmil = models.PositiveIntegerField(default=0, blank=True) cincomil = models.PositiveIntegerField(default=0, blank=True) dosmil = models.PositiveIntegerField(default=0, blank=True) mil = models.PositiveIntegerField(default=0, blank=True) quinientos = models.PositiveIntegerField(default=0, blank=True) cien = models.PositiveIntegerField(default=0, blank=True) cincuenta = models.PositiveIntegerField(default=0, blank=True) diez = models.PositiveIntegerField(default=0, blank=True) tarjetas = models.PositiveIntegerField(default=0, blank=True) otros = models.PositiveIntegerField(default=0, blank=True) total = models.PositiveIntegerField(default=0, blank=True) class Venta(models.Model): """ medio_pago: 1 --> cash 2 --> other """ fecha_venta = models.DateTimeField() folio_boleta = models.PositiveIntegerField(null=True, blank=True) monto_total = models.PositiveIntegerField() monto_afecto = models.PositiveIntegerField() monto_exento = models.PositiveIntegerField() cantidad_productos = models.PositiveIntegerField() medio_pago = models.PositiveIntegerField() monto_pago = models.PositiveIntegerField(null=True) turno = models.ForeignKey('Turno') def __unicode__(self): return u"%s-%s" % (self.id, self.folio_boleta) class LineaDetalle(models.Model): cantidad = models.IntegerField() precio_venta = models.IntegerField() precio_venta_total = models.IntegerField() producto = models.ForeignKey(Producto, null=True, blank=True) promocion = models.ForeignKey(Promocion, null=True, blank=True) venta = models.ForeignKey('Venta')
39.526882
134
0.661045
from django.db import models from AlyMoly.mantenedor.models import Producto, Promocion, Trabajador class Turno(models.Model): fecha_apertura_sistema = models.DateTimeField() fecha_cierre_sistema = models.DateTimeField(null=True, blank=True) estado = models.IntegerField(default=1, blank=True) trabajador = models.ForeignKey(Trabajador, blank=True) monto_apertura_caja = models.IntegerField(default=0) monto_cierre_calculado = models.IntegerField(default=0, blank=True) monto_afecto = models.IntegerField(default=0, blank=True) monto_exento = models.IntegerField(default=0, blank=True) def monto_cierre_informado(self): return self.boletadeposito.total def estado_turno(self): if self.estado == 1: return "Abierto" else: return "Cerrado" def save(self, force_insert=False, force_update=False): if self.estado == 1 and len(Turno.objects.exclude(id=self.id).filter(trabajador__id=self.trabajador.id).filter(estado=1)) > 0: raise Exception(u"Usted ya cuenta con un turno abierto.") super(Turno, self).save(force_insert, force_update) class BoletaDeposito(models.Model): turno = models.OneToOneField(Turno, blank=True) veintemil = models.PositiveIntegerField(default=0, blank=True) diezmil = models.PositiveIntegerField(default=0, blank=True) cincomil = models.PositiveIntegerField(default=0, blank=True) dosmil = models.PositiveIntegerField(default=0, blank=True) mil = models.PositiveIntegerField(default=0, blank=True) quinientos = models.PositiveIntegerField(default=0, blank=True) cien = models.PositiveIntegerField(default=0, blank=True) cincuenta = models.PositiveIntegerField(default=0, blank=True) diez = models.PositiveIntegerField(default=0, blank=True) tarjetas = models.PositiveIntegerField(default=0, blank=True) otros = models.PositiveIntegerField(default=0, blank=True) total = models.PositiveIntegerField(default=0, blank=True) class Venta(models.Model): fecha_venta = models.DateTimeField() folio_boleta = models.PositiveIntegerField(null=True, blank=True) monto_total = models.PositiveIntegerField() monto_afecto = models.PositiveIntegerField() monto_exento = models.PositiveIntegerField() cantidad_productos = models.PositiveIntegerField() medio_pago = models.PositiveIntegerField() monto_pago = models.PositiveIntegerField(null=True) turno = models.ForeignKey('Turno') def __unicode__(self): return u"%s-%s" % (self.id, self.folio_boleta) class LineaDetalle(models.Model): cantidad = models.IntegerField() precio_venta = models.IntegerField() precio_venta_total = models.IntegerField() producto = models.ForeignKey(Producto, null=True, blank=True) promocion = models.ForeignKey(Promocion, null=True, blank=True) venta = models.ForeignKey('Venta')
true
true
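Turno.save() in the record above enforces at most one open shift (estado == 1) per worker by querying for other open shifts before delegating to the parent save. The check, isolated into a predicate (model names come from the record; exists() is an equivalent, cheaper form of the len(...) > 0 test):

def has_other_open_shift(turno):
    # True when another Turno for the same worker is still open.
    return (Turno.objects
                 .exclude(id=turno.id)
                 .filter(trabajador__id=turno.trabajador.id)
                 .filter(estado=1)
                 .exists())

Note the guard is advisory rather than race-proof: two concurrent saves can both pass the check before either row lands, so a hard guarantee would need a database-level constraint or select_for_update.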
f700308f76753f938a995240fe09d0f4b13796ba
5,919
py
Python
examples/gb1/train_oracle.py
ayushkarnawat/profit
f3c4d601078b52513af6832c3faf75ddafc59ac5
[ "MIT" ]
null
null
null
examples/gb1/train_oracle.py
ayushkarnawat/profit
f3c4d601078b52513af6832c3faf75ddafc59ac5
[ "MIT" ]
1
2021-09-15T13:13:12.000Z
2021-09-15T13:13:12.000Z
examples/gb1/train_oracle.py
ayushkarnawat/profit
f3c4d601078b52513af6832c3faf75ddafc59ac5
[ "MIT" ]
null
null
null
"""Train (basic) densely-connected oracle.""" import os import time import multiprocessing as mp import pandas as pd import torch from torch import optim from torch.utils.data import DataLoader, Subset, TensorDataset, WeightedRandomSampler from profit.dataset.splitters import split_method_dict from profit.models.torch import SequenceOracle from profit.utils.data_utils.tokenizers import AminoAcidTokenizer from profit.utils.training_utils.torch import losses as L from profit.utils.training_utils.torch.callbacks import ModelCheckpoint from profit.utils.training_utils.torch.callbacks import EarlyStopping from examples.gb1.data import load_dataset timestep = time.strftime("%Y-%b-%d-%H:%M:%S", time.gmtime()) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") tensor = torch.cuda.FloatTensor if torch.cuda.is_available() else torch.Tensor splits = ["train", "valid"] # Preprocess + load the dataset dataset = load_dataset("lstm", "primary", labels="Fitness", num_data=-1, filetype="mdb", as_numpy=False, vocab="aa20") # Stratify train/val/test sets s.t. the target labels are equally represented in # each subset. Each subset will have the same ratio of low/mid/high variants in # each batch as the full dataset. See: https://discuss.pytorch.org/t/29907/2 _dataset = dataset[:]["arr_0"] _labels = dataset[:]["arr_1"].view(-1) # # Remove samples below a certain threshold # high_idx = torch.where(_labels > _labels.mean()) # dataset = Subset(dataset, sorted(high_idx)) # _dataset = _dataset[high_idx] # _labels = _labels[high_idx] # Compute sample weights (each sample should get its own weight) def sampler(labels: torch.Tensor, nbins: int = 10, stratify: bool = False) -> WeightedRandomSampler: discretize = pd.qcut if stratify else pd.cut bin_labels = torch.LongTensor(discretize(labels.tolist(), nbins, labels=False, duplicates="drop")) class_sample_count = torch.LongTensor( [(bin_labels == t).sum() for t in torch.arange(nbins)]) weight = 1. 
/ class_sample_count.float() sample_weights = torch.zeros_like(labels) for t in torch.unique(bin_labels): sample_weights[bin_labels == t] = weight[t] return WeightedRandomSampler(sample_weights, len(sample_weights)) # Compute sample weights and add to original dataset weights = sampler(_labels, nbins=10, stratify=False).weights.type(torch.float) dataset = TensorDataset(*dataset[:].values(), weights) # Create subset indicies subset_idx = split_method_dict["stratified"]().train_valid_test_split( dataset=_dataset, labels=_labels.tolist(), frac_train=0.9, frac_valid=0.1, frac_test=0.0, return_idxs=True, n_bins=10) stratified = {split: Subset(dataset, sorted(idx)) for split, idx in zip(splits, subset_idx)} # Create stratified sampler (only needed for training) train_sampler = sampler(stratified["train"][:][1].view(-1), stratify=True) # Initialize model tokenizer = AminoAcidTokenizer("aa20") vocab_size = tokenizer.vocab_size seqlen = stratified["train"][0][0].size(0) model = SequenceOracle(seqlen, vocab_size, hidden_size=50, out_size=2) # Initialize callbacks # NOTE: Must set model (within save_clbk) to ensure weights get saved stop_clbk = EarlyStopping(patience=5, verbose=1) save_clbk = ModelCheckpoint(os.path.join("bin/3gb1/oracle", timestep), monitor="val_loss", verbose=1, save_weights_only=True) save_clbk.set_model(model) # Initialize callbacks optimizer = optim.AdamW(model.parameters(), lr=1e-3) epochs = 50 for epoch in range(1, epochs+1): for split in splits: summed_loss = 0 data_loader = DataLoader( dataset=stratified[split], batch_size=32, sampler=train_sampler if split == "train" else None, num_workers=mp.cpu_count(), pin_memory=torch.cuda.is_available() ) # Enable/disable dropout model.train() if split == "train" else model.eval() for it, batch in enumerate(data_loader): data = batch[0].long().to(device) target = batch[1].to(device) sample_weight = batch[2].to(device) # One-hot encode (see: https://discuss.pytorch.org/t/507/34) batch_size, seqlen = data.size() onehot = torch.zeros(batch_size, seqlen, vocab_size) onehot.scatter_(2, torch.unsqueeze(data, 2), 1) # Forward pass pred = model(onehot) # Loss calculation nll_loss = L.gaussian_nll_loss(pred, target, reduction="none") # Reweight nll_loss w/ sample weights nll_loss = (nll_loss * sample_weight).sum() summed_loss += nll_loss.item() loss = nll_loss / batch_size # Compute gradients and update params/weights if split == "train": optimizer.zero_grad() loss.backward() optimizer.step() # Bookkeeping (batch) if it % 5 == 0 or it+1 == len(data_loader): print("{} Batch {:04d}/{:d} ({:.2f}%)\tLoss: {:.4f}".format( split.upper(), it+1, len(data_loader), 100. * ((it+1)/len(data_loader)), loss.item())) # Bookkeeping (epoch) avg_loss = summed_loss / len(data_loader.dataset) print("{} Epoch {}/{}, Average NLL loss: {:.4f}".format( split.upper(), epoch, epochs, avg_loss)) # Stop training (based off val loss) and save (top k) ckpts if split == "valid": save_clbk.on_epoch_end(epoch, logs={"val_loss": avg_loss}) should_stop = stop_clbk.on_epoch_end(epoch, logs={"val_loss": avg_loss}) if should_stop: break else: continue break
40.265306
85
0.653151
import os import time import multiprocessing as mp import pandas as pd import torch from torch import optim from torch.utils.data import DataLoader, Subset, TensorDataset, WeightedRandomSampler from profit.dataset.splitters import split_method_dict from profit.models.torch import SequenceOracle from profit.utils.data_utils.tokenizers import AminoAcidTokenizer from profit.utils.training_utils.torch import losses as L from profit.utils.training_utils.torch.callbacks import ModelCheckpoint from profit.utils.training_utils.torch.callbacks import EarlyStopping from examples.gb1.data import load_dataset timestep = time.strftime("%Y-%b-%d-%H:%M:%S", time.gmtime()) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") tensor = torch.cuda.FloatTensor if torch.cuda.is_available() else torch.Tensor splits = ["train", "valid"] dataset = load_dataset("lstm", "primary", labels="Fitness", num_data=-1, filetype="mdb", as_numpy=False, vocab="aa20") _dataset = dataset[:]["arr_0"] _labels = dataset[:]["arr_1"].view(-1) def sampler(labels: torch.Tensor, nbins: int = 10, stratify: bool = False) -> WeightedRandomSampler: discretize = pd.qcut if stratify else pd.cut bin_labels = torch.LongTensor(discretize(labels.tolist(), nbins, labels=False, duplicates="drop")) class_sample_count = torch.LongTensor( [(bin_labels == t).sum() for t in torch.arange(nbins)]) weight = 1. / class_sample_count.float() sample_weights = torch.zeros_like(labels) for t in torch.unique(bin_labels): sample_weights[bin_labels == t] = weight[t] return WeightedRandomSampler(sample_weights, len(sample_weights)) weights = sampler(_labels, nbins=10, stratify=False).weights.type(torch.float) dataset = TensorDataset(*dataset[:].values(), weights) subset_idx = split_method_dict["stratified"]().train_valid_test_split( dataset=_dataset, labels=_labels.tolist(), frac_train=0.9, frac_valid=0.1, frac_test=0.0, return_idxs=True, n_bins=10) stratified = {split: Subset(dataset, sorted(idx)) for split, idx in zip(splits, subset_idx)} train_sampler = sampler(stratified["train"][:][1].view(-1), stratify=True) tokenizer = AminoAcidTokenizer("aa20") vocab_size = tokenizer.vocab_size seqlen = stratified["train"][0][0].size(0) model = SequenceOracle(seqlen, vocab_size, hidden_size=50, out_size=2) stop_clbk = EarlyStopping(patience=5, verbose=1) save_clbk = ModelCheckpoint(os.path.join("bin/3gb1/oracle", timestep), monitor="val_loss", verbose=1, save_weights_only=True) save_clbk.set_model(model) optimizer = optim.AdamW(model.parameters(), lr=1e-3) epochs = 50 for epoch in range(1, epochs+1): for split in splits: summed_loss = 0 data_loader = DataLoader( dataset=stratified[split], batch_size=32, sampler=train_sampler if split == "train" else None, num_workers=mp.cpu_count(), pin_memory=torch.cuda.is_available() ) model.train() if split == "train" else model.eval() for it, batch in enumerate(data_loader): data = batch[0].long().to(device) target = batch[1].to(device) sample_weight = batch[2].to(device) batch_size, seqlen = data.size() onehot = torch.zeros(batch_size, seqlen, vocab_size) onehot.scatter_(2, torch.unsqueeze(data, 2), 1) pred = model(onehot) nll_loss = L.gaussian_nll_loss(pred, target, reduction="none") nll_loss = (nll_loss * sample_weight).sum() summed_loss += nll_loss.item() loss = nll_loss / batch_size if split == "train": optimizer.zero_grad() loss.backward() optimizer.step() if it % 5 == 0 or it+1 == len(data_loader): print("{} Batch {:04d}/{:d} ({:.2f}%)\tLoss: {:.4f}".format( split.upper(), it+1, len(data_loader), 100. 
* ((it+1)/len(data_loader)), loss.item())) avg_loss = summed_loss / len(data_loader.dataset) print("{} Epoch {}/{}, Average NLL loss: {:.4f}".format( split.upper(), epoch, epochs, avg_loss)) if split == "valid": save_clbk.on_epoch_end(epoch, logs={"val_loss": avg_loss}) should_stop = stop_clbk.on_epoch_end(epoch, logs={"val_loss": avg_loss}) if should_stop: break else: continue break
true
true
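The sampler helper in the record above drives both balancing passes: pd.cut (or pd.qcut when stratifying) discretizes the continuous fitness labels into bins, and every sample is weighted by the inverse of its bin's population so WeightedRandomSampler draws rare fitness ranges as often as common ones. A toy walk-through of the same arithmetic (the labels are made up):

import pandas as pd
import torch

labels = torch.tensor([0.1, 0.1, 0.2, 0.9, 3.5])
nbins = 2
# Equal-width bins, as in the non-stratified path of sampler().
bin_labels = torch.LongTensor(pd.cut(labels.tolist(), nbins, labels=False))
class_sample_count = torch.LongTensor(
    [(bin_labels == t).sum() for t in torch.arange(nbins)])
weight = 1.0 / class_sample_count.float()
sample_weights = torch.zeros_like(labels)
for t in torch.unique(bin_labels):
    sample_weights[bin_labels == t] = weight[t]
print(bin_labels)      # tensor([0, 0, 0, 0, 1])
print(sample_weights)  # tensor([0.2500, 0.2500, 0.2500, 0.2500, 1.0000])

The four labels below the bin split each get weight 1/4 and the lone outlier gets 1, so in expectation each bin contributes equally to a sampled batch.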
f70031964499de2478621f668a6bbbbe0d067348
228,842
py
Python
tests/unit/gapic/deploy_v1/test_cloud_deploy.py
LaudateCorpus1/python-deploy
aaa957f2673db673c3a8e38275d4689323ded044
[ "Apache-2.0" ]
null
null
null
tests/unit/gapic/deploy_v1/test_cloud_deploy.py
LaudateCorpus1/python-deploy
aaa957f2673db673c3a8e38275d4689323ded044
[ "Apache-2.0" ]
null
null
null
tests/unit/gapic/deploy_v1/test_cloud_deploy.py
LaudateCorpus1/python-deploy
aaa957f2673db673c3a8e38275d4689323ded044
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import mock import grpc from grpc.experimental import aio import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployAsyncClient from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployClient from google.cloud.deploy_v1.services.cloud_deploy import pagers from google.cloud.deploy_v1.services.cloud_deploy import transports from google.cloud.deploy_v1.types import cloud_deploy from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth def client_cert_source_callback(): return b"cert bytes", b"key bytes" # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client): return ( "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT ) def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" assert CloudDeployClient._get_default_mtls_endpoint(None) is None assert ( CloudDeployClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( CloudDeployClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( CloudDeployClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( CloudDeployClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert CloudDeployClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi @pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient,]) def test_cloud_deploy_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "clouddeploy.googleapis.com:443" @pytest.mark.parametrize( "transport_class,transport_name", [ (transports.CloudDeployGrpcTransport, "grpc"), (transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio"), ], ) def test_cloud_deploy_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=False) use_jwt.assert_not_called() @pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient,]) def test_cloud_deploy_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "clouddeploy.googleapis.com:443" def test_cloud_deploy_client_get_transport_class(): transport = CloudDeployClient.get_transport_class() available_transports = [ transports.CloudDeployGrpcTransport, ] assert transport in available_transports transport = CloudDeployClient.get_transport_class("grpc") assert transport == transports.CloudDeployGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), ( CloudDeployAsyncClient, 
transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) @mock.patch.object( CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient) ) @mock.patch.object( CloudDeployAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployAsyncClient), ) def test_cloud_deploy_client_client_options( client_class, transport_class, transport_name ): # Check that if channel is provided we won't create a new one. with mock.patch.object(CloudDeployClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. with mock.patch.object(CloudDeployClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc", "true"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", "true", ), (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc", "false"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", "false", ), ], ) @mock.patch.object( CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient) ) @mock.patch.object( CloudDeployAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_cloud_deploy_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None expected_host = client.DEFAULT_ENDPOINT else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): with mock.patch( "google.auth.transport.mtls.default_client_cert_source", return_value=client_cert_source_callback, ): if use_client_cert_env == "false": expected_host = client.DEFAULT_ENDPOINT expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT expected_client_cert_source = client_cert_source_callback patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=False, ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient]) @mock.patch.object( CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient) ) @mock.patch.object( CloudDeployAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployAsyncClient), ) def test_cloud_deploy_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" options = client_options.ClientOptions( client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
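    # (Taken together, the cases in this test assert the following resolution
    # rules: "never" -> (DEFAULT_ENDPOINT, None); "always" ->
    # (DEFAULT_MTLS_ENDPOINT, None); "auto" -> the mTLS endpoint and cert
    # source only when GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and a
    # default client cert exists, otherwise (DEFAULT_ENDPOINT, None).)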
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=False, ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): with mock.patch( "google.auth.transport.mtls.default_client_cert_source", return_value=mock_client_cert_source, ): ( api_endpoint, cert_source, ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_cloud_deploy_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_cloud_deploy_client_client_options_credentials_file( client_class, transport_class, transport_name ): # Check the case credentials file is provided. 
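    # For reference, this mirrors what a caller would write (the file name
    # below is just the fixture value used in this test):
    #   options = client_options.ClientOptions(credentials_file="credentials.json")
    #   client = CloudDeployClient(client_options=options)
    # The path is handed to the transport untouched, as asserted below.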
options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) def test_cloud_deploy_client_client_options_from_dict(): with mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = CloudDeployClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "request_type", [cloud_deploy.ListDeliveryPipelinesRequest, dict,] ) def test_list_delivery_pipelines(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListDeliveryPipelinesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) response = client.list_delivery_pipelines(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDeliveryPipelinesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] def test_list_delivery_pipelines_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: client.list_delivery_pipelines() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest() @pytest.mark.asyncio async def test_list_delivery_pipelines_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.ListDeliveryPipelinesRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
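    # (FakeUnaryUnaryCall, used throughout the async tests here, wraps a plain
    # response object in an awaitable so the mocked stub behaves like a real
    # async unary-unary gRPC call.)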
with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListDeliveryPipelinesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) response = await client.list_delivery_pipelines(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListDeliveryPipelinesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_delivery_pipelines_async_from_dict(): await test_list_delivery_pipelines_async(request_type=dict) def test_list_delivery_pipelines_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListDeliveryPipelinesRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = cloud_deploy.ListDeliveryPipelinesResponse() client.list_delivery_pipelines(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_delivery_pipelines_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListDeliveryPipelinesRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListDeliveryPipelinesResponse() ) await client.list_delivery_pipelines(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_delivery_pipelines_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListDeliveryPipelinesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_delivery_pipelines(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. 
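        # (The flattened keyword arguments are copied by the client onto a
        # newly constructed request proto, which is why the checks below
        # inspect args[0].parent instead of a separate keyword argument.)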
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val def test_list_delivery_pipelines_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_delivery_pipelines( cloud_deploy.ListDeliveryPipelinesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_delivery_pipelines_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListDeliveryPipelinesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListDeliveryPipelinesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_delivery_pipelines(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio async def test_list_delivery_pipelines_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_delivery_pipelines( cloud_deploy.ListDeliveryPipelinesRequest(), parent="parent_value", ) def test_list_delivery_pipelines_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_delivery_pipelines(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) def test_list_delivery_pipelines_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: # Set the response to a series of pages. 
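        # Each stub invocation consumes the next element of side_effect; an
        # empty next_page_token marks the final page, and the trailing
        # RuntimeError would only surface if the pager over-fetched.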
call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) pages = list(client.list_delivery_pipelines(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_delivery_pipelines_async_pager(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) async_pager = await client.list_delivery_pipelines(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in responses) @pytest.mark.asyncio async def test_list_delivery_pipelines_async_pages(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__", new_callable=mock.AsyncMock, ) as call: # Set the response to a series of pages. 
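        # Unlike the sync pager above, the async call itself is awaited and
        # returns an AsyncPager whose .pages attribute is iterated with
        # "async for", as exercised at the bottom of this test.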
call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) pages = [] async for page_ in (await client.list_delivery_pipelines(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [cloud_deploy.GetDeliveryPipelineRequest, dict,] ) def test_get_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.DeliveryPipeline( name="name_value", uid="uid_value", description="description_value", etag="etag_value", serial_pipeline=cloud_deploy.SerialPipeline( stages=[cloud_deploy.Stage(target_id="target_id_value")] ), ) response = client.get_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.DeliveryPipeline) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.etag == "etag_value" def test_get_delivery_pipeline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: client.get_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetDeliveryPipelineRequest() @pytest.mark.asyncio async def test_get_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.DeliveryPipeline( name="name_value", uid="uid_value", description="description_value", etag="etag_value", ) ) response = await client.get_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.DeliveryPipeline) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_delivery_pipeline_async_from_dict(): await test_get_delivery_pipeline_async(request_type=dict) def test_get_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetDeliveryPipelineRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = cloud_deploy.DeliveryPipeline() client.get_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetDeliveryPipelineRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.DeliveryPipeline() ) await client.get_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.DeliveryPipeline() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_delivery_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_delivery_pipeline( cloud_deploy.GetDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.DeliveryPipeline() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.DeliveryPipeline() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_delivery_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_delivery_pipeline_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_delivery_pipeline( cloud_deploy.GetDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.parametrize( "request_type", [cloud_deploy.CreateDeliveryPipelineRequest, dict,] ) def test_create_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_create_delivery_pipeline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
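    # Create/update/delete of a DeliveryPipeline are long-running operations:
    # the stub returns an operations_pb2.Operation which the client wraps in
    # an api_core future -- hence the future.Future checks in the surrounding
    # tests.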
with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: client.create_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest() @pytest.mark.asyncio async def test_create_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_create_delivery_pipeline_async_from_dict(): await test_create_delivery_pipeline_async(request_type=dict) def test_create_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateDeliveryPipelineRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateDeliveryPipelineRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.create_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. 
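    # The routing metadata travels as the "x-goog-request-params" header so
    # the backend can route by resource; its value is rendered from the
    # request field named in the routing annotation (here: parent).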
_, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_delivery_pipeline( parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].delivery_pipeline_id mock_val = "delivery_pipeline_id_value" assert arg == mock_val def test_create_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_delivery_pipeline( cloud_deploy.CreateDeliveryPipelineRequest(), parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) @pytest.mark.asyncio async def test_create_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_delivery_pipeline( parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].delivery_pipeline_id mock_val = "delivery_pipeline_id_value" assert arg == mock_val @pytest.mark.asyncio async def test_create_delivery_pipeline_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.create_delivery_pipeline( cloud_deploy.CreateDeliveryPipelineRequest(), parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) @pytest.mark.parametrize( "request_type", [cloud_deploy.UpdateDeliveryPipelineRequest, dict,] ) def test_update_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_update_delivery_pipeline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: client.update_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest() @pytest.mark.asyncio async def test_update_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.update_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_update_delivery_pipeline_async_from_dict(): await test_update_delivery_pipeline_async(request_type=dict) def test_update_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
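    # For Update RPCs the routing value comes from the nested resource name
    # (request.delivery_pipeline.name), so the expected header below uses the
    # dotted field path rather than a top-level field.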
request = cloud_deploy.UpdateDeliveryPipelineRequest() request.delivery_pipeline.name = "delivery_pipeline.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "delivery_pipeline.name=delivery_pipeline.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.UpdateDeliveryPipelineRequest() request.delivery_pipeline.name = "delivery_pipeline.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.update_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "delivery_pipeline.name=delivery_pipeline.name/value", ) in kw["metadata"] def test_update_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_delivery_pipeline( delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val def test_update_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_delivery_pipeline( cloud_deploy.UpdateDeliveryPipelineRequest(), delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_delivery_pipeline( delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio async def test_update_delivery_pipeline_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_delivery_pipeline( cloud_deploy.UpdateDeliveryPipelineRequest(), delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [cloud_deploy.DeleteDeliveryPipelineRequest, dict,] ) def test_delete_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_delete_delivery_pipeline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: client.delete_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest() @pytest.mark.asyncio async def test_delete_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_delete_delivery_pipeline_async_from_dict(): await test_delete_delivery_pipeline_async(request_type=dict) def test_delete_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.DeleteDeliveryPipelineRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.DeleteDeliveryPipelineRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.delete_delivery_pipeline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_delivery_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_delete_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_delivery_pipeline( cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_delivery_pipeline(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_delete_delivery_pipeline_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_delivery_pipeline( cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.ListTargetsRequest, dict,]) def test_list_targets(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListTargetsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) response = client.list_targets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListTargetsRequest() # Establish that the response is the type that we expect. 
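    # (The pager wraps this first response and re-invokes the RPC lazily as
    # the caller iterates, so only the first page's fields are asserted here.)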
assert isinstance(response, pagers.ListTargetsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] def test_list_targets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: client.list_targets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListTargetsRequest() @pytest.mark.asyncio async def test_list_targets_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.ListTargetsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListTargetsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) response = await client.list_targets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListTargetsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTargetsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_targets_async_from_dict(): await test_list_targets_async(request_type=dict) def test_list_targets_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListTargetsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: call.return_value = cloud_deploy.ListTargetsResponse() client.list_targets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_targets_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListTargetsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_targets), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListTargetsResponse() ) await client.list_targets(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_targets_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListTargetsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_targets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val def test_list_targets_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_targets( cloud_deploy.ListTargetsRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_targets_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListTargetsResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListTargetsResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_targets(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio async def test_list_targets_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_targets( cloud_deploy.ListTargetsRequest(), parent="parent_value", ) def test_list_targets_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_deploy.ListTargetsResponse( targets=[ cloud_deploy.Target(), cloud_deploy.Target(), cloud_deploy.Target(), ], next_page_token="abc", ), cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(),], next_page_token="ghi", ), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(), cloud_deploy.Target(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_targets(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, cloud_deploy.Target) for i in results) def test_list_targets_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_targets), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloud_deploy.ListTargetsResponse( targets=[ cloud_deploy.Target(), cloud_deploy.Target(), cloud_deploy.Target(), ], next_page_token="abc", ), cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(),], next_page_token="ghi", ), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(), cloud_deploy.Target(),], ), RuntimeError, ) pages = list(client.list_targets(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_targets_async_pager(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_targets), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( cloud_deploy.ListTargetsResponse( targets=[ cloud_deploy.Target(), cloud_deploy.Target(), cloud_deploy.Target(), ], next_page_token="abc", ), cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(),], next_page_token="ghi", ), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(), cloud_deploy.Target(),], ), RuntimeError, ) async_pager = await client.list_targets(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, cloud_deploy.Target) for i in responses) @pytest.mark.asyncio async def test_list_targets_async_pages(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_targets), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
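        # The async variant exposes the same paging data via ``.pages``: each
        # awaited list call returns an AsyncPager whose pages are consumed with
        # ``async for``. A minimal sketch (hypothetical parent value):
        #
        #   pager = await client.list_targets(parent="projects/p/locations/l")
        #   async for page in pager.pages:
        #       for target in page.targets:
        #           print(target.name)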
call.side_effect = ( cloud_deploy.ListTargetsResponse( targets=[ cloud_deploy.Target(), cloud_deploy.Target(), cloud_deploy.Target(), ], next_page_token="abc", ), cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(),], next_page_token="ghi", ), cloud_deploy.ListTargetsResponse( targets=[cloud_deploy.Target(), cloud_deploy.Target(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_targets(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [cloud_deploy.GetTargetRequest, dict,]) def test_get_target(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Target( name="name_value", target_id="target_id_value", uid="uid_value", description="description_value", require_approval=True, etag="etag_value", gke=cloud_deploy.GkeCluster(cluster="cluster_value"), ) response = client.get_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Target) assert response.name == "name_value" assert response.target_id == "target_id_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.require_approval is True assert response.etag == "etag_value" def test_get_target_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_target), "__call__") as call: client.get_target() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetTargetRequest() @pytest.mark.asyncio async def test_get_target_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetTargetRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Target( name="name_value", target_id="target_id_value", uid="uid_value", description="description_value", require_approval=True, etag="etag_value", ) ) response = await client.get_target(request) # Establish that the underlying gRPC stub method was called. 
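    # Note: the async tests assert only that the mocked stub was called at
    # least once (a truthy ``len(call.mock_calls)``), whereas the sync tests
    # pin the count to exactly 1; the looser check is a convention of the
    # generated async tests, presumably to stay robust against the extra
    # bookkeeping done by the awaitable mock wrapper.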
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Target) assert response.name == "name_value" assert response.target_id == "target_id_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.require_approval is True assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_target_async_from_dict(): await test_get_target_async(request_type=dict) def test_get_target_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetTargetRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_target), "__call__") as call: call.return_value = cloud_deploy.Target() client.get_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_target_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetTargetRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_target), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Target()) await client.get_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_target_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Target() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_target(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_target_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_target( cloud_deploy.GetTargetRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_target_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
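    # The pattern below first assigns a plain response to ``call.return_value``
    # and then immediately overwrites it with a ``FakeUnaryUnaryCall`` wrapper.
    # Only the second assignment is effective -- the awaited stub must return
    # an awaitable -- so the first line is dead code carried along by the
    # generator.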
with mock.patch.object(type(client.transport.get_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Target() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Target()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_target(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_target_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_target( cloud_deploy.GetTargetRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.CreateTargetRequest, dict,]) def test_create_target(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_create_target_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: client.create_target() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateTargetRequest() @pytest.mark.asyncio async def test_create_target_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateTargetRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_target(request) # Establish that the underlying gRPC stub method was called. 
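    # ``create_target`` is a long-running operation: the client wraps the raw
    # ``operations_pb2.Operation`` in a ``google.api_core`` operation future,
    # which is why the assertion below checks for ``future.Future``. Typical
    # blocking usage (hypothetical timeout value, not part of this test):
    #
    #   operation = client.create_target(request=request)
    #   target = operation.result(timeout=300)  # waits for the LRO to finish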
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_create_target_async_from_dict(): await test_create_target_async(request_type=dict) def test_create_target_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateTargetRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_target_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateTargetRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.create_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_target_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_target( parent="parent_value", target=cloud_deploy.Target(name="name_value"), target_id="target_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].target mock_val = cloud_deploy.Target(name="name_value") assert arg == mock_val arg = args[0].target_id mock_val = "target_id_value" assert arg == mock_val def test_create_target_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
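    # A request object and flattened keyword fields are mutually exclusive:
    # callers pick exactly one style. Either of the following is valid on its
    # own (hypothetical values), but combining them raises ValueError:
    #
    #   client.create_target(request=cloud_deploy.CreateTargetRequest(...))
    #   client.create_target(parent="...", target=..., target_id="...")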
with pytest.raises(ValueError): client.create_target( cloud_deploy.CreateTargetRequest(), parent="parent_value", target=cloud_deploy.Target(name="name_value"), target_id="target_id_value", ) @pytest.mark.asyncio async def test_create_target_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_target( parent="parent_value", target=cloud_deploy.Target(name="name_value"), target_id="target_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].target mock_val = cloud_deploy.Target(name="name_value") assert arg == mock_val arg = args[0].target_id mock_val = "target_id_value" assert arg == mock_val @pytest.mark.asyncio async def test_create_target_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_target( cloud_deploy.CreateTargetRequest(), parent="parent_value", target=cloud_deploy.Target(name="name_value"), target_id="target_id_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.UpdateTargetRequest, dict,]) def test_update_target(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_update_target_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
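    # Calling with no arguments exercises the ``request is None`` branch: the
    # method must synthesize a default UpdateTargetRequest() rather than
    # forwarding None to the stub, which is what the assertion below verifies.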
with mock.patch.object(type(client.transport.update_target), "__call__") as call: client.update_target() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateTargetRequest() @pytest.mark.asyncio async def test_update_target_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateTargetRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.update_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_update_target_async_from_dict(): await test_update_target_async(request_type=dict) def test_update_target_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.UpdateTargetRequest() request.target.name = "target.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_target), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "target.name=target.name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_update_target_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.UpdateTargetRequest() request.target.name = "target.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_target), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.update_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "target.name=target.name/value",) in kw["metadata"] def test_update_target_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_target), "__call__") as call: # Designate an appropriate return value for the call. 
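        # The FieldMask passed below tells the service which Target fields to
        # update; the test only needs a placeholder path. In real usage the
        # paths name actual proto fields, e.g. (hypothetical):
        #
        #   update_mask = field_mask_pb2.FieldMask(paths=["description"])
        #   client.update_target(target=target, update_mask=update_mask)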
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_target( target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].target mock_val = cloud_deploy.Target(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val def test_update_target_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.update_target( cloud_deploy.UpdateTargetRequest(), target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_target_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.update_target( target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].target mock_val = cloud_deploy.Target(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio async def test_update_target_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.update_target( cloud_deploy.UpdateTargetRequest(), target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize("request_type", [cloud_deploy.DeleteTargetRequest, dict,]) def test_delete_target(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_target(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_delete_target_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: client.delete_target() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteTargetRequest() @pytest.mark.asyncio async def test_delete_target_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteTargetRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.delete_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteTargetRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_delete_target_async_from_dict(): await test_delete_target_async(request_type=dict) def test_delete_target_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.DeleteTargetRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_target(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_target_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.DeleteTargetRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.delete_target(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_target_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_target(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_delete_target_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_target( cloud_deploy.DeleteTargetRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_target_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_target), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.delete_target(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_delete_target_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_target( cloud_deploy.DeleteTargetRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.ListReleasesRequest, dict,]) def test_list_releases(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListReleasesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) response = client.list_releases(request) # Establish that the underlying gRPC stub method was called. 
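    # ``unreachable`` in list responses reports resources/locations the
    # service could not query (per AIP-217); the test merely round-trips a
    # placeholder value through the mock rather than exercising that behavior.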
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListReleasesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListReleasesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] def test_list_releases_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: client.list_releases() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListReleasesRequest() @pytest.mark.asyncio async def test_list_releases_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.ListReleasesRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListReleasesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) response = await client.list_releases(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListReleasesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListReleasesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_releases_async_from_dict(): await test_list_releases_async(request_type=dict) def test_list_releases_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListReleasesRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: call.return_value = cloud_deploy.ListReleasesResponse() client.list_releases(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_releases_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListReleasesRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_releases), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListReleasesResponse() ) await client.list_releases(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_releases_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListReleasesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_releases(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val def test_list_releases_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_releases( cloud_deploy.ListReleasesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_releases_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListReleasesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListReleasesResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.list_releases(parent="parent_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio async def test_list_releases_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_releases( cloud_deploy.ListReleasesRequest(), parent="parent_value", ) def test_list_releases_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_deploy.ListReleasesResponse( releases=[ cloud_deploy.Release(), cloud_deploy.Release(), cloud_deploy.Release(), ], next_page_token="abc", ), cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(),], next_page_token="ghi", ), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(), cloud_deploy.Release(),], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_releases(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, cloud_deploy.Release) for i in results) def test_list_releases_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_releases), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloud_deploy.ListReleasesResponse( releases=[ cloud_deploy.Release(), cloud_deploy.Release(), cloud_deploy.Release(), ], next_page_token="abc", ), cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(),], next_page_token="ghi", ), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(), cloud_deploy.Release(),], ), RuntimeError, ) pages = list(client.list_releases(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_releases_async_pager(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_releases), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( cloud_deploy.ListReleasesResponse( releases=[ cloud_deploy.Release(), cloud_deploy.Release(), cloud_deploy.Release(), ], next_page_token="abc", ), cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(),], next_page_token="ghi", ), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(), cloud_deploy.Release(),], ), RuntimeError, ) async_pager = await client.list_releases(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, cloud_deploy.Release) for i in responses) @pytest.mark.asyncio async def test_list_releases_async_pages(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.list_releases), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( cloud_deploy.ListReleasesResponse( releases=[ cloud_deploy.Release(), cloud_deploy.Release(), cloud_deploy.Release(), ], next_page_token="abc", ), cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(),], next_page_token="ghi", ), cloud_deploy.ListReleasesResponse( releases=[cloud_deploy.Release(), cloud_deploy.Release(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_releases(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [cloud_deploy.GetReleaseRequest, dict,]) def test_get_release(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Release( name="name_value", uid="uid_value", description="description_value", skaffold_config_uri="skaffold_config_uri_value", skaffold_config_path="skaffold_config_path_value", render_state=cloud_deploy.Release.RenderState.SUCCEEDED, etag="etag_value", skaffold_version="skaffold_version_value", ) response = client.get_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetReleaseRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Release) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.skaffold_config_uri == "skaffold_config_uri_value" assert response.skaffold_config_path == "skaffold_config_path_value" assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED assert response.etag == "etag_value" assert response.skaffold_version == "skaffold_version_value" def test_get_release_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: client.get_release() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetReleaseRequest() @pytest.mark.asyncio async def test_get_release_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetReleaseRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Release( name="name_value", uid="uid_value", description="description_value", skaffold_config_uri="skaffold_config_uri_value", skaffold_config_path="skaffold_config_path_value", render_state=cloud_deploy.Release.RenderState.SUCCEEDED, etag="etag_value", skaffold_version="skaffold_version_value", ) ) response = await client.get_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetReleaseRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Release) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.skaffold_config_uri == "skaffold_config_uri_value" assert response.skaffold_config_path == "skaffold_config_path_value" assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED assert response.etag == "etag_value" assert response.skaffold_version == "skaffold_version_value" @pytest.mark.asyncio async def test_get_release_async_from_dict(): await test_get_release_async(request_type=dict) def test_get_release_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetReleaseRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: call.return_value = cloud_deploy.Release() client.get_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_release_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetReleaseRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Release() ) await client.get_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_release_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Release() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_release(name="name_value",) # Establish that the underlying call was made with the expected # request object values. 
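    # The flattened keyword argument is packed into a GetReleaseRequest before
    # it reaches the transport, so ``args[0]`` below is that assembled proto
    # and its ``name`` field must carry the value passed as a keyword.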
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_release_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_release( cloud_deploy.GetReleaseRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_release_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_release), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Release() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Release() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_release(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_release_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_release( cloud_deploy.GetReleaseRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.CreateReleaseRequest, dict,]) def test_create_release(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_release), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateReleaseRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_create_release_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_release), "__call__") as call: client.create_release() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateReleaseRequest() @pytest.mark.asyncio async def test_create_release_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateReleaseRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_release), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateReleaseRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_create_release_async_from_dict(): await test_create_release_async(request_type=dict) def test_create_release_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateReleaseRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_release), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_release_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateReleaseRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_release), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.create_release(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_release_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_release), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_release( parent="parent_value", release=cloud_deploy.Release(name="name_value"), release_id="release_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].release mock_val = cloud_deploy.Release(name="name_value") assert arg == mock_val arg = args[0].release_id mock_val = "release_id_value" assert arg == mock_val def test_create_release_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_release( cloud_deploy.CreateReleaseRequest(), parent="parent_value", release=cloud_deploy.Release(name="name_value"), release_id="release_id_value", ) @pytest.mark.asyncio async def test_create_release_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_release), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_release( parent="parent_value", release=cloud_deploy.Release(name="name_value"), release_id="release_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].release mock_val = cloud_deploy.Release(name="name_value") assert arg == mock_val arg = args[0].release_id mock_val = "release_id_value" assert arg == mock_val @pytest.mark.asyncio async def test_create_release_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_release( cloud_deploy.CreateReleaseRequest(), parent="parent_value", release=cloud_deploy.Release(name="name_value"), release_id="release_id_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.ApproveRolloutRequest, dict,]) def test_approve_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ApproveRolloutResponse() response = client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
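    # Each entry in mock_calls unpacks as (name, args, kwargs); the request
    # proto is always the first positional argument.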
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ApproveRolloutRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.ApproveRolloutResponse) def test_approve_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: client.approve_rollout() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ApproveRolloutRequest() @pytest.mark.asyncio async def test_approve_rollout_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ApproveRolloutResponse() ) response = await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ApproveRolloutRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.ApproveRolloutResponse) @pytest.mark.asyncio async def test_approve_rollout_async_from_dict(): await test_approve_rollout_async(request_type=dict) def test_approve_rollout_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ApproveRolloutRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value = cloud_deploy.ApproveRolloutResponse() client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_approve_rollout_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ApproveRolloutRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ApproveRolloutResponse() ) await client.approve_rollout(request) # Establish that the underlying gRPC stub method was called. 
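    # The async stub may record extra bookkeeping entries in mock_calls, so
    # only a non-empty call history is asserted here (unlike the sync test,
    # which asserts exactly one call).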
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_approve_rollout_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ApproveRolloutResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.approve_rollout(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_approve_rollout_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.approve_rollout( cloud_deploy.ApproveRolloutRequest(), name="name_value", ) @pytest.mark.asyncio async def test_approve_rollout_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ApproveRolloutResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ApproveRolloutResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.approve_rollout(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_approve_rollout_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.approve_rollout( cloud_deploy.ApproveRolloutRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.ListRolloutsRequest, dict,]) def test_list_rollouts(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) response = client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. 
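    # (list_rollouts is paginated: the response asserted further below is a
    # pager wrapping the raw ListRolloutsResponse.)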
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListRolloutsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListRolloutsPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] def test_list_rollouts_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: client.list_rollouts() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListRolloutsRequest() @pytest.mark.asyncio async def test_list_rollouts_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListRolloutsResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) response = await client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListRolloutsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListRolloutsAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_rollouts_async_from_dict(): await test_list_rollouts_async(request_type=dict) def test_list_rollouts_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListRolloutsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call: call.return_value = cloud_deploy.ListRolloutsResponse() client.list_rollouts(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_list_rollouts_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.ListRolloutsRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
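    # The x-goog-request-params metadata entry checked below is how gRPC
    # clients communicate URI routing information to the backend.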
    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListRolloutsResponse()
        )
        await client.list_rollouts(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_rollouts_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_deploy.ListRolloutsResponse()
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_rollouts(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


def test_list_rollouts_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_rollouts(
            cloud_deploy.ListRolloutsRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_rollouts_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = cloud_deploy.ListRolloutsResponse()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListRolloutsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_rollouts(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_list_rollouts_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_rollouts(
            cloud_deploy.ListRolloutsRequest(), parent="parent_value",
        )


def test_list_rollouts_pager(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        # Set the response to a series of pages.
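        # Three non-empty pages are returned (with page tokens "abc", "def",
        # "ghi" and a final page with no token); the trailing RuntimeError is
        # a sentinel that would only be raised if the pager requested one page
        # too many.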
        call.side_effect = (
            cloud_deploy.ListRolloutsResponse(
                rollouts=[
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
            ),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_rollouts(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, cloud_deploy.Rollout) for i in results)


def test_list_rollouts_pages(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_deploy.ListRolloutsResponse(
                rollouts=[
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
            ),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_rollouts(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_list_rollouts_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            cloud_deploy.ListRolloutsResponse(
                rollouts=[
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
            ),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_rollouts(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, cloud_deploy.Rollout) for i in responses)


@pytest.mark.asyncio
async def test_list_rollouts_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock
    ) as call:
        # Set the response to a series of pages.
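        # Same four-page sequence as the sync tests above; here the pages are
        # consumed through the async `.pages` iterator.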
call.side_effect = ( cloud_deploy.ListRolloutsResponse( rollouts=[ cloud_deploy.Rollout(), cloud_deploy.Rollout(), cloud_deploy.Rollout(), ], next_page_token="abc", ), cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",), cloud_deploy.ListRolloutsResponse( rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi", ), cloud_deploy.ListRolloutsResponse( rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_rollouts(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [cloud_deploy.GetRolloutRequest, dict,]) def test_get_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Rollout( name="name_value", uid="uid_value", description="description_value", target_id="target_id_value", approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, state=cloud_deploy.Rollout.State.SUCCEEDED, failure_reason="failure_reason_value", deploying_build="deploying_build_value", etag="etag_value", ) response = client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetRolloutRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.target_id == "target_id_value" assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL assert response.state == cloud_deploy.Rollout.State.SUCCEEDED assert response.failure_reason == "failure_reason_value" assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" def test_get_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetRolloutRequest() @pytest.mark.asyncio async def test_get_rollout_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
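    # FakeUnaryUnaryCall wraps the mocked return value so it is awaitable,
    # mimicking a real grpc.aio unary-unary call object.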
with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Rollout( name="name_value", uid="uid_value", description="description_value", target_id="target_id_value", approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, state=cloud_deploy.Rollout.State.SUCCEEDED, failure_reason="failure_reason_value", deploying_build="deploying_build_value", etag="etag_value", ) ) response = await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetRolloutRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.target_id == "target_id_value" assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL assert response.state == cloud_deploy.Rollout.State.SUCCEEDED assert response.failure_reason == "failure_reason_value" assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_rollout_async_from_dict(): await test_get_rollout_async(request_type=dict) def test_get_rollout_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetRolloutRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = cloud_deploy.Rollout() client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_rollout_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetRolloutRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Rollout() ) await client.get_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_rollout_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
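        # The flattened `name` keyword below is merged by the client into a
        # GetRolloutRequest before the transport is invoked.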
call.return_value = cloud_deploy.Rollout() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_rollout(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_rollout_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_rollout( cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_rollout_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Rollout() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Rollout() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_rollout(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_rollout_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_rollout( cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.CreateRolloutRequest, dict,]) def test_create_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateRolloutRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) def test_create_rollout_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateRolloutRequest() @pytest.mark.asyncio async def test_create_rollout_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateRolloutRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_create_rollout_async_from_dict(): await test_create_rollout_async(request_type=dict) def test_create_rollout_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateRolloutRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_rollout_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.CreateRolloutRequest() request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.create_rollout(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_rollout_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. 
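        # As with create_release, create_rollout returns a long-running
        # operation, so the stub yields a raw operations_pb2.Operation.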
call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_rollout( parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].rollout mock_val = cloud_deploy.Rollout(name="name_value") assert arg == mock_val arg = args[0].rollout_id mock_val = "rollout_id_value" assert arg == mock_val def test_create_rollout_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_rollout( cloud_deploy.CreateRolloutRequest(), parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) @pytest.mark.asyncio async def test_create_rollout_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_rollout( parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].rollout mock_val = cloud_deploy.Rollout(name="name_value") assert arg == mock_val arg = args[0].rollout_id mock_val = "rollout_id_value" assert arg == mock_val @pytest.mark.asyncio async def test_create_rollout_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_rollout( cloud_deploy.CreateRolloutRequest(), parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.GetConfigRequest, dict,]) def test_get_config(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloud_deploy.Config( name="name_value", default_skaffold_version="default_skaffold_version_value", ) response = client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetConfigRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Config) assert response.name == "name_value" assert response.default_skaffold_version == "default_skaffold_version_value" def test_get_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetConfigRequest() @pytest.mark.asyncio async def test_get_config_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Config( name="name_value", default_skaffold_version="default_skaffold_version_value", ) ) response = await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetConfigRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloud_deploy.Config) assert response.name == "name_value" assert response.default_skaffold_version == "default_skaffold_version_value" @pytest.mark.asyncio async def test_get_config_async_from_dict(): await test_get_config_async(request_type=dict) def test_get_config_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = cloud_deploy.GetConfigRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = cloud_deploy.Config() client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_config_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = cloud_deploy.GetConfigRequest() request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) await client.get_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request # Establish that the field header was sent. _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_config_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Config() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_config(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_config_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_config( cloud_deploy.GetConfigRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_config_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_config), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloud_deploy.Config() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.get_config(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_config_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_config( cloud_deploy.GetConfigRequest(), name="name_value", ) def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudDeployClient( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide an api_key and a transport instance. transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = CloudDeployClient(client_options=options, transport=transport,) # It is an error to provide an api_key and a credential. options = mock.Mock() options.api_key = "api_key" with pytest.raises(ValueError): client = CloudDeployClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudDeployClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) client = CloudDeployClient(transport=transport) assert client.transport is transport def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudDeployGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport,], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudDeployGrpcTransport,) def test_cloud_deploy_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudDeployTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) def test_cloud_deploy_base_transport(): # Instantiate the base transport. with mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.CloudDeployTransport( credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly # raise NotImplementedError. 
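    # The abstract base transport defines the full RPC surface; every method
    # listed below must be overridden by a concrete transport.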
methods = ( "list_delivery_pipelines", "get_delivery_pipeline", "create_delivery_pipeline", "update_delivery_pipeline", "delete_delivery_pipeline", "list_targets", "get_target", "create_target", "update_target", "delete_target", "list_releases", "get_release", "create_release", "approve_rollout", "list_rollouts", "get_rollout", "create_rollout", "get_config", ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) with pytest.raises(NotImplementedError): transport.close() # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): transport.operations_client def test_cloud_deploy_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudDeployTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_cloud_deploy_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudDeployTransport() adc.assert_called_once() def test_cloud_deploy_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudDeployClient() adc.assert_called_once_with( scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport,], ) def test_cloud_deploy_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.CloudDeployGrpcTransport, grpc_helpers), (transports.CloudDeployGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_cloud_deploy_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
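    # "ADC" here means Application Default Credentials, i.e. whatever
    # google.auth.default() resolves to in the current environment.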
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])

        create_channel.assert_called_with(
            "clouddeploy.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="clouddeploy.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )


@pytest.mark.parametrize(
    "transport_class",
    [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport],
)
def test_cloud_deploy_grpc_transport_client_cert_source_for_mtls(transport_class):
    cred = ga_credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )


def test_cloud_deploy_host_no_port():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="clouddeploy.googleapis.com"
        ),
    )
    assert client.transport._host == "clouddeploy.googleapis.com:443"


def test_cloud_deploy_host_with_port():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="clouddeploy.googleapis.com:8000"
        ),
    )
    assert client.transport._host == "clouddeploy.googleapis.com:8000"


def test_cloud_deploy_grpc_transport_channel():
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.CloudDeployGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None


def test_cloud_deploy_grpc_asyncio_transport_channel():
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.CloudDeployGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport], ) def test_cloud_deploy_transport_channel_mtls_with_client_cert_source(transport_class): with mock.patch( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=client_cert_source_callback, ) adc.assert_called_once() grpc_ssl_channel_cred.assert_called_once_with( certificate_chain=b"cert bytes", private_key=b"key bytes" ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport], ) def test_cloud_deploy_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() with pytest.warns(DeprecationWarning): transport = transport_class( host="squid.clam.whelk", credentials=mock_cred, api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=None, ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel def test_cloud_deploy_grpc_lro_client(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsClient,) # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client def test_cloud_deploy_grpc_lro_async_client(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport # Ensure that we have a api-core operations client. assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) # Ensure that subsequent calls to the property send the exact same object. 
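    # The identity check below relies on the transport caching the operations
    # client after the first property access.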
assert transport.operations_client is transport.operations_client def test_build_path(): project = "squid" location = "clam" build = "whelk" expected = "projects/{project}/locations/{location}/builds/{build}".format( project=project, location=location, build=build, ) actual = CloudDeployClient.build_path(project, location, build) assert expected == actual def test_parse_build_path(): expected = { "project": "octopus", "location": "oyster", "build": "nudibranch", } path = CloudDeployClient.build_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_build_path(path) assert expected == actual def test_cluster_path(): project = "cuttlefish" location = "mussel" cluster = "winkle" expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( project=project, location=location, cluster=cluster, ) actual = CloudDeployClient.cluster_path(project, location, cluster) assert expected == actual def test_parse_cluster_path(): expected = { "project": "nautilus", "location": "scallop", "cluster": "abalone", } path = CloudDeployClient.cluster_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_cluster_path(path) assert expected == actual def test_config_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}/config".format( project=project, location=location, ) actual = CloudDeployClient.config_path(project, location) assert expected == actual def test_parse_config_path(): expected = { "project": "whelk", "location": "octopus", } path = CloudDeployClient.config_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_config_path(path) assert expected == actual def test_delivery_pipeline_path(): project = "oyster" location = "nudibranch" delivery_pipeline = "cuttlefish" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}".format( project=project, location=location, delivery_pipeline=delivery_pipeline, ) actual = CloudDeployClient.delivery_pipeline_path( project, location, delivery_pipeline ) assert expected == actual def test_parse_delivery_pipeline_path(): expected = { "project": "mussel", "location": "winkle", "delivery_pipeline": "nautilus", } path = CloudDeployClient.delivery_pipeline_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_delivery_pipeline_path(path) assert expected == actual def test_release_path(): project = "scallop" location = "abalone" delivery_pipeline = "squid" release = "clam" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( project=project, location=location, delivery_pipeline=delivery_pipeline, release=release, ) actual = CloudDeployClient.release_path( project, location, delivery_pipeline, release ) assert expected == actual def test_parse_release_path(): expected = { "project": "whelk", "location": "octopus", "delivery_pipeline": "oyster", "release": "nudibranch", } path = CloudDeployClient.release_path(**expected) # Check that the path construction is reversible. 
actual = CloudDeployClient.parse_release_path(path) assert expected == actual def test_rollout_path(): project = "cuttlefish" location = "mussel" delivery_pipeline = "winkle" release = "nautilus" rollout = "scallop" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, delivery_pipeline=delivery_pipeline, release=release, rollout=rollout, ) actual = CloudDeployClient.rollout_path( project, location, delivery_pipeline, release, rollout ) assert expected == actual def test_parse_rollout_path(): expected = { "project": "abalone", "location": "squid", "delivery_pipeline": "clam", "release": "whelk", "rollout": "octopus", } path = CloudDeployClient.rollout_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_rollout_path(path) assert expected == actual def test_target_path(): project = "oyster" location = "nudibranch" target = "cuttlefish" expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, target=target, ) actual = CloudDeployClient.target_path(project, location, target) assert expected == actual def test_parse_target_path(): expected = { "project": "mussel", "location": "winkle", "target": "nautilus", } path = CloudDeployClient.target_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_target_path(path) assert expected == actual def test_worker_pool_path(): project = "scallop" location = "abalone" worker_pool = "squid" expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, location=location, worker_pool=worker_pool, ) actual = CloudDeployClient.worker_pool_path(project, location, worker_pool) assert expected == actual def test_parse_worker_pool_path(): expected = { "project": "clam", "location": "whelk", "worker_pool": "octopus", } path = CloudDeployClient.worker_pool_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_worker_pool_path(path) assert expected == actual def test_common_billing_account_path(): billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) actual = CloudDeployClient.common_billing_account_path(billing_account) assert expected == actual def test_parse_common_billing_account_path(): expected = { "billing_account": "nudibranch", } path = CloudDeployClient.common_billing_account_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_common_billing_account_path(path) assert expected == actual def test_common_folder_path(): folder = "cuttlefish" expected = "folders/{folder}".format(folder=folder,) actual = CloudDeployClient.common_folder_path(folder) assert expected == actual def test_parse_common_folder_path(): expected = { "folder": "mussel", } path = CloudDeployClient.common_folder_path(**expected) # Check that the path construction is reversible. 
actual = CloudDeployClient.parse_common_folder_path(path) assert expected == actual def test_common_organization_path(): organization = "winkle" expected = "organizations/{organization}".format(organization=organization,) actual = CloudDeployClient.common_organization_path(organization) assert expected == actual def test_parse_common_organization_path(): expected = { "organization": "nautilus", } path = CloudDeployClient.common_organization_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_common_organization_path(path) assert expected == actual def test_common_project_path(): project = "scallop" expected = "projects/{project}".format(project=project,) actual = CloudDeployClient.common_project_path(project) assert expected == actual def test_parse_common_project_path(): expected = { "project": "abalone", } path = CloudDeployClient.common_project_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_common_project_path(path) assert expected == actual def test_common_location_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) actual = CloudDeployClient.common_location_path(project, location) assert expected == actual def test_parse_common_location_path(): expected = { "project": "whelk", "location": "octopus", } path = CloudDeployClient.common_location_path(**expected) # Check that the path construction is reversible. actual = CloudDeployClient.parse_common_location_path(path) assert expected == actual def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( transports.CloudDeployTransport, "_prep_wrapped_messages" ) as prep: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( transports.CloudDeployTransport, "_prep_wrapped_messages" ) as prep: transport_class = CloudDeployClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @pytest.mark.asyncio async def test_transport_close_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() def test_transport_close(): transports = { "grpc": "_grpc_channel", } for transport, close_name in transports.items(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( type(getattr(client.transport, close_name)), "close" ) as close: with client: close.assert_not_called() close.assert_called_once() def test_client_ctx(): transports = [ "grpc", ] for transport in transports: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
with mock.patch.object(type(client.transport), "close") as close: close.assert_not_called() with client: pass close.assert_called() @pytest.mark.parametrize( "client_class,transport_class", [ (CloudDeployClient, transports.CloudDeployGrpcTransport), (CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport), ], ) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True ) as get_api_key_credentials: mock_cred = mock.Mock() get_api_key_credentials.return_value = mock_cred options = client_options.ClientOptions() options.api_key = "api_key" with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, )
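# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generated test suite): what the
# API-key and transport-close behaviour exercised above looks like from a
# caller's side, assuming a google-auth version that supports API-key
# credentials. The key value is a hypothetical placeholder, and the names
# client_options / CloudDeployClient come from this module's top-level
# imports.
# ---------------------------------------------------------------------------
def _example_api_key_client():
    # Setting options.api_key makes the library obtain API-key credentials
    # instead of ADC, mirroring test_api_key_credentials above.
    options = client_options.ClientOptions()
    options.api_key = "hypothetical-api-key"
    # Entering the client as a context manager and leaving it closes the
    # underlying gRPC channel, mirroring test_client_ctx and
    # test_transport_close above.
    with CloudDeployClient(client_options=options) as client:
        return client.transport._host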
avg_line_length: 39.111605
max_line_length: 140
alphanum_fraction: 0.686491
import os import mock import grpc from grpc.experimental import aio import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import operation_async from google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployAsyncClient from google.cloud.deploy_v1.services.cloud_deploy import CloudDeployClient from google.cloud.deploy_v1.services.cloud_deploy import pagers from google.cloud.deploy_v1.services.cloud_deploy import transports from google.cloud.deploy_v1.types import cloud_deploy from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 from google.protobuf import timestamp_pb2 import google.auth def client_cert_source_callback(): return b"cert bytes", b"key bytes" def modify_default_endpoint(client): return ( "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT ) def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" assert CloudDeployClient._get_default_mtls_endpoint(None) is None assert ( CloudDeployClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint ) assert ( CloudDeployClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( CloudDeployClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( CloudDeployClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) assert CloudDeployClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi @pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient,]) def test_cloud_deploy_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} client = client_class.from_service_account_info(info) assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "clouddeploy.googleapis.com:443" @pytest.mark.parametrize( "transport_class,transport_name", [ (transports.CloudDeployGrpcTransport, "grpc"), (transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio"), ], ) def test_cloud_deploy_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, 
always_use_jwt_access=False) use_jwt.assert_not_called() @pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient,]) def test_cloud_deploy_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds assert isinstance(client, client_class) assert client.transport._host == "clouddeploy.googleapis.com:443" def test_cloud_deploy_client_get_transport_class(): transport = CloudDeployClient.get_transport_class() available_transports = [ transports.CloudDeployGrpcTransport, ] assert transport in available_transports transport = CloudDeployClient.get_transport_class("grpc") assert transport == transports.CloudDeployGrpcTransport @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) @mock.patch.object( CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient) ) @mock.patch.object( CloudDeployAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployAsyncClient), ) def test_cloud_deploy_client_client_options( client_class, transport_class, transport_name ): with mock.patch.object(CloudDeployClient, "get_transport_class") as gtc: transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. with mock.patch.object(CloudDeployClient, "get_transport_class") as gtc: client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc", "true"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", "true", ), (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc", "false"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", "false", ), ], ) @mock.patch.object( CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient) ) @mock.patch.object( CloudDeployAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_cloud_deploy_client_mtls_env_auto( client_class, transport_class, transport_name, use_client_cert_env ): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. # Check the case client_cert_source is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None expected_host = client.DEFAULT_ENDPOINT else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): with mock.patch( "google.auth.transport.mtls.default_client_cert_source", return_value=client_cert_source_callback, ): if use_client_cert_env == "false": expected_host = client.DEFAULT_ENDPOINT expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT expected_client_cert_source = client_cert_source_callback patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=expected_host, scopes=None, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=False, ): patched.return_value = None client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class", [CloudDeployClient, CloudDeployAsyncClient]) @mock.patch.object( CloudDeployClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployClient) ) @mock.patch.object( CloudDeployAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudDeployAsyncClient), ) def test_cloud_deploy_client_get_mtls_endpoint_and_cert_source(client_class): mock_client_cert_source = mock.Mock() # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): mock_api_endpoint = "foo" options = client_options.ClientOptions( client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options ) assert api_endpoint == mock_api_endpoint assert cert_source == mock_client_cert_source # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): mock_client_cert_source = mock.Mock() mock_api_endpoint = "foo" options = client_options.ClientOptions( client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint ) api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( options ) assert api_endpoint == mock_api_endpoint assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source is None # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=False, ): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_ENDPOINT assert cert_source is None with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): with mock.patch( "google.auth.transport.mtls.default_client_cert_source", return_value=mock_client_cert_source, ): ( api_endpoint, cert_source, ) = client_class.get_mtls_endpoint_and_cert_source() assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_cloud_deploy_client_client_options_scopes( client_class, transport_class, transport_name ): options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ (CloudDeployClient, transports.CloudDeployGrpcTransport, "grpc"), ( CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport, "grpc_asyncio", ), ], ) def test_cloud_deploy_client_client_options_credentials_file( client_class, transport_class, transport_name ): options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, 
always_use_jwt_access=True, ) def test_cloud_deploy_client_client_options_from_dict(): with mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployGrpcTransport.__init__" ) as grpc_transport: grpc_transport.return_value = None client = CloudDeployClient(client_options={"api_endpoint": "squid.clam.whelk"}) grpc_transport.assert_called_once_with( credentials=None, credentials_file=None, host="squid.clam.whelk", scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) @pytest.mark.parametrize( "request_type", [cloud_deploy.ListDeliveryPipelinesRequest, dict,] ) def test_list_delivery_pipelines(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = cloud_deploy.ListDeliveryPipelinesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) response = client.list_delivery_pipelines(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest() assert isinstance(response, pagers.ListDeliveryPipelinesPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] def test_list_delivery_pipelines_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: client.list_delivery_pipelines() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest() @pytest.mark.asyncio async def test_list_delivery_pipelines_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.ListDeliveryPipelinesRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListDeliveryPipelinesResponse( next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) ) response = await client.list_delivery_pipelines(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.ListDeliveryPipelinesRequest() assert isinstance(response, pagers.ListDeliveryPipelinesAsyncPager) assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @pytest.mark.asyncio async def test_list_delivery_pipelines_async_from_dict(): await test_list_delivery_pipelines_async(request_type=dict) def test_list_delivery_pipelines_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.ListDeliveryPipelinesRequest() request.parent = "parent/value" with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = cloud_deploy.ListDeliveryPipelinesResponse() client.list_delivery_pipelines(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] 
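# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generated test suite): how the
# list_delivery_pipelines RPC exercised above is typically consumed. The
# pager returned by the client fetches pages lazily; iterating it yields
# DeliveryPipeline messages across all pages. The parent resource name below
# is a hypothetical placeholder.
# ---------------------------------------------------------------------------
def _example_iterate_delivery_pipelines(client: CloudDeployClient):
    parent = "projects/my-project/locations/us-central1"  # hypothetical
    # The x-goog-request-params routing header checked in the field-header
    # test is derived automatically from request.parent by the GAPIC layer.
    for pipeline in client.list_delivery_pipelines(parent=parent):
        print(pipeline.name)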
@pytest.mark.asyncio async def test_list_delivery_pipelines_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.ListDeliveryPipelinesRequest() request.parent = "parent/value" with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListDeliveryPipelinesResponse() ) await client.list_delivery_pipelines(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_list_delivery_pipelines_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = cloud_deploy.ListDeliveryPipelinesResponse() client.list_delivery_pipelines(parent="parent_value",) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val def test_list_delivery_pipelines_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.list_delivery_pipelines( cloud_deploy.ListDeliveryPipelinesRequest(), parent="parent_value", ) @pytest.mark.asyncio async def test_list_delivery_pipelines_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.return_value = cloud_deploy.ListDeliveryPipelinesResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.ListDeliveryPipelinesResponse() ) response = await client.list_delivery_pipelines(parent="parent_value",) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val @pytest.mark.asyncio async def test_list_delivery_pipelines_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.list_delivery_pipelines( cloud_deploy.ListDeliveryPipelinesRequest(), parent="parent_value", ) def test_list_delivery_pipelines_pager(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) metadata = () metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_delivery_pipelines(request={}) assert pager._metadata == metadata results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in results) def 
test_list_delivery_pipelines_pages(transport_name: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials, transport=transport_name, ) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__" ) as call: call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) pages = list(client.list_delivery_pipelines(request={}).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_list_delivery_pipelines_async_pager(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__", new_callable=mock.AsyncMock, ) as call: call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) async_pager = await client.list_delivery_pipelines(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, cloud_deploy.DeliveryPipeline) for i in responses) @pytest.mark.asyncio async def test_list_delivery_pipelines_async_pages(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) with mock.patch.object( type(client.transport.list_delivery_pipelines), "__call__", new_callable=mock.AsyncMock, ) as call: call.side_effect = ( cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], next_page_token="abc", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[], next_page_token="def", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[cloud_deploy.DeliveryPipeline(),], next_page_token="ghi", ), cloud_deploy.ListDeliveryPipelinesResponse( delivery_pipelines=[ cloud_deploy.DeliveryPipeline(), cloud_deploy.DeliveryPipeline(), ], ), RuntimeError, ) pages = [] async for page_ in (await client.list_delivery_pipelines(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [cloud_deploy.GetDeliveryPipelineRequest, dict,] ) def test_get_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with 
mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = cloud_deploy.DeliveryPipeline( name="name_value", uid="uid_value", description="description_value", etag="etag_value", serial_pipeline=cloud_deploy.SerialPipeline( stages=[cloud_deploy.Stage(target_id="target_id_value")] ), ) response = client.get_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetDeliveryPipelineRequest() assert isinstance(response, cloud_deploy.DeliveryPipeline) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.etag == "etag_value" def test_get_delivery_pipeline_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: client.get_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetDeliveryPipelineRequest() @pytest.mark.asyncio async def test_get_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.DeliveryPipeline( name="name_value", uid="uid_value", description="description_value", etag="etag_value", ) ) response = await client.get_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetDeliveryPipelineRequest() assert isinstance(response, cloud_deploy.DeliveryPipeline) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_delivery_pipeline_async_from_dict(): await test_get_delivery_pipeline_async(request_type=dict) def test_get_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.GetDeliveryPipelineRequest() request.name = "name/value" with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = cloud_deploy.DeliveryPipeline() client.get_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.GetDeliveryPipelineRequest() request.name = "name/value" with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.DeliveryPipeline() ) await client.get_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_delivery_pipeline_flattened(): client = 
CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = cloud_deploy.DeliveryPipeline() client.get_delivery_pipeline(name="name_value",) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.get_delivery_pipeline( cloud_deploy.GetDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.get_delivery_pipeline), "__call__" ) as call: call.return_value = cloud_deploy.DeliveryPipeline() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.DeliveryPipeline() ) response = await client.get_delivery_pipeline(name="name_value",) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_delivery_pipeline_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.get_delivery_pipeline( cloud_deploy.GetDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.parametrize( "request_type", [cloud_deploy.CreateDeliveryPipelineRequest, dict,] ) def test_create_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest() assert isinstance(response, future.Future) def test_create_delivery_pipeline_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: client.create_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest() @pytest.mark.asyncio async def test_create_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateDeliveryPipelineRequest() assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_create_delivery_pipeline_async_from_dict(): await test_create_delivery_pipeline_async(request_type=dict) def test_create_delivery_pipeline_field_headers(): client = 
CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.CreateDeliveryPipelineRequest() request.parent = "parent/value" with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.CreateDeliveryPipelineRequest() request.parent = "parent/value" with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.create_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_delivery_pipeline( parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].delivery_pipeline_id mock_val = "delivery_pipeline_id_value" assert arg == mock_val def test_create_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.create_delivery_pipeline( cloud_deploy.CreateDeliveryPipelineRequest(), parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) @pytest.mark.asyncio async def test_create_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.create_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_delivery_pipeline( parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].delivery_pipeline_id mock_val = "delivery_pipeline_id_value" assert arg == mock_val @pytest.mark.asyncio async def test_create_delivery_pipeline_flattened_error_async(): client = 
CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.create_delivery_pipeline( cloud_deploy.CreateDeliveryPipelineRequest(), parent="parent_value", delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), delivery_pipeline_id="delivery_pipeline_id_value", ) @pytest.mark.parametrize( "request_type", [cloud_deploy.UpdateDeliveryPipelineRequest, dict,] ) def test_update_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/spam") response = client.update_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest() assert isinstance(response, future.Future) def test_update_delivery_pipeline_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: client.update_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest() @pytest.mark.asyncio async def test_update_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.update_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.UpdateDeliveryPipelineRequest() assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_update_delivery_pipeline_async_from_dict(): await test_update_delivery_pipeline_async(request_type=dict) def test_update_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.UpdateDeliveryPipelineRequest() request.delivery_pipeline.name = "delivery_pipeline.name/value" with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "delivery_pipeline.name=delivery_pipeline.name/value", ) in kw["metadata"] @pytest.mark.asyncio async def test_update_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.UpdateDeliveryPipelineRequest() request.delivery_pipeline.name = "delivery_pipeline.name/value" with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.update_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] 
assert args[0] == request _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", "delivery_pipeline.name=delivery_pipeline.name/value", ) in kw["metadata"] def test_update_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_delivery_pipeline( delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val def test_update_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.update_delivery_pipeline( cloud_deploy.UpdateDeliveryPipelineRequest(), delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.update_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.update_delivery_pipeline( delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].delivery_pipeline mock_val = cloud_deploy.DeliveryPipeline(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio async def test_update_delivery_pipeline_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.update_delivery_pipeline( cloud_deploy.UpdateDeliveryPipelineRequest(), delivery_pipeline=cloud_deploy.DeliveryPipeline(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize( "request_type", [cloud_deploy.DeleteDeliveryPipelineRequest, dict,] ) def test_delete_delivery_pipeline(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/spam") response = client.delete_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest() assert isinstance(response, future.Future) def test_delete_delivery_pipeline_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: 
client.delete_delivery_pipeline() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest() @pytest.mark.asyncio async def test_delete_delivery_pipeline_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteDeliveryPipelineRequest, ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.delete_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.DeleteDeliveryPipelineRequest() assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_delete_delivery_pipeline_async_from_dict(): await test_delete_delivery_pipeline_async(request_type=dict) def test_delete_delivery_pipeline_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.DeleteDeliveryPipelineRequest() request.name = "name/value" with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_delivery_pipeline(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_delete_delivery_pipeline_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.DeleteDeliveryPipelineRequest() request.name = "name/value" with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.delete_delivery_pipeline(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_delete_delivery_pipeline_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") client.delete_delivery_pipeline(name="name_value",) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_delete_delivery_pipeline_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.delete_delivery_pipeline( cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value", ) @pytest.mark.asyncio async def test_delete_delivery_pipeline_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object( type(client.transport.delete_delivery_pipeline), "__call__" ) as call: call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.delete_delivery_pipeline(name="name_value",) 

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_delete_delivery_pipeline_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.delete_delivery_pipeline(
            cloud_deploy.DeleteDeliveryPipelineRequest(), name="name_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.ListTargetsRequest, dict,])
def test_list_targets(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.return_value = cloud_deploy.ListTargetsResponse(
            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
        )
        response = client.list_targets(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListTargetsRequest()

    assert isinstance(response, pagers.ListTargetsPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


def test_list_targets_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        client.list_targets()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListTargetsRequest()


@pytest.mark.asyncio
async def test_list_targets_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.ListTargetsRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListTargetsResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )
        response = await client.list_targets(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListTargetsRequest()

    assert isinstance(response, pagers.ListTargetsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


@pytest.mark.asyncio
async def test_list_targets_async_from_dict():
    await test_list_targets_async(request_type=dict)


def test_list_targets_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ListTargetsRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.return_value = cloud_deploy.ListTargetsResponse()
        client.list_targets(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_targets_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ListTargetsRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListTargetsResponse()
        )
        await client.list_targets(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_targets_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.return_value = cloud_deploy.ListTargetsResponse()
        client.list_targets(parent="parent_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


def test_list_targets_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.list_targets(
            cloud_deploy.ListTargetsRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_targets_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.return_value = cloud_deploy.ListTargetsResponse()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListTargetsResponse()
        )
        response = await client.list_targets(parent="parent_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_list_targets_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.list_targets(
            cloud_deploy.ListTargetsRequest(), parent="parent_value",
        )


def test_list_targets_pager(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
    )

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.side_effect = (
            cloud_deploy.ListTargetsResponse(
                targets=[
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(),], next_page_token="ghi",
            ),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_targets(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, cloud_deploy.Target) for i in results)


def test_list_targets_pages(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
    )

    with mock.patch.object(type(client.transport.list_targets), "__call__") as call:
        call.side_effect = (
            cloud_deploy.ListTargetsResponse(
                targets=[
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(),], next_page_token="ghi",
            ),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_targets(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
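
# A note on the pager tests above and below: `call.side_effect` yields one
# response per page and then raises RuntimeError, so a pager that requests
# more pages than the mocked sequence provides fails loudly instead of
# looping forever. Consuming the pager should surface exactly the six
# resource messages spread across the four mocked pages.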


@pytest.mark.asyncio
async def test_list_targets_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,)

    with mock.patch.object(
        type(client.transport.list_targets), "__call__", new_callable=mock.AsyncMock
    ) as call:
        call.side_effect = (
            cloud_deploy.ListTargetsResponse(
                targets=[
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(),], next_page_token="ghi",
            ),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_targets(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, cloud_deploy.Target) for i in responses)


@pytest.mark.asyncio
async def test_list_targets_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,)

    with mock.patch.object(
        type(client.transport.list_targets), "__call__", new_callable=mock.AsyncMock
    ) as call:
        call.side_effect = (
            cloud_deploy.ListTargetsResponse(
                targets=[
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                    cloud_deploy.Target(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListTargetsResponse(targets=[], next_page_token="def",),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(),], next_page_token="ghi",
            ),
            cloud_deploy.ListTargetsResponse(
                targets=[cloud_deploy.Target(), cloud_deploy.Target(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_targets(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [cloud_deploy.GetTargetRequest, dict,])
def test_get_target(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        call.return_value = cloud_deploy.Target(
            name="name_value",
            target_id="target_id_value",
            uid="uid_value",
            description="description_value",
            require_approval=True,
            etag="etag_value",
            gke=cloud_deploy.GkeCluster(cluster="cluster_value"),
        )
        response = client.get_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.GetTargetRequest()

    assert isinstance(response, cloud_deploy.Target)
    assert response.name == "name_value"
    assert response.target_id == "target_id_value"
    assert response.uid == "uid_value"
    assert response.description == "description_value"
    assert response.require_approval is True
    assert response.etag == "etag_value"


def test_get_target_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        client.get_target()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.GetTargetRequest()


@pytest.mark.asyncio
async def test_get_target_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.GetTargetRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.Target(
                name="name_value",
                target_id="target_id_value",
                uid="uid_value",
                description="description_value",
                require_approval=True,
                etag="etag_value",
            )
        )
        response = await client.get_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.GetTargetRequest()

    assert isinstance(response, cloud_deploy.Target)
    assert response.name == "name_value"
    assert response.target_id == "target_id_value"
    assert response.uid == "uid_value"
    assert response.description == "description_value"
    assert response.require_approval is True
    assert response.etag == "etag_value"


@pytest.mark.asyncio
async def test_get_target_async_from_dict():
    await test_get_target_async(request_type=dict)


def test_get_target_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.GetTargetRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        call.return_value = cloud_deploy.Target()
        client.get_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_target_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.GetTargetRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Target())
        await client.get_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_target_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        call.return_value = cloud_deploy.Target()
        client.get_target(name="name_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


def test_get_target_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.get_target(
            cloud_deploy.GetTargetRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_target_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.get_target), "__call__") as call:
        call.return_value = cloud_deploy.Target()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Target())
        response = await client.get_target(name="name_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_get_target_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.get_target(
            cloud_deploy.GetTargetRequest(), name="name_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.CreateTargetRequest, dict,])
def test_create_target(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.create_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.CreateTargetRequest()

    assert isinstance(response, future.Future)


def test_create_target_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        client.create_target()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.CreateTargetRequest()


@pytest.mark.asyncio
async def test_create_target_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateTargetRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.create_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.CreateTargetRequest()

    assert isinstance(response, future.Future)


@pytest.mark.asyncio
async def test_create_target_async_from_dict():
    await test_create_target_async(request_type=dict)


def test_create_target_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.CreateTargetRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.create_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_target_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.CreateTargetRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.create_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_target_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.create_target(
            parent="parent_value",
            target=cloud_deploy.Target(name="name_value"),
            target_id="target_id_value",
        )

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].target
        mock_val = cloud_deploy.Target(name="name_value")
        assert arg == mock_val
        arg = args[0].target_id
        mock_val = "target_id_value"
        assert arg == mock_val
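
# The *_flattened tests above and the *_flattened_error tests below exercise
# the two halves of the flattened-argument contract: keyword arguments are
# folded into the request message field by field, while passing a request
# object *and* flattened arguments at the same time must raise ValueError.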


def test_create_target_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.create_target(
            cloud_deploy.CreateTargetRequest(),
            parent="parent_value",
            target=cloud_deploy.Target(name="name_value"),
            target_id="target_id_value",
        )


@pytest.mark.asyncio
async def test_create_target_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.create_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.create_target(
            parent="parent_value",
            target=cloud_deploy.Target(name="name_value"),
            target_id="target_id_value",
        )

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].target
        mock_val = cloud_deploy.Target(name="name_value")
        assert arg == mock_val
        arg = args[0].target_id
        mock_val = "target_id_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_create_target_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.create_target(
            cloud_deploy.CreateTargetRequest(),
            parent="parent_value",
            target=cloud_deploy.Target(name="name_value"),
            target_id="target_id_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.UpdateTargetRequest, dict,])
def test_update_target(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.update_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.update_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.UpdateTargetRequest()

    assert isinstance(response, future.Future)


def test_update_target_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.update_target), "__call__") as call:
        client.update_target()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.UpdateTargetRequest()


@pytest.mark.asyncio
async def test_update_target_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.UpdateTargetRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.update_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.update_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.UpdateTargetRequest()

    assert isinstance(response, future.Future)


@pytest.mark.asyncio
async def test_update_target_async_from_dict():
    await test_update_target_async(request_type=dict)


def test_update_target_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.UpdateTargetRequest()

    request.target.name = "target.name/value"

    with mock.patch.object(type(client.transport.update_target), "__call__") as call:
operations_pb2.Operation(name="operations/op") client.update_target(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "target.name=target.name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_update_target_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.UpdateTargetRequest() request.target.name = "target.name/value" with mock.patch.object(type(client.transport.update_target), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.update_target(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "target.name=target.name/value",) in kw["metadata"] def test_update_target_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.update_target), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.update_target( target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].target mock_val = cloud_deploy.Target(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val def test_update_target_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.update_target( cloud_deploy.UpdateTargetRequest(), target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_target_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.update_target), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.update_target( target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].target mock_val = cloud_deploy.Target(name="name_value") assert arg == mock_val arg = args[0].update_mask mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) assert arg == mock_val @pytest.mark.asyncio async def test_update_target_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.update_target( cloud_deploy.UpdateTargetRequest(), target=cloud_deploy.Target(name="name_value"), update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.parametrize("request_type", [cloud_deploy.DeleteTargetRequest, dict,]) def test_delete_target(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.delete_target), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/spam") 
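
# Each RPC is exercised once with the proto request class and once with a
# plain dict (see the @pytest.mark.parametrize decorators): the GAPIC layer
# accepts both forms and coerces a dict into the corresponding request
# message before sending it over the wire.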


@pytest.mark.parametrize("request_type", [cloud_deploy.DeleteTargetRequest, dict,])
def test_delete_target(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.delete_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.DeleteTargetRequest()

    assert isinstance(response, future.Future)


def test_delete_target_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        client.delete_target()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.DeleteTargetRequest()


@pytest.mark.asyncio
async def test_delete_target_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.DeleteTargetRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.delete_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.DeleteTargetRequest()

    assert isinstance(response, future.Future)


@pytest.mark.asyncio
async def test_delete_target_async_from_dict():
    await test_delete_target_async(request_type=dict)


def test_delete_target_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.DeleteTargetRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.delete_target(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_delete_target_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.DeleteTargetRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.delete_target(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_delete_target_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.delete_target(name="name_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


def test_delete_target_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.delete_target(
            cloud_deploy.DeleteTargetRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_delete_target_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.delete_target), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.delete_target(name="name_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_delete_target_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.delete_target(
            cloud_deploy.DeleteTargetRequest(), name="name_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.ListReleasesRequest, dict,])
def test_list_releases(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.return_value = cloud_deploy.ListReleasesResponse(
            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
        )
        response = client.list_releases(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListReleasesRequest()

    assert isinstance(response, pagers.ListReleasesPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


def test_list_releases_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        client.list_releases()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListReleasesRequest()


@pytest.mark.asyncio
async def test_list_releases_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.ListReleasesRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListReleasesResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )
        response = await client.list_releases(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListReleasesRequest()

    assert isinstance(response, pagers.ListReleasesAsyncPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


@pytest.mark.asyncio
async def test_list_releases_async_from_dict():
    await test_list_releases_async(request_type=dict)


def test_list_releases_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ListReleasesRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.return_value = cloud_deploy.ListReleasesResponse()
        client.list_releases(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
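
# The *_field_headers tests assert that any value embedded in the request URL
# (here `parent`, elsewhere `name` or `target.name`) is mirrored into the
# `x-goog-request-params` gRPC metadata entry, which is how the backend
# routes the request to the right resource.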


@pytest.mark.asyncio
async def test_list_releases_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ListReleasesRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListReleasesResponse()
        )
        await client.list_releases(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_releases_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.return_value = cloud_deploy.ListReleasesResponse()
        client.list_releases(parent="parent_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


def test_list_releases_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.list_releases(
            cloud_deploy.ListReleasesRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_releases_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.return_value = cloud_deploy.ListReleasesResponse()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListReleasesResponse()
        )
        response = await client.list_releases(parent="parent_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_list_releases_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.list_releases(
            cloud_deploy.ListReleasesRequest(), parent="parent_value",
        )


def test_list_releases_pager(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
    )

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.side_effect = (
            cloud_deploy.ListReleasesResponse(
                releases=[
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(),], next_page_token="ghi",
            ),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_releases(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, cloud_deploy.Release) for i in results)


def test_list_releases_pages(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
    )

    with mock.patch.object(type(client.transport.list_releases), "__call__") as call:
        call.side_effect = (
            cloud_deploy.ListReleasesResponse(
                releases=[
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(),], next_page_token="ghi",
            ),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_releases(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_list_releases_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,)

    with mock.patch.object(
        type(client.transport.list_releases), "__call__", new_callable=mock.AsyncMock
    ) as call:
        call.side_effect = (
            cloud_deploy.ListReleasesResponse(
                releases=[
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(),], next_page_token="ghi",
            ),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_releases(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, cloud_deploy.Release) for i in responses)


@pytest.mark.asyncio
async def test_list_releases_async_pages():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,)

    with mock.patch.object(
        type(client.transport.list_releases), "__call__", new_callable=mock.AsyncMock
    ) as call:
        call.side_effect = (
            cloud_deploy.ListReleasesResponse(
                releases=[
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                    cloud_deploy.Release(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListReleasesResponse(releases=[], next_page_token="def",),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(),], next_page_token="ghi",
            ),
            cloud_deploy.ListReleasesResponse(
                releases=[cloud_deploy.Release(), cloud_deploy.Release(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_releases(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [cloud_deploy.GetReleaseRequest, dict,])
def test_get_release(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        call.return_value = cloud_deploy.Release(
            name="name_value",
            uid="uid_value",
            description="description_value",
            skaffold_config_uri="skaffold_config_uri_value",
            skaffold_config_path="skaffold_config_path_value",
            render_state=cloud_deploy.Release.RenderState.SUCCEEDED,
            etag="etag_value",
            skaffold_version="skaffold_version_value",
        )
        response = client.get_release(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.GetReleaseRequest()

    assert isinstance(response, cloud_deploy.Release)
    assert response.name == "name_value"
    assert response.uid == "uid_value"
    assert response.description == "description_value"
    assert response.skaffold_config_uri == "skaffold_config_uri_value"
    assert response.skaffold_config_path == "skaffold_config_path_value"
    assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED
    assert response.etag == "etag_value"
    assert response.skaffold_version == "skaffold_version_value"
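
# The *_empty_call tests below are a coverage failsafe: calling a method with
# no request object and no flattened fields must still go through and send a
# default, empty request message of the expected type.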


def test_get_release_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        client.get_release()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.GetReleaseRequest()


@pytest.mark.asyncio
async def test_get_release_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.GetReleaseRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.Release(
                name="name_value",
                uid="uid_value",
                description="description_value",
                skaffold_config_uri="skaffold_config_uri_value",
                skaffold_config_path="skaffold_config_path_value",
                render_state=cloud_deploy.Release.RenderState.SUCCEEDED,
                etag="etag_value",
                skaffold_version="skaffold_version_value",
            )
        )
        response = await client.get_release(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.GetReleaseRequest()

    assert isinstance(response, cloud_deploy.Release)
    assert response.name == "name_value"
    assert response.uid == "uid_value"
    assert response.description == "description_value"
    assert response.skaffold_config_uri == "skaffold_config_uri_value"
    assert response.skaffold_config_path == "skaffold_config_path_value"
    assert response.render_state == cloud_deploy.Release.RenderState.SUCCEEDED
    assert response.etag == "etag_value"
    assert response.skaffold_version == "skaffold_version_value"


@pytest.mark.asyncio
async def test_get_release_async_from_dict():
    await test_get_release_async(request_type=dict)


def test_get_release_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.GetReleaseRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        call.return_value = cloud_deploy.Release()
        client.get_release(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_get_release_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.GetReleaseRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.Release()
        )
        await client.get_release(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_get_release_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        call.return_value = cloud_deploy.Release()
        client.get_release(name="name_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


def test_get_release_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.get_release(
            cloud_deploy.GetReleaseRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_get_release_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.get_release), "__call__") as call:
        call.return_value = cloud_deploy.Release()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.Release()
        )
        response = await client.get_release(name="name_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_get_release_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.get_release(
            cloud_deploy.GetReleaseRequest(), name="name_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.CreateReleaseRequest, dict,])
def test_create_release(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.create_release(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.CreateReleaseRequest()

    assert isinstance(response, future.Future)


def test_create_release_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        client.create_release()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.CreateReleaseRequest()


@pytest.mark.asyncio
async def test_create_release_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateReleaseRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.create_release(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.CreateReleaseRequest()

    assert isinstance(response, future.Future)


@pytest.mark.asyncio
async def test_create_release_async_from_dict():
    await test_create_release_async(request_type=dict)


def test_create_release_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.CreateReleaseRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.create_release(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_create_release_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.CreateReleaseRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )
        await client.create_release(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_create_release_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        client.create_release(
            parent="parent_value",
            release=cloud_deploy.Release(name="name_value"),
            release_id="release_id_value",
        )

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].release
        mock_val = cloud_deploy.Release(name="name_value")
        assert arg == mock_val
        arg = args[0].release_id
        mock_val = "release_id_value"
        assert arg == mock_val


def test_create_release_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.create_release(
            cloud_deploy.CreateReleaseRequest(),
            parent="parent_value",
            release=cloud_deploy.Release(name="name_value"),
            release_id="release_id_value",
        )


@pytest.mark.asyncio
async def test_create_release_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.create_release), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/op")
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.create_release(
            parent="parent_value",
            release=cloud_deploy.Release(name="name_value"),
            release_id="release_id_value",
        )

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val
        arg = args[0].release
        mock_val = cloud_deploy.Release(name="name_value")
        assert arg == mock_val
        arg = args[0].release_id
        mock_val = "release_id_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_create_release_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.create_release(
            cloud_deploy.CreateReleaseRequest(),
            parent="parent_value",
            release=cloud_deploy.Release(name="name_value"),
            release_id="release_id_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.ApproveRolloutRequest, dict,])
def test_approve_rollout(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        call.return_value = cloud_deploy.ApproveRolloutResponse()
        response = client.approve_rollout(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ApproveRolloutRequest()

    assert isinstance(response, cloud_deploy.ApproveRolloutResponse)


def test_approve_rollout_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        client.approve_rollout()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ApproveRolloutRequest()
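
# In the async variants the mocked stub returns
# grpc_helpers_async.FakeUnaryUnaryCall(...), which wraps the designated
# response in an awaitable so that `await client.<method>(...)` behaves like
# a real gRPC async unary-unary call.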


@pytest.mark.asyncio
async def test_approve_rollout_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.ApproveRolloutRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ApproveRolloutResponse()
        )
        response = await client.approve_rollout(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ApproveRolloutRequest()

    assert isinstance(response, cloud_deploy.ApproveRolloutResponse)


@pytest.mark.asyncio
async def test_approve_rollout_async_from_dict():
    await test_approve_rollout_async(request_type=dict)


def test_approve_rollout_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ApproveRolloutRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        call.return_value = cloud_deploy.ApproveRolloutResponse()
        client.approve_rollout(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_approve_rollout_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ApproveRolloutRequest()

    request.name = "name/value"

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ApproveRolloutResponse()
        )
        await client.approve_rollout(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]


def test_approve_rollout_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        call.return_value = cloud_deploy.ApproveRolloutResponse()
        client.approve_rollout(name="name_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


def test_approve_rollout_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.approve_rollout(
            cloud_deploy.ApproveRolloutRequest(), name="name_value",
        )


@pytest.mark.asyncio
async def test_approve_rollout_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.approve_rollout), "__call__") as call:
        call.return_value = cloud_deploy.ApproveRolloutResponse()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ApproveRolloutResponse()
        )
        response = await client.approve_rollout(name="name_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].name
        mock_val = "name_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_approve_rollout_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.approve_rollout(
            cloud_deploy.ApproveRolloutRequest(), name="name_value",
        )


@pytest.mark.parametrize("request_type", [cloud_deploy.ListRolloutsRequest, dict,])
def test_list_rollouts(request_type, transport: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = cloud_deploy.ListRolloutsResponse(
            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
        )
        response = client.list_rollouts(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListRolloutsRequest()

    assert isinstance(response, pagers.ListRolloutsPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


def test_list_rollouts_empty_call():
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        client.list_rollouts()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListRolloutsRequest()


@pytest.mark.asyncio
async def test_list_rollouts_async(
    transport: str = "grpc_asyncio", request_type=cloud_deploy.ListRolloutsRequest
):
    client = CloudDeployAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListRolloutsResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )
        response = await client.list_rollouts(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == cloud_deploy.ListRolloutsRequest()

    assert isinstance(response, pagers.ListRolloutsAsyncPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


@pytest.mark.asyncio
async def test_list_rollouts_async_from_dict():
    await test_list_rollouts_async(request_type=dict)


def test_list_rollouts_field_headers():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ListRolloutsRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = cloud_deploy.ListRolloutsResponse()
        client.list_rollouts(request)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


@pytest.mark.asyncio
async def test_list_rollouts_field_headers_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    request = cloud_deploy.ListRolloutsRequest()

    request.parent = "parent/value"

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListRolloutsResponse()
        )
        await client.list_rollouts(request)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]


def test_list_rollouts_flattened():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = cloud_deploy.ListRolloutsResponse()
        client.list_rollouts(parent="parent_value",)

        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


def test_list_rollouts_flattened_error():
    client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        client.list_rollouts(
            cloud_deploy.ListRolloutsRequest(), parent="parent_value",
        )


@pytest.mark.asyncio
async def test_list_rollouts_flattened_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.return_value = cloud_deploy.ListRolloutsResponse()
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_deploy.ListRolloutsResponse()
        )
        response = await client.list_rollouts(parent="parent_value",)

        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].parent
        mock_val = "parent_value"
        assert arg == mock_val


@pytest.mark.asyncio
async def test_list_rollouts_flattened_error_async():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)

    with pytest.raises(ValueError):
        await client.list_rollouts(
            cloud_deploy.ListRolloutsRequest(), parent="parent_value",
        )


def test_list_rollouts_pager(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
    )

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.side_effect = (
            cloud_deploy.ListRolloutsResponse(
                rollouts=[
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
            ),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_rollouts(request={})

        assert pager._metadata == metadata

        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, cloud_deploy.Rollout) for i in results)


def test_list_rollouts_pages(transport_name: str = "grpc"):
    client = CloudDeployClient(
        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
    )

    with mock.patch.object(type(client.transport.list_rollouts), "__call__") as call:
        call.side_effect = (
            cloud_deploy.ListRolloutsResponse(
                rollouts=[
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                ],
                next_page_token="abc",
            ),
            cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi",
            ),
            cloud_deploy.ListRolloutsResponse(
                rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_rollouts(request={}).pages)
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


@pytest.mark.asyncio
async def test_list_rollouts_async_pager():
    client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,)

    with mock.patch.object(
        type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock
    ) as call:
        call.side_effect = (
            cloud_deploy.ListRolloutsResponse(
                rollouts=[
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                    cloud_deploy.Rollout(),
                ],
                next_page_token="abc",
            ),
cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",), cloud_deploy.ListRolloutsResponse( rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi", ), cloud_deploy.ListRolloutsResponse( rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),], ), RuntimeError, ) async_pager = await client.list_rollouts(request={},) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: responses.append(response) assert len(responses) == 6 assert all(isinstance(i, cloud_deploy.Rollout) for i in responses) @pytest.mark.asyncio async def test_list_rollouts_async_pages(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials,) with mock.patch.object( type(client.transport.list_rollouts), "__call__", new_callable=mock.AsyncMock ) as call: call.side_effect = ( cloud_deploy.ListRolloutsResponse( rollouts=[ cloud_deploy.Rollout(), cloud_deploy.Rollout(), cloud_deploy.Rollout(), ], next_page_token="abc", ), cloud_deploy.ListRolloutsResponse(rollouts=[], next_page_token="def",), cloud_deploy.ListRolloutsResponse( rollouts=[cloud_deploy.Rollout(),], next_page_token="ghi", ), cloud_deploy.ListRolloutsResponse( rollouts=[cloud_deploy.Rollout(), cloud_deploy.Rollout(),], ), RuntimeError, ) pages = [] async for page_ in (await client.list_rollouts(request={})).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [cloud_deploy.GetRolloutRequest, dict,]) def test_get_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = cloud_deploy.Rollout( name="name_value", uid="uid_value", description="description_value", target_id="target_id_value", approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, state=cloud_deploy.Rollout.State.SUCCEEDED, failure_reason="failure_reason_value", deploying_build="deploying_build_value", etag="etag_value", ) response = client.get_rollout(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetRolloutRequest() assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.target_id == "target_id_value" assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL assert response.state == cloud_deploy.Rollout.State.SUCCEEDED assert response.failure_reason == "failure_reason_value" assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" def test_get_rollout_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: client.get_rollout() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetRolloutRequest() @pytest.mark.asyncio async def test_get_rollout_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = 
grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Rollout( name="name_value", uid="uid_value", description="description_value", target_id="target_id_value", approval_state=cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL, state=cloud_deploy.Rollout.State.SUCCEEDED, failure_reason="failure_reason_value", deploying_build="deploying_build_value", etag="etag_value", ) ) response = await client.get_rollout(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetRolloutRequest() assert isinstance(response, cloud_deploy.Rollout) assert response.name == "name_value" assert response.uid == "uid_value" assert response.description == "description_value" assert response.target_id == "target_id_value" assert response.approval_state == cloud_deploy.Rollout.ApprovalState.NEEDS_APPROVAL assert response.state == cloud_deploy.Rollout.State.SUCCEEDED assert response.failure_reason == "failure_reason_value" assert response.deploying_build == "deploying_build_value" assert response.etag == "etag_value" @pytest.mark.asyncio async def test_get_rollout_async_from_dict(): await test_get_rollout_async(request_type=dict) def test_get_rollout_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.GetRolloutRequest() request.name = "name/value" with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = cloud_deploy.Rollout() client.get_rollout(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_rollout_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.GetRolloutRequest() request.name = "name/value" with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Rollout() ) await client.get_rollout(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_rollout_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = cloud_deploy.Rollout() client.get_rollout(name="name_value",) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_rollout_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.get_rollout( cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_rollout_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.get_rollout), "__call__") as call: call.return_value = cloud_deploy.Rollout() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Rollout() ) response = await client.get_rollout(name="name_value",) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_rollout_flattened_error_async(): 
client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.get_rollout( cloud_deploy.GetRolloutRequest(), name="name_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.CreateRolloutRequest, dict,]) def test_create_rollout(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/spam") response = client.create_rollout(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateRolloutRequest() assert isinstance(response, future.Future) def test_create_rollout_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: client.create_rollout() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateRolloutRequest() @pytest.mark.asyncio async def test_create_rollout_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.CreateRolloutRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_rollout(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.CreateRolloutRequest() assert isinstance(response, future.Future) @pytest.mark.asyncio async def test_create_rollout_async_from_dict(): await test_create_rollout_async(request_type=dict) def test_create_rollout_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.CreateRolloutRequest() request.parent = "parent/value" with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_rollout(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio async def test_create_rollout_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.CreateRolloutRequest() request.parent = "parent/value" with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/op") ) await client.create_rollout(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] def test_create_rollout_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") client.create_rollout( parent="parent_value", 
rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].rollout mock_val = cloud_deploy.Rollout(name="name_value") assert arg == mock_val arg = args[0].rollout_id mock_val = "rollout_id_value" assert arg == mock_val def test_create_rollout_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.create_rollout( cloud_deploy.CreateRolloutRequest(), parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) @pytest.mark.asyncio async def test_create_rollout_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.create_rollout), "__call__") as call: call.return_value = operations_pb2.Operation(name="operations/op") call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") ) response = await client.create_rollout( parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].parent mock_val = "parent_value" assert arg == mock_val arg = args[0].rollout mock_val = cloud_deploy.Rollout(name="name_value") assert arg == mock_val arg = args[0].rollout_id mock_val = "rollout_id_value" assert arg == mock_val @pytest.mark.asyncio async def test_create_rollout_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.create_rollout( cloud_deploy.CreateRolloutRequest(), parent="parent_value", rollout=cloud_deploy.Rollout(name="name_value"), rollout_id="rollout_id_value", ) @pytest.mark.parametrize("request_type", [cloud_deploy.GetConfigRequest, dict,]) def test_get_config(request_type, transport: str = "grpc"): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = cloud_deploy.Config( name="name_value", default_skaffold_version="default_skaffold_version_value", ) response = client.get_config(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetConfigRequest() assert isinstance(response, cloud_deploy.Config) assert response.name == "name_value" assert response.default_skaffold_version == "default_skaffold_version_value" def test_get_config_empty_call(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) with mock.patch.object(type(client.transport.get_config), "__call__") as call: client.get_config() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetConfigRequest() @pytest.mark.asyncio async def test_get_config_async( transport: str = "grpc_asyncio", request_type=cloud_deploy.GetConfigRequest ): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) request = request_type() with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloud_deploy.Config( name="name_value", 
default_skaffold_version="default_skaffold_version_value", ) ) response = await client.get_config(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == cloud_deploy.GetConfigRequest() assert isinstance(response, cloud_deploy.Config) assert response.name == "name_value" assert response.default_skaffold_version == "default_skaffold_version_value" @pytest.mark.asyncio async def test_get_config_async_from_dict(): await test_get_config_async(request_type=dict) def test_get_config_field_headers(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.GetConfigRequest() request.name = "name/value" with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = cloud_deploy.Config() client.get_config(request) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] @pytest.mark.asyncio async def test_get_config_field_headers_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) request = cloud_deploy.GetConfigRequest() request.name = "name/value" with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) await client.get_config(request) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] assert args[0] == request _, _, kw = call.mock_calls[0] assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] def test_get_config_flattened(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = cloud_deploy.Config() client.get_config(name="name_value",) assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val def test_get_config_flattened_error(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): client.get_config( cloud_deploy.GetConfigRequest(), name="name_value", ) @pytest.mark.asyncio async def test_get_config_flattened_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with mock.patch.object(type(client.transport.get_config), "__call__") as call: call.return_value = cloud_deploy.Config() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_deploy.Config()) response = await client.get_config(name="name_value",) assert len(call.mock_calls) _, args, _ = call.mock_calls[0] arg = args[0].name mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio async def test_get_config_flattened_error_async(): client = CloudDeployAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) with pytest.raises(ValueError): await client.get_config( cloud_deploy.GetConfigRequest(), name="name_value", ) def test_credentials_transport_error(): transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudDeployClient( client_options={"credentials_file": "credentials.json"}, 
transport=transport, ) transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = CloudDeployClient(client_options=options, transport=transport,) options = mock.Mock() options.api_key = "api_key" with pytest.raises(ValueError): client = CloudDeployClient( client_options=options, credentials=ga_credentials.AnonymousCredentials() ) transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudDeployClient( client_options={"scopes": ["1", "2"]}, transport=transport, ) def test_transport_instance(): transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) client = CloudDeployClient(transport=transport) assert client.transport is transport def test_transport_get_channel(): transport = transports.CloudDeployGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudDeployGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport,], ) def test_transport_adc(transport_class): with mock.patch.object(google.auth, "default") as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): client = CloudDeployClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.CloudDeployGrpcTransport,) def test_cloud_deploy_base_transport_error(): with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudDeployTransport( credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) def test_cloud_deploy_base_transport(): with mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport.__init__" ) as Transport: Transport.return_value = None transport = transports.CloudDeployTransport( credentials=ga_credentials.AnonymousCredentials(), ) methods = ( "list_delivery_pipelines", "get_delivery_pipeline", "create_delivery_pipeline", "update_delivery_pipeline", "delete_delivery_pipeline", "list_targets", "get_target", "create_target", "update_target", "delete_target", "list_releases", "get_release", "create_release", "approve_rollout", "list_rollouts", "get_rollout", "create_rollout", "get_config", ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) with pytest.raises(NotImplementedError): transport.close() with pytest.raises(NotImplementedError): transport.operations_client def test_cloud_deploy_base_transport_with_credentials_file(): with mock.patch.object( google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudDeployTransport( credentials_file="credentials.json", quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", scopes=None, 
default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) def test_cloud_deploy_base_transport_with_adc(): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.deploy_v1.services.cloud_deploy.transports.CloudDeployTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudDeployTransport() adc.assert_called_once() def test_cloud_deploy_auth_adc(): with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudDeployClient() adc.assert_called_once_with( scopes=None, default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id=None, ) @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport,], ) def test_cloud_deploy_transport_auth_adc(transport_class): with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], default_scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) @pytest.mark.parametrize( "transport_class,grpc_helpers", [ (transports.CloudDeployGrpcTransport, grpc_helpers), (transports.CloudDeployGrpcAsyncIOTransport, grpc_helpers_async), ], ) def test_cloud_deploy_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object( google.auth, "default", autospec=True ) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( "clouddeploy.googleapis.com:443", credentials=creds, credentials_file=None, quota_project_id="octopus", default_scopes=("https://www.googleapis.com/auth/cloud-platform",), scopes=["1", "2"], default_host="clouddeploy.googleapis.com", ssl_credentials=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport], ) def test_cloud_deploy_grpc_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() with mock.patch.object(transport_class, "create_channel") as mock_create_channel: mock_ssl_channel_creds = mock.Mock() transport_class( host="squid.clam.whelk", credentials=cred, ssl_channel_credentials=mock_ssl_channel_creds, ) mock_create_channel.assert_called_once_with( "squid.clam.whelk:443", credentials=cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: transport_class( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback, ) expected_cert, expected_key = client_cert_source_callback() mock_ssl_cred.assert_called_once_with( certificate_chain=expected_cert, private_key=expected_key ) def test_cloud_deploy_host_no_port(): client = 
CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="clouddeploy.googleapis.com" ), ) assert client.transport._host == "clouddeploy.googleapis.com:443" def test_cloud_deploy_host_with_port(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="clouddeploy.googleapis.com:8000" ), ) assert client.transport._host == "clouddeploy.googleapis.com:8000" def test_cloud_deploy_grpc_transport_channel(): channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) transport = transports.CloudDeployGrpcTransport( host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" assert transport._ssl_channel_credentials == None def test_cloud_deploy_grpc_asyncio_transport_channel(): channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) transport = transports.CloudDeployGrpcAsyncIOTransport( host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport], ) def test_cloud_deploy_transport_channel_mtls_with_client_cert_source(transport_class): with mock.patch( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=client_cert_source_callback, ) adc.assert_called_once() grpc_ssl_channel_cred.assert_called_once_with( certificate_chain=b"cert bytes", private_key=b"key bytes" ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( "transport_class", [transports.CloudDeployGrpcTransport, transports.CloudDeployGrpcAsyncIOTransport], ) def test_cloud_deploy_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() with pytest.warns(DeprecationWarning): transport = transport_class( host="squid.clam.whelk", credentials=mock_cred, api_mtls_endpoint="mtls.squid.clam.whelk", client_cert_source=None, ) grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, 
scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel def test_cloud_deploy_grpc_lro_client(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) transport = client.transport assert isinstance(transport.operations_client, operations_v1.OperationsClient,) assert transport.operations_client is transport.operations_client def test_cloud_deploy_grpc_lro_async_client(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) transport = client.transport assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,) assert transport.operations_client is transport.operations_client def test_build_path(): project = "squid" location = "clam" build = "whelk" expected = "projects/{project}/locations/{location}/builds/{build}".format( project=project, location=location, build=build, ) actual = CloudDeployClient.build_path(project, location, build) assert expected == actual def test_parse_build_path(): expected = { "project": "octopus", "location": "oyster", "build": "nudibranch", } path = CloudDeployClient.build_path(**expected) actual = CloudDeployClient.parse_build_path(path) assert expected == actual def test_cluster_path(): project = "cuttlefish" location = "mussel" cluster = "winkle" expected = "projects/{project}/locations/{location}/clusters/{cluster}".format( project=project, location=location, cluster=cluster, ) actual = CloudDeployClient.cluster_path(project, location, cluster) assert expected == actual def test_parse_cluster_path(): expected = { "project": "nautilus", "location": "scallop", "cluster": "abalone", } path = CloudDeployClient.cluster_path(**expected) actual = CloudDeployClient.parse_cluster_path(path) assert expected == actual def test_config_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}/config".format( project=project, location=location, ) actual = CloudDeployClient.config_path(project, location) assert expected == actual def test_parse_config_path(): expected = { "project": "whelk", "location": "octopus", } path = CloudDeployClient.config_path(**expected) actual = CloudDeployClient.parse_config_path(path) assert expected == actual def test_delivery_pipeline_path(): project = "oyster" location = "nudibranch" delivery_pipeline = "cuttlefish" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}".format( project=project, location=location, delivery_pipeline=delivery_pipeline, ) actual = CloudDeployClient.delivery_pipeline_path( project, location, delivery_pipeline ) assert expected == actual def test_parse_delivery_pipeline_path(): expected = { "project": "mussel", "location": "winkle", "delivery_pipeline": "nautilus", } path = CloudDeployClient.delivery_pipeline_path(**expected) actual = CloudDeployClient.parse_delivery_pipeline_path(path) assert expected == actual def test_release_path(): project = "scallop" location = "abalone" delivery_pipeline = "squid" release = "clam" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}".format( project=project, location=location, delivery_pipeline=delivery_pipeline, release=release, ) actual = CloudDeployClient.release_path( project, location, delivery_pipeline, release ) assert expected == actual def 
test_parse_release_path(): expected = { "project": "whelk", "location": "octopus", "delivery_pipeline": "oyster", "release": "nudibranch", } path = CloudDeployClient.release_path(**expected) actual = CloudDeployClient.parse_release_path(path) assert expected == actual def test_rollout_path(): project = "cuttlefish" location = "mussel" delivery_pipeline = "winkle" release = "nautilus" rollout = "scallop" expected = "projects/{project}/locations/{location}/deliveryPipelines/{delivery_pipeline}/releases/{release}/rollouts/{rollout}".format( project=project, location=location, delivery_pipeline=delivery_pipeline, release=release, rollout=rollout, ) actual = CloudDeployClient.rollout_path( project, location, delivery_pipeline, release, rollout ) assert expected == actual def test_parse_rollout_path(): expected = { "project": "abalone", "location": "squid", "delivery_pipeline": "clam", "release": "whelk", "rollout": "octopus", } path = CloudDeployClient.rollout_path(**expected) actual = CloudDeployClient.parse_rollout_path(path) assert expected == actual def test_target_path(): project = "oyster" location = "nudibranch" target = "cuttlefish" expected = "projects/{project}/locations/{location}/targets/{target}".format( project=project, location=location, target=target, ) actual = CloudDeployClient.target_path(project, location, target) assert expected == actual def test_parse_target_path(): expected = { "project": "mussel", "location": "winkle", "target": "nautilus", } path = CloudDeployClient.target_path(**expected) actual = CloudDeployClient.parse_target_path(path) assert expected == actual def test_worker_pool_path(): project = "scallop" location = "abalone" worker_pool = "squid" expected = "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, location=location, worker_pool=worker_pool, ) actual = CloudDeployClient.worker_pool_path(project, location, worker_pool) assert expected == actual def test_parse_worker_pool_path(): expected = { "project": "clam", "location": "whelk", "worker_pool": "octopus", } path = CloudDeployClient.worker_pool_path(**expected) actual = CloudDeployClient.parse_worker_pool_path(path) assert expected == actual def test_common_billing_account_path(): billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) actual = CloudDeployClient.common_billing_account_path(billing_account) assert expected == actual def test_parse_common_billing_account_path(): expected = { "billing_account": "nudibranch", } path = CloudDeployClient.common_billing_account_path(**expected) actual = CloudDeployClient.parse_common_billing_account_path(path) assert expected == actual def test_common_folder_path(): folder = "cuttlefish" expected = "folders/{folder}".format(folder=folder,) actual = CloudDeployClient.common_folder_path(folder) assert expected == actual def test_parse_common_folder_path(): expected = { "folder": "mussel", } path = CloudDeployClient.common_folder_path(**expected) actual = CloudDeployClient.parse_common_folder_path(path) assert expected == actual def test_common_organization_path(): organization = "winkle" expected = "organizations/{organization}".format(organization=organization,) actual = CloudDeployClient.common_organization_path(organization) assert expected == actual def test_parse_common_organization_path(): expected = { "organization": "nautilus", } path = CloudDeployClient.common_organization_path(**expected) actual = 
CloudDeployClient.parse_common_organization_path(path) assert expected == actual def test_common_project_path(): project = "scallop" expected = "projects/{project}".format(project=project,) actual = CloudDeployClient.common_project_path(project) assert expected == actual def test_parse_common_project_path(): expected = { "project": "abalone", } path = CloudDeployClient.common_project_path(**expected) actual = CloudDeployClient.parse_common_project_path(path) assert expected == actual def test_common_location_path(): project = "squid" location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) actual = CloudDeployClient.common_location_path(project, location) assert expected == actual def test_parse_common_location_path(): expected = { "project": "whelk", "location": "octopus", } path = CloudDeployClient.common_location_path(**expected) actual = CloudDeployClient.parse_common_location_path(path) assert expected == actual def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( transports.CloudDeployTransport, "_prep_wrapped_messages" ) as prep: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) with mock.patch.object( transports.CloudDeployTransport, "_prep_wrapped_messages" ) as prep: transport_class = CloudDeployClient.get_transport_class() transport = transport_class( credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @pytest.mark.asyncio async def test_transport_close_async(): client = CloudDeployAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() def test_transport_close(): transports = { "grpc": "_grpc_channel", } for transport, close_name in transports.items(): client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object( type(getattr(client.transport, close_name)), "close" ) as close: with client: close.assert_not_called() close.assert_called_once() def test_client_ctx(): transports = [ "grpc", ] for transport in transports: client = CloudDeployClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(client.transport), "close") as close: close.assert_not_called() with client: pass close.assert_called() @pytest.mark.parametrize( "client_class,transport_class", [ (CloudDeployClient, transports.CloudDeployGrpcTransport), (CloudDeployAsyncClient, transports.CloudDeployGrpcAsyncIOTransport), ], ) def test_api_key_credentials(client_class, transport_class): with mock.patch.object( google.auth._default, "get_api_key_credentials", create=True ) as get_api_key_credentials: mock_cred = mock.Mock() get_api_key_credentials.return_value = mock_cred options = client_options.ClientOptions() options.api_key = "api_key" with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, )
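# --- Illustrative sketch (aside; not part of the dataset record above) ---
# Every generated GAPIC test in the preceding record follows one pattern:
# patch the transport callable, invoke the client method, then inspect
# `call.mock_calls` to verify the request object that was sent. A minimal,
# self-contained rendition of that pattern using only unittest.mock --
# FakeTransport and FakeClient are hypothetical stand-ins, not part of
# google-cloud-deploy.

from unittest import mock


class FakeTransport:
    """Stands in for the gRPC transport with one unary callable."""

    def list_rollouts(self, request):
        raise NotImplementedError("patched out in the test")


class FakeClient:
    """Stands in for the GAPIC client; real clients add retries/metadata."""

    def __init__(self, transport):
        self.transport = transport

    def list_rollouts(self, request):
        return self.transport.list_rollouts(request)


def test_mock_transport_pattern():
    client = FakeClient(FakeTransport())
    with mock.patch.object(client.transport, "list_rollouts") as call:
        call.return_value = {"next_page_token": "abc"}
        response = client.list_rollouts({"parent": "projects/p"})
    # Exactly one RPC was made, carrying the request we passed in.
    assert len(call.mock_calls) == 1
    _, args, _ = call.mock_calls[0]
    assert args[0] == {"parent": "projects/p"}
    assert response["next_page_token"] == "abc"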
true
true
f70032a4a068d348409de058a25c94f8ceef13fd
22,657
py
Python
scripts/printingValidation/venv/lib/python3.9/site-packages/skimage/morphology/tests/test_max_tree.py
Air-Factories-2-0/af2-hyperledger
7aeeb831cf03fdf7fe64f9500da17c02688a0886
[ "Apache-2.0" ]
5
2022-01-05T00:41:46.000Z
2022-03-21T07:22:58.000Z
scripts/printingValidation/venv/lib/python3.9/site-packages/skimage/morphology/tests/test_max_tree.py
Air-Factories-2-0/af2-hyperledger
7aeeb831cf03fdf7fe64f9500da17c02688a0886
[ "Apache-2.0" ]
null
null
null
scripts/printingValidation/venv/lib/python3.9/site-packages/skimage/morphology/tests/test_max_tree.py
Air-Factories-2-0/af2-hyperledger
7aeeb831cf03fdf7fe64f9500da17c02688a0886
[ "Apache-2.0" ]
2
2022-03-20T17:35:44.000Z
2022-03-21T18:30:31.000Z
import numpy as np from skimage.morphology import max_tree, area_closing, area_opening from skimage.morphology import max_tree_local_maxima, diameter_opening from skimage.morphology import diameter_closing from skimage.util import invert from skimage._shared.testing import assert_array_equal, TestCase eps = 1e-12 def _full_type_test(img, param, expected, func, param_scale=False, **keywords): # images as they are out = func(img, param, **keywords) assert_array_equal(out, expected) # unsigned int for dt in [np.uint32, np.uint64]: img_cast = img.astype(dt) out = func(img_cast, param, **keywords) exp_cast = expected.astype(dt) assert_array_equal(out, exp_cast) # float data_float = img.astype(np.float64) data_float = data_float / 255.0 expected_float = expected.astype(np.float64) expected_float = expected_float / 255.0 if param_scale: param_cast = param / 255.0 else: param_cast = param for dt in [np.float32, np.float64]: data_cast = data_float.astype(dt) out = func(data_cast, param_cast, **keywords) exp_cast = expected_float.astype(dt) error_img = 255.0 * exp_cast - 255.0 * out error = (error_img >= 1.0).sum() assert error < eps # signed images img_signed = img.astype(np.int16) img_signed = img_signed - 128 exp_signed = expected.astype(np.int16) exp_signed = exp_signed - 128 for dt in [np.int8, np.int16, np.int32, np.int64]: img_s = img_signed.astype(dt) out = func(img_s, param, **keywords) exp_s = exp_signed.astype(dt) assert_array_equal(out, exp_s) class TestMaxtree(TestCase): def test_max_tree(self): "Test for max tree" img_type = np.uint8 img = np.array([[10, 8, 8, 9], [7, 7, 9, 9], [8, 7, 10, 10], [9, 9, 10, 10]], dtype=img_type) P_exp = np.array([[1, 4, 1, 1], [4, 4, 3, 3], [1, 4, 3, 10], [3, 3, 10, 10]], dtype=np.int64) S_exp = np.array([4, 5, 9, 1, 2, 8, 3, 6, 7, 12, 13, 0, 10, 11, 14, 15], dtype=np.int64) for img_type in [np.uint8, np.uint16, np.uint32, np.uint64]: img = img.astype(img_type) P, S = max_tree(img, connectivity=2) assert_array_equal(P, P_exp) assert_array_equal(S, S_exp) for img_type in [np.int8, np.int16, np.int32, np.int64]: img = img.astype(img_type) img_shifted = img - 9 P, S = max_tree(img_shifted, connectivity=2) assert_array_equal(P, P_exp) assert_array_equal(S, S_exp) img_float = img.astype(float) img_float = (img_float - 8) / 2.0 for img_type in [np.float32, np.float64]: img_float = img_float.astype(img_type) P, S = max_tree(img_float, connectivity=2) assert_array_equal(P, P_exp) assert_array_equal(S, S_exp) return def test_area_closing(self): "Test for Area Closing (2 thresholds, all types)" # original image img = np.array( [[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 200, 240, 200, 200, 240, 240, 200, 240], [240, 200, 40, 240, 240, 240, 240, 240, 240, 240, 40, 240], [240, 240, 240, 240, 100, 240, 100, 100, 240, 240, 200, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255], [200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 40], [200, 200, 200, 100, 200, 200, 200, 240, 255, 255, 255, 255], [200, 200, 200, 100, 200, 200, 200, 240, 200, 200, 255, 255], [200, 200, 200, 200, 200, 40, 200, 240, 240, 100, 255, 255], [200, 40, 255, 255, 255, 40, 200, 255, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]], dtype=np.uint8) # expected area closing with area 2 expected_2 = np.array( [[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 240, 240, 200, 200, 240, 240, 200, 240], [240, 200, 200, 
240, 240, 240, 240, 240, 240, 240, 200, 240], [240, 240, 240, 240, 240, 240, 100, 100, 240, 240, 200, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255], [200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 255], [200, 200, 200, 100, 200, 200, 200, 240, 255, 255, 255, 255], [200, 200, 200, 100, 200, 200, 200, 240, 200, 200, 255, 255], [200, 200, 200, 200, 200, 40, 200, 240, 240, 200, 255, 255], [200, 200, 255, 255, 255, 40, 200, 255, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]], dtype=np.uint8) # expected diameter closing with diameter 4 expected_4 = np.array( [[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [200, 200, 200, 200, 200, 200, 200, 240, 240, 240, 255, 255], [200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 255], [200, 200, 200, 200, 200, 200, 200, 240, 255, 255, 255, 255], [200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 240, 240, 200, 255, 255], [200, 200, 255, 255, 255, 200, 200, 255, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]], dtype=np.uint8) # _full_type_test makes a test with many image types. _full_type_test(img, 2, expected_2, area_closing, connectivity=2) _full_type_test(img, 4, expected_4, area_closing, connectivity=2) P, S = max_tree(invert(img), connectivity=2) _full_type_test(img, 4, expected_4, area_closing, parent=P, tree_traverser=S) def test_area_opening(self): "Test for Area Opening (2 thresholds, all types)" # original image img = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 55, 15, 55, 55, 15, 15, 55, 15], [15, 55, 215, 15, 15, 15, 15, 15, 15, 15, 215, 15], [15, 15, 15, 15, 155, 15, 155, 155, 15, 15, 55, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0], [55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 215], [55, 55, 55, 155, 55, 55, 55, 15, 0, 0, 0, 0], [55, 55, 55, 155, 55, 55, 55, 15, 55, 55, 0, 0], [55, 55, 55, 55, 55, 215, 55, 15, 15, 155, 0, 0], [55, 215, 0, 0, 0, 215, 55, 0, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]], dtype=np.uint8) # expected area closing with area 2 expected_2 = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 15, 15, 55, 55, 15, 15, 55, 15], [15, 55, 55, 15, 15, 15, 15, 15, 15, 15, 55, 15], [15, 15, 15, 15, 15, 15, 155, 155, 15, 15, 55, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0], [55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 0], [55, 55, 55, 155, 55, 55, 55, 15, 0, 0, 0, 0], [55, 55, 55, 155, 55, 55, 55, 15, 55, 55, 0, 0], [55, 55, 55, 55, 55, 215, 55, 15, 15, 55, 0, 0], [55, 55, 0, 0, 0, 215, 55, 0, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]], dtype=np.uint8) # expected diameter closing with diameter 4 expected_4 = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [55, 55, 55, 55, 55, 55, 55, 15, 15, 15, 0, 0], [55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 0], [55, 55, 55, 55, 
55, 55, 55, 15, 0, 0, 0, 0], [55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 15, 15, 55, 0, 0], [55, 55, 0, 0, 0, 55, 55, 0, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]], dtype=np.uint8) # _full_type_test makes a test with many image types. _full_type_test(img, 2, expected_2, area_opening, connectivity=2) _full_type_test(img, 4, expected_4, area_opening, connectivity=2) P, S = max_tree(img, connectivity=2) _full_type_test(img, 4, expected_4, area_opening, parent=P, tree_traverser=S) def test_diameter_closing(self): "Test for Diameter Opening (2 thresholds, all types)" img = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93], [93, 63, 63, 63, 63, 86, 86, 86, 87, 43, 43, 91], [92, 89, 88, 86, 85, 85, 84, 85, 85, 43, 43, 89], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [90, 88, 86, 84, 83, 83, 82, 83, 83, 84, 86, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [92, 89, 23, 23, 85, 85, 84, 85, 85, 3, 3, 89], [93, 91, 23, 23, 87, 86, 86, 86, 87, 88, 3, 91], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]], dtype=np.uint8) ex2 = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93], [93, 63, 63, 63, 63, 86, 86, 86, 87, 43, 43, 91], [92, 89, 88, 86, 85, 85, 84, 85, 85, 43, 43, 89], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [90, 88, 86, 84, 83, 83, 83, 83, 83, 84, 86, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [92, 89, 23, 23, 85, 85, 84, 85, 85, 3, 3, 89], [93, 91, 23, 23, 87, 86, 86, 86, 87, 88, 3, 91], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]], dtype=np.uint8) ex4 = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93], [93, 63, 63, 63, 63, 86, 86, 86, 87, 84, 84, 91], [92, 89, 88, 86, 85, 85, 84, 85, 85, 84, 84, 89], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [90, 88, 86, 84, 83, 83, 83, 83, 83, 84, 86, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [92, 89, 84, 84, 85, 85, 84, 85, 85, 84, 84, 89], [93, 91, 84, 84, 87, 86, 86, 86, 87, 88, 84, 91], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]], dtype=np.uint8) # _full_type_test makes a test with many image types. 
_full_type_test(img, 2, ex2, diameter_closing, connectivity=2) _full_type_test(img, 4, ex4, diameter_closing, connectivity=2) P, S = max_tree(invert(img), connectivity=2) _full_type_test(img, 4, ex4, diameter_opening, parent=P, tree_traverser=S) def test_diameter_opening(self): "Test for Diameter Opening (2 thresholds, all types)" img = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10], [9, 40, 40, 40, 40, 16, 16, 16, 16, 60, 60, 11], [11, 13, 15, 16, 17, 18, 18, 18, 17, 60, 60, 13], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 15, 16, 18, 19, 19, 20, 19, 19, 18, 16, 15], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [11, 13, 80, 80, 17, 18, 18, 18, 17, 100, 100, 13], [9, 11, 80, 80, 16, 16, 16, 16, 16, 15, 100, 11], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]]) ex2 = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10], [9, 40, 40, 40, 40, 16, 16, 16, 16, 60, 60, 11], [11, 13, 15, 16, 17, 18, 18, 18, 17, 60, 60, 13], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 15, 16, 18, 19, 19, 19, 19, 19, 18, 16, 15], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [11, 13, 80, 80, 17, 18, 18, 18, 17, 100, 100, 13], [9, 11, 80, 80, 16, 16, 16, 16, 16, 15, 100, 11], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]]) ex4 = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10], [9, 40, 40, 40, 40, 16, 16, 16, 16, 18, 18, 11], [11, 13, 15, 16, 17, 18, 18, 18, 17, 18, 18, 13], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 15, 16, 18, 19, 19, 19, 19, 19, 18, 16, 15], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [11, 13, 18, 18, 17, 18, 18, 18, 17, 18, 18, 13], [9, 11, 18, 18, 16, 16, 16, 16, 16, 15, 18, 11], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]]) # _full_type_test makes a test with many image types. 
_full_type_test(img, 2, ex2, diameter_opening, connectivity=2) _full_type_test(img, 4, ex4, diameter_opening, connectivity=2) P, S = max_tree(img, connectivity=2) _full_type_test(img, 4, ex4, diameter_opening, parent=P, tree_traverser=S) def test_local_maxima(self): "local maxima for various data types" data = np.array([[10, 11, 13, 14, 14, 15, 14, 14, 13, 11], [11, 13, 15, 16, 16, 16, 16, 16, 15, 13], [13, 15, 40, 40, 18, 18, 18, 60, 60, 15], [14, 16, 40, 40, 19, 19, 19, 60, 60, 16], [14, 16, 18, 19, 19, 19, 19, 19, 18, 16], [15, 16, 18, 19, 19, 20, 19, 19, 18, 16], [14, 16, 18, 19, 19, 19, 19, 19, 18, 16], [14, 16, 80, 80, 19, 19, 19, 100, 100, 16], [13, 15, 80, 80, 18, 18, 18, 100, 100, 15], [11, 13, 15, 16, 16, 16, 16, 16, 15, 13]], dtype=np.uint8) expected_result = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.uint64) for dtype in [np.uint8, np.uint64, np.int8, np.int64]: test_data = data.astype(dtype) out = max_tree_local_maxima(test_data, connectivity=1) out_bin = out > 0 assert_array_equal(expected_result, out_bin) assert out.dtype == expected_result.dtype assert np.max(out) == 5 P, S = max_tree(test_data) out = max_tree_local_maxima(test_data, parent=P, tree_traverser=S) assert_array_equal(expected_result, out_bin) assert out.dtype == expected_result.dtype assert np.max(out) == 5 def test_extrema_float(self): "specific tests for float type" data = np.array([[0.10, 0.11, 0.13, 0.14, 0.14, 0.15, 0.14, 0.14, 0.13, 0.11], [0.11, 0.13, 0.15, 0.16, 0.16, 0.16, 0.16, 0.16, 0.15, 0.13], [0.13, 0.15, 0.40, 0.40, 0.18, 0.18, 0.18, 0.60, 0.60, 0.15], [0.14, 0.16, 0.40, 0.40, 0.19, 0.19, 0.19, 0.60, 0.60, 0.16], [0.14, 0.16, 0.18, 0.19, 0.19, 0.19, 0.19, 0.19, 0.18, 0.16], [0.15, 0.182, 0.18, 0.19, 0.204, 0.20, 0.19, 0.19, 0.18, 0.16], [0.14, 0.16, 0.18, 0.19, 0.19, 0.19, 0.19, 0.19, 0.18, 0.16], [0.14, 0.16, 0.80, 0.80, 0.19, 0.19, 0.19, 4.0, 1.0, 0.16], [0.13, 0.15, 0.80, 0.80, 0.18, 0.18, 0.18, 1.0, 1.0, 0.15], [0.11, 0.13, 0.15, 0.16, 0.16, 0.16, 0.16, 0.16, 0.15, 0.13]], dtype=np.float32) expected_result = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.uint8) # test for local maxima out = max_tree_local_maxima(data, connectivity=1) out_bin = out > 0 assert_array_equal(expected_result, out_bin) assert np.max(out) == 6 def test_3d(self): """tests the detection of maxima in 3D.""" img = np.zeros((8, 8, 8), dtype=np.uint8) local_maxima = np.zeros((8, 8, 8), dtype=np.uint64) # first maximum: only one pixel img[1, 1:3, 1:3] = 100 img[2, 2, 2] = 200 img[3, 1:3, 1:3] = 100 local_maxima[2, 2, 2] = 1 # second maximum: three pixels in z-direction img[5:8, 1, 1] = 200 local_maxima[5:8, 1, 1] = 1 # third: two maxima in 0 and 3. 
img[0, 5:8, 5:8] = 200 img[1, 6, 6] = 100 img[2, 5:7, 5:7] = 200 img[0:3, 5:8, 5:8] += 50 local_maxima[0, 5:8, 5:8] = 1 local_maxima[2, 5:7, 5:7] = 1 # four : one maximum in the corner of the square img[6:8, 6:8, 6:8] = 200 img[7, 7, 7] = 255 local_maxima[7, 7, 7] = 1 out = max_tree_local_maxima(img) out_bin = out > 0 assert_array_equal(local_maxima, out_bin) assert np.max(out) == 5
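# --- Illustrative sketch (aside; not part of the dataset record above) ---
# Minimal usage of the APIs the tests above exercise, assuming scikit-image
# is installed: run an attribute filter directly, then reuse a precomputed
# max-tree via parent/tree_traverser, mirroring what _full_type_test and
# test_area_closing do with P and S.

import numpy as np
from skimage.morphology import max_tree, area_closing
from skimage.util import invert

img = np.full((5, 5), 200, dtype=np.uint8)
img[2, 2] = 40  # a single-pixel dark minimum

# Dark structures covering fewer than area_threshold pixels are filled in.
closed = area_closing(img, area_threshold=2, connectivity=2)
assert closed[2, 2] == 200

# Closings act on minima, so the reusable tree is built on the inverted
# image; the result matches the direct call.
P, S = max_tree(invert(img), connectivity=2)
closed2 = area_closing(img, 2, parent=P, tree_traverser=S)
assert np.array_equal(closed, closed2)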
49.905286
76
0.404378
import numpy as np from skimage.morphology import max_tree, area_closing, area_opening from skimage.morphology import max_tree_local_maxima, diameter_opening from skimage.morphology import diameter_closing from skimage.util import invert from skimage._shared.testing import assert_array_equal, TestCase eps = 1e-12 def _full_type_test(img, param, expected, func, param_scale=False, **keywords): out = func(img, param, **keywords) assert_array_equal(out, expected) for dt in [np.uint32, np.uint64]: img_cast = img.astype(dt) out = func(img_cast, param, **keywords) exp_cast = expected.astype(dt) assert_array_equal(out, exp_cast) data_float = img.astype(np.float64) data_float = data_float / 255.0 expected_float = expected.astype(np.float64) expected_float = expected_float / 255.0 if param_scale: param_cast = param / 255.0 else: param_cast = param for dt in [np.float32, np.float64]: data_cast = data_float.astype(dt) out = func(data_cast, param_cast, **keywords) exp_cast = expected_float.astype(dt) error_img = 255.0 * exp_cast - 255.0 * out error = (error_img >= 1.0).sum() assert error < eps img_signed = img.astype(np.int16) img_signed = img_signed - 128 exp_signed = expected.astype(np.int16) exp_signed = exp_signed - 128 for dt in [np.int8, np.int16, np.int32, np.int64]: img_s = img_signed.astype(dt) out = func(img_s, param, **keywords) exp_s = exp_signed.astype(dt) assert_array_equal(out, exp_s) class TestMaxtree(TestCase): def test_max_tree(self): img_type = np.uint8 img = np.array([[10, 8, 8, 9], [7, 7, 9, 9], [8, 7, 10, 10], [9, 9, 10, 10]], dtype=img_type) P_exp = np.array([[1, 4, 1, 1], [4, 4, 3, 3], [1, 4, 3, 10], [3, 3, 10, 10]], dtype=np.int64) S_exp = np.array([4, 5, 9, 1, 2, 8, 3, 6, 7, 12, 13, 0, 10, 11, 14, 15], dtype=np.int64) for img_type in [np.uint8, np.uint16, np.uint32, np.uint64]: img = img.astype(img_type) P, S = max_tree(img, connectivity=2) assert_array_equal(P, P_exp) assert_array_equal(S, S_exp) for img_type in [np.int8, np.int16, np.int32, np.int64]: img = img.astype(img_type) img_shifted = img - 9 P, S = max_tree(img_shifted, connectivity=2) assert_array_equal(P, P_exp) assert_array_equal(S, S_exp) img_float = img.astype(float) img_float = (img_float - 8) / 2.0 for img_type in [np.float32, np.float64]: img_float = img_float.astype(img_type) P, S = max_tree(img_float, connectivity=2) assert_array_equal(P, P_exp) assert_array_equal(S, S_exp) return def test_area_closing(self): img = np.array( [[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 200, 240, 200, 200, 240, 240, 200, 240], [240, 200, 40, 240, 240, 240, 240, 240, 240, 240, 40, 240], [240, 240, 240, 240, 100, 240, 100, 100, 240, 240, 200, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255], [200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 40], [200, 200, 200, 100, 200, 200, 200, 240, 255, 255, 255, 255], [200, 200, 200, 100, 200, 200, 200, 240, 200, 200, 255, 255], [200, 200, 200, 200, 200, 40, 200, 240, 240, 100, 255, 255], [200, 40, 255, 255, 255, 40, 200, 255, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]], dtype=np.uint8) expected_2 = np.array( [[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 240, 240, 200, 200, 240, 240, 200, 240], [240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 200, 240], [240, 240, 240, 240, 240, 240, 100, 100, 240, 240, 200, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [200, 200, 
200, 200, 200, 200, 200, 240, 200, 200, 255, 255], [200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 255], [200, 200, 200, 100, 200, 200, 200, 240, 255, 255, 255, 255], [200, 200, 200, 100, 200, 200, 200, 240, 200, 200, 255, 255], [200, 200, 200, 200, 200, 40, 200, 240, 240, 200, 255, 255], [200, 200, 255, 255, 255, 40, 200, 255, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]], dtype=np.uint8) expected_4 = np.array( [[240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 200, 200, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240], [200, 200, 200, 200, 200, 200, 200, 240, 240, 240, 255, 255], [200, 255, 200, 200, 200, 255, 200, 240, 255, 255, 255, 255], [200, 200, 200, 200, 200, 200, 200, 240, 255, 255, 255, 255], [200, 200, 200, 200, 200, 200, 200, 240, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 240, 240, 200, 255, 255], [200, 200, 255, 255, 255, 200, 200, 255, 200, 200, 255, 255], [200, 200, 200, 200, 200, 200, 200, 255, 255, 255, 255, 255]], dtype=np.uint8) _full_type_test(img, 2, expected_2, area_closing, connectivity=2) _full_type_test(img, 4, expected_4, area_closing, connectivity=2) P, S = max_tree(invert(img), connectivity=2) _full_type_test(img, 4, expected_4, area_closing, parent=P, tree_traverser=S) def test_area_opening(self): img = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 55, 15, 55, 55, 15, 15, 55, 15], [15, 55, 215, 15, 15, 15, 15, 15, 15, 15, 215, 15], [15, 15, 15, 15, 155, 15, 155, 155, 15, 15, 55, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0], [55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 215], [55, 55, 55, 155, 55, 55, 55, 15, 0, 0, 0, 0], [55, 55, 55, 155, 55, 55, 55, 15, 55, 55, 0, 0], [55, 55, 55, 55, 55, 215, 55, 15, 15, 155, 0, 0], [55, 215, 0, 0, 0, 215, 55, 0, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]], dtype=np.uint8) expected_2 = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 15, 15, 55, 55, 15, 15, 55, 15], [15, 55, 55, 15, 15, 15, 15, 15, 15, 15, 55, 15], [15, 15, 15, 15, 15, 15, 155, 155, 15, 15, 55, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0], [55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 0], [55, 55, 55, 155, 55, 55, 55, 15, 0, 0, 0, 0], [55, 55, 55, 155, 55, 55, 55, 15, 55, 55, 0, 0], [55, 55, 55, 55, 55, 215, 55, 15, 15, 55, 0, 0], [55, 55, 0, 0, 0, 215, 55, 0, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]], dtype=np.uint8) expected_4 = np.array([[15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 55, 55, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15], [55, 55, 55, 55, 55, 55, 55, 15, 15, 15, 0, 0], [55, 0, 55, 55, 55, 0, 55, 15, 0, 0, 0, 0], [55, 55, 55, 55, 55, 55, 55, 15, 0, 0, 0, 0], [55, 55, 55, 55, 55, 55, 55, 15, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 15, 15, 55, 0, 0], [55, 55, 0, 0, 0, 55, 55, 0, 55, 55, 0, 0], [55, 55, 55, 55, 55, 55, 55, 0, 0, 0, 0, 0]], dtype=np.uint8) _full_type_test(img, 2, expected_2, area_opening, connectivity=2) _full_type_test(img, 4, expected_4, area_opening, connectivity=2) P, S = max_tree(img, connectivity=2) _full_type_test(img, 4, 
expected_4, area_opening, parent=P, tree_traverser=S) def test_diameter_closing(self): img = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93], [93, 63, 63, 63, 63, 86, 86, 86, 87, 43, 43, 91], [92, 89, 88, 86, 85, 85, 84, 85, 85, 43, 43, 89], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [90, 88, 86, 84, 83, 83, 82, 83, 83, 84, 86, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [92, 89, 23, 23, 85, 85, 84, 85, 85, 3, 3, 89], [93, 91, 23, 23, 87, 86, 86, 86, 87, 88, 3, 91], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]], dtype=np.uint8) ex2 = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93], [93, 63, 63, 63, 63, 86, 86, 86, 87, 43, 43, 91], [92, 89, 88, 86, 85, 85, 84, 85, 85, 43, 43, 89], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [90, 88, 86, 84, 83, 83, 83, 83, 83, 84, 86, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [92, 89, 23, 23, 85, 85, 84, 85, 85, 3, 3, 89], [93, 91, 23, 23, 87, 86, 86, 86, 87, 88, 3, 91], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]], dtype=np.uint8) ex4 = np.array([[97, 95, 93, 92, 91, 90, 90, 90, 91, 92, 93, 95], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93], [93, 63, 63, 63, 63, 86, 86, 86, 87, 84, 84, 91], [92, 89, 88, 86, 85, 85, 84, 85, 85, 84, 84, 89], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [90, 88, 86, 84, 83, 83, 83, 83, 83, 84, 86, 88], [90, 88, 86, 85, 84, 83, 83, 83, 84, 85, 86, 88], [91, 88, 87, 85, 84, 84, 83, 84, 84, 85, 87, 88], [92, 89, 84, 84, 85, 85, 84, 85, 85, 84, 84, 89], [93, 91, 84, 84, 87, 86, 86, 86, 87, 88, 84, 91], [95, 93, 91, 89, 88, 88, 88, 88, 88, 89, 91, 93]], dtype=np.uint8) _full_type_test(img, 2, ex2, diameter_closing, connectivity=2) _full_type_test(img, 4, ex4, diameter_closing, connectivity=2) P, S = max_tree(invert(img), connectivity=2) _full_type_test(img, 4, ex4, diameter_opening, parent=P, tree_traverser=S) def test_diameter_opening(self): img = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10], [9, 40, 40, 40, 40, 16, 16, 16, 16, 60, 60, 11], [11, 13, 15, 16, 17, 18, 18, 18, 17, 60, 60, 13], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 15, 16, 18, 19, 19, 20, 19, 19, 18, 16, 15], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [11, 13, 80, 80, 17, 18, 18, 18, 17, 100, 100, 13], [9, 11, 80, 80, 16, 16, 16, 16, 16, 15, 100, 11], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]]) ex2 = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10], [9, 40, 40, 40, 40, 16, 16, 16, 16, 60, 60, 11], [11, 13, 15, 16, 17, 18, 18, 18, 17, 60, 60, 13], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 15, 16, 18, 19, 19, 19, 19, 19, 18, 16, 15], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [11, 13, 80, 80, 17, 18, 18, 18, 17, 100, 100, 13], [9, 11, 80, 80, 16, 16, 16, 16, 16, 15, 100, 11], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]]) ex4 = np.array([[5, 7, 9, 11, 12, 12, 12, 12, 12, 11, 9, 7], [7, 10, 11, 
13, 14, 14, 15, 14, 14, 13, 11, 10], [9, 40, 40, 40, 40, 16, 16, 16, 16, 18, 18, 11], [11, 13, 15, 16, 17, 18, 18, 18, 17, 18, 18, 13], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 15, 16, 18, 19, 19, 19, 19, 19, 18, 16, 15], [12, 14, 16, 18, 19, 19, 19, 19, 19, 18, 16, 14], [12, 14, 16, 17, 18, 19, 19, 19, 18, 17, 16, 14], [11, 13, 18, 18, 17, 18, 18, 18, 17, 18, 18, 13], [9, 11, 18, 18, 16, 16, 16, 16, 16, 15, 18, 11], [7, 10, 11, 13, 14, 14, 15, 14, 14, 13, 11, 10]]) _full_type_test(img, 2, ex2, diameter_opening, connectivity=2) _full_type_test(img, 4, ex4, diameter_opening, connectivity=2) P, S = max_tree(img, connectivity=2) _full_type_test(img, 4, ex4, diameter_opening, parent=P, tree_traverser=S) def test_local_maxima(self): data = np.array([[10, 11, 13, 14, 14, 15, 14, 14, 13, 11], [11, 13, 15, 16, 16, 16, 16, 16, 15, 13], [13, 15, 40, 40, 18, 18, 18, 60, 60, 15], [14, 16, 40, 40, 19, 19, 19, 60, 60, 16], [14, 16, 18, 19, 19, 19, 19, 19, 18, 16], [15, 16, 18, 19, 19, 20, 19, 19, 18, 16], [14, 16, 18, 19, 19, 19, 19, 19, 18, 16], [14, 16, 80, 80, 19, 19, 19, 100, 100, 16], [13, 15, 80, 80, 18, 18, 18, 100, 100, 15], [11, 13, 15, 16, 16, 16, 16, 16, 15, 13]], dtype=np.uint8) expected_result = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.uint64) for dtype in [np.uint8, np.uint64, np.int8, np.int64]: test_data = data.astype(dtype) out = max_tree_local_maxima(test_data, connectivity=1) out_bin = out > 0 assert_array_equal(expected_result, out_bin) assert out.dtype == expected_result.dtype assert np.max(out) == 5 P, S = max_tree(test_data) out = max_tree_local_maxima(test_data, parent=P, tree_traverser=S) assert_array_equal(expected_result, out_bin) assert out.dtype == expected_result.dtype assert np.max(out) == 5 def test_extrema_float(self): data = np.array([[0.10, 0.11, 0.13, 0.14, 0.14, 0.15, 0.14, 0.14, 0.13, 0.11], [0.11, 0.13, 0.15, 0.16, 0.16, 0.16, 0.16, 0.16, 0.15, 0.13], [0.13, 0.15, 0.40, 0.40, 0.18, 0.18, 0.18, 0.60, 0.60, 0.15], [0.14, 0.16, 0.40, 0.40, 0.19, 0.19, 0.19, 0.60, 0.60, 0.16], [0.14, 0.16, 0.18, 0.19, 0.19, 0.19, 0.19, 0.19, 0.18, 0.16], [0.15, 0.182, 0.18, 0.19, 0.204, 0.20, 0.19, 0.19, 0.18, 0.16], [0.14, 0.16, 0.18, 0.19, 0.19, 0.19, 0.19, 0.19, 0.18, 0.16], [0.14, 0.16, 0.80, 0.80, 0.19, 0.19, 0.19, 4.0, 1.0, 0.16], [0.13, 0.15, 0.80, 0.80, 0.18, 0.18, 0.18, 1.0, 1.0, 0.15], [0.11, 0.13, 0.15, 0.16, 0.16, 0.16, 0.16, 0.16, 0.15, 0.13]], dtype=np.float32) expected_result = np.array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 1, 1, 0, 0, 0, 1, 1, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 1, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=np.uint8) out = max_tree_local_maxima(data, connectivity=1) out_bin = out > 0 assert_array_equal(expected_result, out_bin) assert np.max(out) == 6 def test_3d(self): img = np.zeros((8, 8, 8), dtype=np.uint8) local_maxima = np.zeros((8, 8, 8), dtype=np.uint64) img[1, 1:3, 1:3] = 100 img[2, 2, 2] = 200 img[3, 1:3, 1:3] = 100 local_maxima[2, 2, 2] = 1 img[5:8, 1, 1] = 200 local_maxima[5:8, 1, 1] = 1 img[0, 5:8, 
5:8] = 200 img[1, 6, 6] = 100 img[2, 5:7, 5:7] = 200 img[0:3, 5:8, 5:8] += 50 local_maxima[0, 5:8, 5:8] = 1 local_maxima[2, 5:7, 5:7] = 1 img[6:8, 6:8, 6:8] = 200 img[7, 7, 7] = 255 local_maxima[7, 7, 7] = 1 out = max_tree_local_maxima(img) out_bin = out > 0 assert_array_equal(local_maxima, out_bin) assert np.max(out) == 5
true
true
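
The record above stores the max-tree morphology tests from scikit-image. A minimal usage sketch for the operators those tests exercise, assuming only the public skimage.morphology API that the record itself imports; the toy image and area threshold are illustrative:

import numpy as np
from skimage.morphology import max_tree, area_closing
from skimage.util import invert

img = np.full((7, 7), 200, dtype=np.uint8)
img[2:4, 2:4] = 40                       # dark 2x2 blob, area 4

# Dark structures smaller than the area threshold are raised to the
# surrounding grey level, so the blob disappears.
closed = area_closing(img, 8, connectivity=2)
assert (closed == 200).all()

# The max tree of the inverted image can be precomputed and reused,
# which is the parent/tree_traverser pattern the tests rely on.
P, S = max_tree(invert(img), connectivity=2)
assert (area_closing(img, 8, connectivity=2,
                     parent=P, tree_traverser=S) == closed).all()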
f70033d1cbc2d6abea9a13563db9c1b94096e116
2,444
py
Python
utils/dataset.py
Theia-4869/U-RISC
c493b11ab525b39a5ac029c3f83e059d703abaae
[ "MIT" ]
null
null
null
utils/dataset.py
Theia-4869/U-RISC
c493b11ab525b39a5ac029c3f83e059d703abaae
[ "MIT" ]
null
null
null
utils/dataset.py
Theia-4869/U-RISC
c493b11ab525b39a5ac029c3f83e059d703abaae
[ "MIT" ]
null
null
null
import logging
import os
from pathlib import Path
from typing import Any, Callable, Optional

from torch.utils.data import Dataset
from torchvision import transforms
from PIL import Image
import cv2
import numpy as np


class URISC(Dataset):
    def __init__(
        self,
        dir: str,
        mode: str = 'train',
        transform: Optional[Callable] = None,
        data_rank: str = 'simple',
    ):
        super(URISC, self).__init__()
        self.dir = dir
        self.mode = mode
        self.transform = transform
        self.data_rank = data_rank
        if data_rank == 'simple':
            self.transform_normalize = transforms.Normalize(mean=0.520, std=0.185)
        elif data_rank == 'complex':
            self.transform_normalize = transforms.Normalize(mean=0.518, std=0.190)
        self.transform_totensor = transforms.ToTensor()
        self.ids = [os.path.join(dir, data_rank, mode, filename)
                    for filename in os.listdir(os.path.join(dir, data_rank, mode))]
        if not self.ids:
            raise RuntimeError(f'No input file found in {os.path.join(dir, data_rank, mode)}, make sure you put your images there')
        logging.info(f'Creating dataset with {len(self.ids)} examples')

    def __len__(self):
        return len(self.ids)

    def __getitem__(self, idx):
        image = cv2.imread(self.ids[idx])
        # print(image.shape)
        if self.mode == 'test':
            if self.transform is not None:
                image = self.transform(image=image)
            return image.float().contiguous(), self.ids[idx]
        mask_path = self.ids[idx].replace(self.mode, "label/"+self.mode)
        mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)
        # print(mask)
        if self.transform is not None:
            transformed = self.transform(image=image, mask=mask)
            transformed_image = transformed['image']
            transformed_mask = transformed['mask']
        else:
            transformed_image = image
            transformed_mask = mask
        transformed_image = self.transform_totensor(transformed_image)
        transformed_image = self.transform_normalize(transformed_image)
        transformed_mask = self.transform_totensor(transformed_mask)
        # transformed_image = np.transpose(transformed_image, (2, 0, 1))
        # transformed_mask = np.expand_dims(transformed_mask, axis=0)
        return transformed_image, transformed_mask
34.914286
131
0.641162
import logging
import os
from pathlib import Path
from typing import Any, Callable, Optional

from torch.utils.data import Dataset
from torchvision import transforms
from PIL import Image
import cv2
import numpy as np


class URISC(Dataset):
    def __init__(
        self,
        dir: str,
        mode: str = 'train',
        transform: Optional[Callable] = None,
        data_rank: str = 'simple',
    ):
        super(URISC, self).__init__()
        self.dir = dir
        self.mode = mode
        self.transform = transform
        self.data_rank = data_rank
        if data_rank == 'simple':
            self.transform_normalize = transforms.Normalize(mean=0.520, std=0.185)
        elif data_rank == 'complex':
            self.transform_normalize = transforms.Normalize(mean=0.518, std=0.190)
        self.transform_totensor = transforms.ToTensor()
        self.ids = [os.path.join(dir, data_rank, mode, filename)
                    for filename in os.listdir(os.path.join(dir, data_rank, mode))]
        if not self.ids:
            raise RuntimeError(f'No input file found in {os.path.join(dir, data_rank, mode)}, make sure you put your images there')
        logging.info(f'Creating dataset with {len(self.ids)} examples')

    def __len__(self):
        return len(self.ids)

    def __getitem__(self, idx):
        image = cv2.imread(self.ids[idx])
        if self.mode == 'test':
            if self.transform is not None:
                image = self.transform(image=image)
            return image.float().contiguous(), self.ids[idx]
        mask_path = self.ids[idx].replace(self.mode, "label/"+self.mode)
        mask = cv2.imread(mask_path, cv2.IMREAD_GRAYSCALE)
        if self.transform is not None:
            transformed = self.transform(image=image, mask=mask)
            transformed_image = transformed['image']
            transformed_mask = transformed['mask']
        else:
            transformed_image = image
            transformed_mask = mask
        transformed_image = self.transform_totensor(transformed_image)
        transformed_image = self.transform_normalize(transformed_image)
        transformed_mask = self.transform_totensor(transformed_mask)
        return transformed_image, transformed_mask
true
true
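
The URISC dataset class in the record above accepts any callable transform that takes image=/mask= keyword arguments and returns a dict with 'image' and 'mask' keys. A hedged sketch of one plausible way to drive it; the albumentations transform, the data directory, and the batch size are assumptions, not part of the record:

import albumentations as A
from torch.utils.data import DataLoader

# Any callable with this interface works; albumentations is one common choice.
train_transform = A.Compose([
    A.RandomCrop(512, 512),
    A.HorizontalFlip(p=0.5),
])

# Expects images under data/simple/train and masks under data/simple/label/train,
# mirroring the paths URISC.__getitem__ builds.
dataset = URISC(dir='data', mode='train', transform=train_transform,
                data_rank='simple')
loader = DataLoader(dataset, batch_size=4, shuffle=True)
images, masks = next(iter(loader))   # float tensors after ToTensor/Normalize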
f70034b3c3afba5a914261b55cf0abeab832391c
1,441
py
Python
imaginaire/losses/feature_matching.py
hw07216/imaginaire
87c774114622e39488a5ea8a7728b1a20896afb9
[ "RSA-MD" ]
3,308
2020-07-15T17:50:13.000Z
2022-03-31T14:53:31.000Z
imaginaire/losses/feature_matching.py
hw07216/imaginaire
87c774114622e39488a5ea8a7728b1a20896afb9
[ "RSA-MD" ]
132
2020-09-20T17:36:28.000Z
2022-03-28T12:40:03.000Z
src/imaginaire/losses/feature_matching.py
livingbio/imaginaire-fsvid2vid
d82c87aced50afd44fd162491ba5b59056b74034
[ "RSA-MD" ]
370
2020-09-29T00:34:08.000Z
2022-03-30T04:12:48.000Z
# Copyright (C) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, check out LICENSE.md
import torch.nn as nn


class FeatureMatchingLoss(nn.Module):
    r"""Compute feature matching loss"""
    def __init__(self, criterion='l1'):
        super(FeatureMatchingLoss, self).__init__()
        if criterion == 'l1':
            self.criterion = nn.L1Loss()
        elif criterion == 'l2' or criterion == 'mse':
            self.criterion = nn.MSELoss()
        else:
            raise ValueError('Criterion %s is not recognized' % criterion)

    def forward(self, fake_features, real_features):
        r"""Return the target vector for the binary cross entropy loss
        computation.

        Args:
            fake_features (list of lists): Discriminator features of fake images.
            real_features (list of lists): Discriminator features of real images.

        Returns:
            (tensor): Loss value.
        """
        num_d = len(fake_features)
        dis_weight = 1.0 / num_d
        loss = fake_features[0][0].new_tensor(0)
        for i in range(num_d):
            for j in range(len(fake_features[i])):
                tmp_loss = self.criterion(fake_features[i][j],
                                          real_features[i][j].detach())
                loss += dis_weight * tmp_loss
        return loss
36.948718
80
0.605829
import torch.nn as nn


class FeatureMatchingLoss(nn.Module):
    def __init__(self, criterion='l1'):
        super(FeatureMatchingLoss, self).__init__()
        if criterion == 'l1':
            self.criterion = nn.L1Loss()
        elif criterion == 'l2' or criterion == 'mse':
            self.criterion = nn.MSELoss()
        else:
            raise ValueError('Criterion %s is not recognized' % criterion)

    def forward(self, fake_features, real_features):
        num_d = len(fake_features)
        dis_weight = 1.0 / num_d
        loss = fake_features[0][0].new_tensor(0)
        for i in range(num_d):
            for j in range(len(fake_features[i])):
                tmp_loss = self.criterion(fake_features[i][j],
                                          real_features[i][j].detach())
                loss += dis_weight * tmp_loss
        return loss
true
true
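
A short sketch exercising the FeatureMatchingLoss from the record above with dummy tensors shaped like multi-scale discriminator output (a list per discriminator, each holding per-layer feature maps); the sizes are arbitrary and the class is assumed to be in scope:

import torch

loss_fn = FeatureMatchingLoss(criterion='l1')
# Two discriminators, three feature layers each; requires_grad so that
# backward() has a graph to traverse (real features are detached inside).
fake = [[torch.randn(1, 8, 16, 16, requires_grad=True) for _ in range(3)]
        for _ in range(2)]
real = [[torch.randn(1, 8, 16, 16) for _ in range(3)] for _ in range(2)]
loss = loss_fn(fake, real)   # scalar tensor, averaged over discriminators
loss.backward()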
f70034b5d8bc1589a710450b847c2f39ab19cddb
19,031
py
Python
cinder/volume/drivers/datera/datera_iscsi.py
traghavendra/cinder-train
49af592c61da3216c04f5771b8ebf0927c5ce1c8
[ "Apache-2.0" ]
null
null
null
cinder/volume/drivers/datera/datera_iscsi.py
traghavendra/cinder-train
49af592c61da3216c04f5771b8ebf0927c5ce1c8
[ "Apache-2.0" ]
28
2017-08-17T14:46:05.000Z
2022-03-29T12:42:12.000Z
cinder/volume/drivers/datera/datera_iscsi.py
alokchandra11/cinder
121d9f512b4a6d1afe6a690effb7c2b379040a7b
[ "Apache-2.0" ]
3
2017-04-27T16:11:40.000Z
2020-02-12T21:27:00.000Z
# Copyright 2017 Datera # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time import uuid from eventlet.green import threading from oslo_config import cfg from oslo_log import log as logging import six from cinder import exception from cinder.i18n import _ from cinder import utils from cinder.volume import configuration from cinder.volume.drivers.san import san import cinder.volume.drivers.datera.datera_api2 as api2 import cinder.volume.drivers.datera.datera_api21 as api21 import cinder.volume.drivers.datera.datera_common as datc LOG = logging.getLogger(__name__) d_opts = [ cfg.StrOpt('datera_api_port', default='7717', help='Datera API port.'), cfg.StrOpt('datera_api_version', default='2', deprecated_for_removal=True, help='Datera API version.'), cfg.IntOpt('datera_503_timeout', default='120', help='Timeout for HTTP 503 retry messages'), cfg.IntOpt('datera_503_interval', default='5', help='Interval between 503 retries'), cfg.BoolOpt('datera_debug', default=False, help="True to set function arg and return logging"), cfg.BoolOpt('datera_debug_replica_count_override', default=False, help="ONLY FOR DEBUG/TESTING PURPOSES\n" "True to set replica_count to 1"), cfg.StrOpt('datera_tenant_id', default=None, help="If set to 'Map' --> OpenStack project ID will be mapped " "implicitly to Datera tenant ID\n" "If set to 'None' --> Datera tenant ID will not be used " "during volume provisioning\n" "If set to anything else --> Datera tenant ID will be the " "provided value"), cfg.BoolOpt('datera_disable_profiler', default=False, help="Set to True to disable profiling in the Datera driver"), ] CONF = cfg.CONF CONF.import_opt('driver_use_ssl', 'cinder.volume.driver') CONF.register_opts(d_opts, group=configuration.SHARED_CONF_GROUP) @six.add_metaclass(utils.TraceWrapperWithABCMetaclass) class DateraDriver(san.SanISCSIDriver, api2.DateraApi, api21.DateraApi): """The OpenStack Datera Driver Version history: * 1.0 - Initial driver * 1.1 - Look for lun-0 instead of lun-1. * 2.0 - Update For Datera API v2 * 2.1 - Multipath, ACL and reorg * 2.2 - Capabilites List, Extended Volume-Type Support Naming convention change, Volume Manage/Unmanage support * 2.3 - Templates, Tenants, Snapshot Polling, 2.1 Api Version Support, Restructure * 2.3.1 - Scalability bugfixes * 2.3.2 - Volume Placement, ACL multi-attach bugfix * 2.4.0 - Fast Retype Support """ VERSION = '2.4.0' CI_WIKI_NAME = "datera-ci" HEADER_DATA = {'Datera-Driver': 'OpenStack-Cinder-{}'.format(VERSION)} # TODO(jsbryant) Remove driver in the 'U' release if CI is not fixed. 
SUPPORTED = False def __init__(self, *args, **kwargs): super(DateraDriver, self).__init__(*args, **kwargs) self.configuration.append_config_values(d_opts) self.username = self.configuration.san_login self.password = self.configuration.san_password self.cluster_stats = {} self.datera_api_token = None self.interval = self.configuration.datera_503_interval self.retry_attempts = (self.configuration.datera_503_timeout / self.interval) self.driver_prefix = str(uuid.uuid4())[:4] self.datera_debug = self.configuration.datera_debug self.datera_api_versions = [] if self.datera_debug: utils.setup_tracing(['method']) self.tenant_id = self.configuration.datera_tenant_id if self.tenant_id and self.tenant_id.lower() == 'none': self.tenant_id = None self.api_check = time.time() self.api_cache = [] self.api_timeout = 0 self.do_profile = not self.configuration.datera_disable_profiler self.thread_local = threading.local() backend_name = self.configuration.safe_get( 'volume_backend_name') self.backend_name = backend_name or 'Datera' datc.register_driver(self) def do_setup(self, context): # If we can't authenticate through the old and new method, just fail # now. if not all([self.username, self.password]): msg = _("san_login and/or san_password is not set for Datera " "driver in the cinder.conf. Set this information and " "start the cinder-volume service again.") LOG.error(msg) raise exception.InvalidInput(msg) self.login() self._create_tenant() # ================= # ================= # = Create Volume = # ================= @datc._api_lookup def create_volume(self, volume): """Create a logical volume.""" pass # ================= # = Extend Volume = # ================= @datc._api_lookup def extend_volume(self, volume, new_size): pass # ================= # ================= # = Cloned Volume = # ================= @datc._api_lookup def create_cloned_volume(self, volume, src_vref): pass # ================= # = Delete Volume = # ================= @datc._api_lookup def delete_volume(self, volume): pass # ================= # = Ensure Export = # ================= @datc._api_lookup def ensure_export(self, context, volume, connector=None): """Gets the associated account, retrieves CHAP info and updates.""" # ========================= # = Initialize Connection = # ========================= @datc._api_lookup def initialize_connection(self, volume, connector): pass # ================= # = Create Export = # ================= @datc._api_lookup def create_export(self, context, volume, connector): pass # ================= # = Detach Volume = # ================= @datc._api_lookup def detach_volume(self, context, volume, attachment=None): pass # =================== # = Create Snapshot = # =================== @datc._api_lookup def create_snapshot(self, snapshot): pass # =================== # = Delete Snapshot = # =================== @datc._api_lookup def delete_snapshot(self, snapshot): pass # ======================== # = Volume From Snapshot = # ======================== @datc._api_lookup def create_volume_from_snapshot(self, volume, snapshot): pass # ========== # = Retype = # ========== @datc._api_lookup def retype(self, ctxt, volume, new_type, diff, host): """Convert the volume to be of the new type. Returns a boolean indicating whether the retype occurred. 
:param ctxt: Context :param volume: A dictionary describing the volume to migrate :param new_type: A dictionary describing the volume type to convert to :param diff: A dictionary with the difference between the two types :param host: A dictionary describing the host to migrate to, where host['host'] is its name, and host['capabilities'] is a dictionary of its reported capabilities (Not Used). """ pass # ========== # = Manage = # ========== @datc._api_lookup def manage_existing(self, volume, existing_ref): """Manage an existing volume on the Datera backend The existing_ref must be either the current name or Datera UUID of an app_instance on the Datera backend in a colon separated list with the storage instance name and volume name. This means only single storage instances and single volumes are supported for managing by cinder. Eg. (existing_ref['source-name'] == tenant:app_inst_name:storage_inst_name:vol_name) if using Datera 2.1 API or (existing_ref['source-name'] == app_inst_name:storage_inst_name:vol_name) if using 2.0 API :param volume: Cinder volume to manage :param existing_ref: Driver-specific information used to identify a volume """ pass # =================== # = Manage Get Size = # =================== @datc._api_lookup def manage_existing_get_size(self, volume, existing_ref): """Get the size of an unmanaged volume on the Datera backend The existing_ref must be either the current name or Datera UUID of an app_instance on the Datera backend in a colon separated list with the storage instance name and volume name. This means only single storage instances and single volumes are supported for managing by cinder. Eg. existing_ref == app_inst_name:storage_inst_name:vol_name :param volume: Cinder volume to manage :param existing_ref: Driver-specific information used to identify a volume on the Datera backend """ pass # ========================= # = Get Manageable Volume = # ========================= @datc._api_lookup def get_manageable_volumes(self, cinder_volumes, marker, limit, offset, sort_keys, sort_dirs): """List volumes on the backend available for management by Cinder. Returns a list of dictionaries, each specifying a volume in the host, with the following keys: - reference (dictionary): The reference for a volume, which can be passed to 'manage_existing'. - size (int): The size of the volume according to the storage backend, rounded up to the nearest GB. - safe_to_manage (boolean): Whether or not this volume is safe to manage according to the storage backend. For example, is the volume in use or invalid for any reason. - reason_not_safe (string): If safe_to_manage is False, the reason why. - cinder_id (string): If already managed, provide the Cinder ID. - extra_info (string): Any extra information to return to the user :param cinder_volumes: A list of volumes in this host that Cinder currently manages, used to determine if a volume is manageable or not. 
:param marker: The last item of the previous page; we return the next results after this value (after sorting) :param limit: Maximum number of items to return :param offset: Number of items to skip after marker :param sort_keys: List of keys to sort results by (valid keys are 'identifier' and 'size') :param sort_dirs: List of directions to sort by, corresponding to sort_keys (valid directions are 'asc' and 'desc') """ pass # ============ # = Unmanage = # ============ @datc._api_lookup def unmanage(self, volume): """Unmanage a currently managed volume in Cinder :param volume: Cinder volume to unmanage """ pass # ================ # = Volume Stats = # ================ @datc._api_lookup def get_volume_stats(self, refresh=False): """Get volume stats. If 'refresh' is True, run update first. The name is a bit misleading as the majority of the data here is cluster data. """ pass # ========= # = Login = # ========= @datc._api_lookup def login(self): pass # ======= # = QoS = # ======= def _update_qos(self, resource, policies): url = datc.URL_TEMPLATES['vol_inst']( policies['default_storage_name'], policies['default_volume_name']) + '/performance_policy' url = url.format(datc._get_name(resource['id'])) type_id = resource.get('volume_type_id', None) if type_id is not None: # Filter for just QOS policies in result. All of their keys # should end with "max" fpolicies = {k: int(v) for k, v in policies.items() if k.endswith("max")} # Filter all 0 values from being passed fpolicies = dict(filter(lambda _v: _v[1] > 0, fpolicies.items())) if fpolicies: self._issue_api_request(url, 'post', body=fpolicies, api_version='2') def _get_lunid(self): return 0 # ============================ # = Volume-Types/Extra-Specs = # ============================ def _init_vendor_properties(self): """Create a dictionary of vendor unique properties. This method creates a dictionary of vendor unique properties and returns both created dictionary and vendor name. Returned vendor name is used to check for name of vendor unique properties. - Vendor name shouldn't include colon(:) because of the separator and it is automatically replaced by underscore(_). ex. abc:d -> abc_d - Vendor prefix is equal to vendor name. ex. abcd - Vendor unique properties must start with vendor prefix + ':'. ex. abcd:maxIOPS Each backend driver needs to override this method to expose its own properties using _set_property() like this: self._set_property( properties, "vendorPrefix:specific_property", "Title of property", _("Description of property"), "type") : return dictionary of vendor unique properties : return vendor name prefix: DF --> Datera Fabric """ properties = {} self._set_property( properties, "DF:placement_mode", "Datera Volume Placement", _("'single_flash' for single-flash-replica placement, " "'all_flash' for all-flash-replica placement, " "'hybrid' for hybrid placement"), "string", default="hybrid") self._set_property( properties, "DF:round_robin", "Datera Round Robin Portals", _("True to round robin the provided portals for a target"), "boolean", default=False) if self.configuration.get('datera_debug_replica_count_override'): replica_count = 1 else: replica_count = 3 self._set_property( properties, "DF:replica_count", "Datera Volume Replica Count", _("Specifies number of replicas for each volume. Can only be " "increased once volume is created"), "integer", minimum=1, default=replica_count) self._set_property( properties, "DF:acl_allow_all", "Datera ACL Allow All", _("True to set acl 'allow_all' on volumes created. 
Cannot be " "changed on volume once set"), "boolean", default=False) self._set_property( properties, "DF:ip_pool", "Datera IP Pool", _("Specifies IP pool to use for volume"), "string", default="default") self._set_property( properties, "DF:template", "Datera Template", _("Specifies Template to use for volume provisioning"), "string", default="") # ###### QoS Settings ###### # self._set_property( properties, "DF:read_bandwidth_max", "Datera QoS Max Bandwidth Read", _("Max read bandwidth setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:default_storage_name", "Datera Default Storage Instance Name", _("The name to use for storage instances created"), "string", default="storage-1") self._set_property( properties, "DF:default_volume_name", "Datera Default Volume Name", _("The name to use for volumes created"), "string", default="volume-1") self._set_property( properties, "DF:write_bandwidth_max", "Datera QoS Max Bandwidth Write", _("Max write bandwidth setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:total_bandwidth_max", "Datera QoS Max Bandwidth Total", _("Max total bandwidth setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:read_iops_max", "Datera QoS Max iops Read", _("Max read iops setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:write_iops_max", "Datera QoS Max IOPS Write", _("Max write iops setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:total_iops_max", "Datera QoS Max IOPS Total", _("Max total iops setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) # ###### End QoS Settings ###### # return properties, 'DF'
32.255932
79
0.561347
import time import uuid from eventlet.green import threading from oslo_config import cfg from oslo_log import log as logging import six from cinder import exception from cinder.i18n import _ from cinder import utils from cinder.volume import configuration from cinder.volume.drivers.san import san import cinder.volume.drivers.datera.datera_api2 as api2 import cinder.volume.drivers.datera.datera_api21 as api21 import cinder.volume.drivers.datera.datera_common as datc LOG = logging.getLogger(__name__) d_opts = [ cfg.StrOpt('datera_api_port', default='7717', help='Datera API port.'), cfg.StrOpt('datera_api_version', default='2', deprecated_for_removal=True, help='Datera API version.'), cfg.IntOpt('datera_503_timeout', default='120', help='Timeout for HTTP 503 retry messages'), cfg.IntOpt('datera_503_interval', default='5', help='Interval between 503 retries'), cfg.BoolOpt('datera_debug', default=False, help="True to set function arg and return logging"), cfg.BoolOpt('datera_debug_replica_count_override', default=False, help="ONLY FOR DEBUG/TESTING PURPOSES\n" "True to set replica_count to 1"), cfg.StrOpt('datera_tenant_id', default=None, help="If set to 'Map' --> OpenStack project ID will be mapped " "implicitly to Datera tenant ID\n" "If set to 'None' --> Datera tenant ID will not be used " "during volume provisioning\n" "If set to anything else --> Datera tenant ID will be the " "provided value"), cfg.BoolOpt('datera_disable_profiler', default=False, help="Set to True to disable profiling in the Datera driver"), ] CONF = cfg.CONF CONF.import_opt('driver_use_ssl', 'cinder.volume.driver') CONF.register_opts(d_opts, group=configuration.SHARED_CONF_GROUP) @six.add_metaclass(utils.TraceWrapperWithABCMetaclass) class DateraDriver(san.SanISCSIDriver, api2.DateraApi, api21.DateraApi): VERSION = '2.4.0' CI_WIKI_NAME = "datera-ci" HEADER_DATA = {'Datera-Driver': 'OpenStack-Cinder-{}'.format(VERSION)} SUPPORTED = False def __init__(self, *args, **kwargs): super(DateraDriver, self).__init__(*args, **kwargs) self.configuration.append_config_values(d_opts) self.username = self.configuration.san_login self.password = self.configuration.san_password self.cluster_stats = {} self.datera_api_token = None self.interval = self.configuration.datera_503_interval self.retry_attempts = (self.configuration.datera_503_timeout / self.interval) self.driver_prefix = str(uuid.uuid4())[:4] self.datera_debug = self.configuration.datera_debug self.datera_api_versions = [] if self.datera_debug: utils.setup_tracing(['method']) self.tenant_id = self.configuration.datera_tenant_id if self.tenant_id and self.tenant_id.lower() == 'none': self.tenant_id = None self.api_check = time.time() self.api_cache = [] self.api_timeout = 0 self.do_profile = not self.configuration.datera_disable_profiler self.thread_local = threading.local() backend_name = self.configuration.safe_get( 'volume_backend_name') self.backend_name = backend_name or 'Datera' datc.register_driver(self) def do_setup(self, context): # now. if not all([self.username, self.password]): msg = _("san_login and/or san_password is not set for Datera " "driver in the cinder.conf. 
Set this information and " "start the cinder-volume service again.") LOG.error(msg) raise exception.InvalidInput(msg) self.login() self._create_tenant() # ================= # ================= # = Create Volume = # ================= @datc._api_lookup def create_volume(self, volume): pass # ================= # = Extend Volume = # ================= @datc._api_lookup def extend_volume(self, volume, new_size): pass # ================= # ================= # = Cloned Volume = # ================= @datc._api_lookup def create_cloned_volume(self, volume, src_vref): pass # ================= # = Delete Volume = # ================= @datc._api_lookup def delete_volume(self, volume): pass # ================= # = Ensure Export = # ================= @datc._api_lookup def ensure_export(self, context, volume, connector=None): # ========================= # = Initialize Connection = # ========================= @datc._api_lookup def initialize_connection(self, volume, connector): pass # ================= # = Create Export = # ================= @datc._api_lookup def create_export(self, context, volume, connector): pass # ================= # = Detach Volume = # ================= @datc._api_lookup def detach_volume(self, context, volume, attachment=None): pass # =================== # = Create Snapshot = # =================== @datc._api_lookup def create_snapshot(self, snapshot): pass # =================== # = Delete Snapshot = # =================== @datc._api_lookup def delete_snapshot(self, snapshot): pass # ======================== # = Volume From Snapshot = # ======================== @datc._api_lookup def create_volume_from_snapshot(self, volume, snapshot): pass # ========== # = Retype = # ========== @datc._api_lookup def retype(self, ctxt, volume, new_type, diff, host): pass # ========== # = Manage = # ========== @datc._api_lookup def manage_existing(self, volume, existing_ref): pass # =================== # = Manage Get Size = # =================== @datc._api_lookup def manage_existing_get_size(self, volume, existing_ref): pass # ========================= # = Get Manageable Volume = # ========================= @datc._api_lookup def get_manageable_volumes(self, cinder_volumes, marker, limit, offset, sort_keys, sort_dirs): pass # ============ # = Unmanage = # ============ @datc._api_lookup def unmanage(self, volume): pass # ================ # = Volume Stats = # ================ @datc._api_lookup def get_volume_stats(self, refresh=False): pass # ========= # = Login = # ========= @datc._api_lookup def login(self): pass # ======= # = QoS = # ======= def _update_qos(self, resource, policies): url = datc.URL_TEMPLATES['vol_inst']( policies['default_storage_name'], policies['default_volume_name']) + '/performance_policy' url = url.format(datc._get_name(resource['id'])) type_id = resource.get('volume_type_id', None) if type_id is not None: # Filter for just QOS policies in result. 
All of their keys # should end with "max" fpolicies = {k: int(v) for k, v in policies.items() if k.endswith("max")} # Filter all 0 values from being passed fpolicies = dict(filter(lambda _v: _v[1] > 0, fpolicies.items())) if fpolicies: self._issue_api_request(url, 'post', body=fpolicies, api_version='2') def _get_lunid(self): return 0 # ============================ # = Volume-Types/Extra-Specs = # ============================ def _init_vendor_properties(self): properties = {} self._set_property( properties, "DF:placement_mode", "Datera Volume Placement", _("'single_flash' for single-flash-replica placement, " "'all_flash' for all-flash-replica placement, " "'hybrid' for hybrid placement"), "string", default="hybrid") self._set_property( properties, "DF:round_robin", "Datera Round Robin Portals", _("True to round robin the provided portals for a target"), "boolean", default=False) if self.configuration.get('datera_debug_replica_count_override'): replica_count = 1 else: replica_count = 3 self._set_property( properties, "DF:replica_count", "Datera Volume Replica Count", _("Specifies number of replicas for each volume. Can only be " "increased once volume is created"), "integer", minimum=1, default=replica_count) self._set_property( properties, "DF:acl_allow_all", "Datera ACL Allow All", _("True to set acl 'allow_all' on volumes created. Cannot be " "changed on volume once set"), "boolean", default=False) self._set_property( properties, "DF:ip_pool", "Datera IP Pool", _("Specifies IP pool to use for volume"), "string", default="default") self._set_property( properties, "DF:template", "Datera Template", _("Specifies Template to use for volume provisioning"), "string", default="") # ###### QoS Settings ###### # self._set_property( properties, "DF:read_bandwidth_max", "Datera QoS Max Bandwidth Read", _("Max read bandwidth setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:default_storage_name", "Datera Default Storage Instance Name", _("The name to use for storage instances created"), "string", default="storage-1") self._set_property( properties, "DF:default_volume_name", "Datera Default Volume Name", _("The name to use for volumes created"), "string", default="volume-1") self._set_property( properties, "DF:write_bandwidth_max", "Datera QoS Max Bandwidth Write", _("Max write bandwidth setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:total_bandwidth_max", "Datera QoS Max Bandwidth Total", _("Max total bandwidth setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:read_iops_max", "Datera QoS Max iops Read", _("Max read iops setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:write_iops_max", "Datera QoS Max IOPS Write", _("Max write iops setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) self._set_property( properties, "DF:total_iops_max", "Datera QoS Max IOPS Total", _("Max total iops setting for volume qos, " "use 0 for unlimited"), "integer", minimum=0, default=0) # ###### End QoS Settings ###### # return properties, 'DF'
true
true
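
The driver in the record above filters QoS extra-specs in _update_qos before posting them: only keys ending in "max" are treated as QoS policies, and zero values (meaning unlimited) are dropped. The same two-step filter shown standalone, with made-up policy values:

# Made-up extra-spec values; only the filtering logic is taken from the driver.
policies = {
    'read_iops_max': '500',
    'total_bandwidth_max': '0',   # 0 means unlimited and is dropped
    'placement_mode': 'hybrid',   # not a QoS key, filtered out
}
fpolicies = {k: int(v) for k, v in policies.items() if k.endswith('max')}
fpolicies = dict(filter(lambda _v: _v[1] > 0, fpolicies.items()))
assert fpolicies == {'read_iops_max': 500}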
f7003591f8a18a1fae373eb4663b223dd7a8098b
1,466
py
Python
tests/WriteTest.py
FlaminMad/researchProject
309577602c0974c402a3f7c9cf1ba3e443e963b5
[ "MIT" ]
null
null
null
tests/WriteTest.py
FlaminMad/researchProject
309577602c0974c402a3f7c9cf1ba3e443e963b5
[ "MIT" ]
2
2018-02-12T18:34:01.000Z
2018-02-12T18:34:33.000Z
tests/WriteTest.py
FlaminMad/researchProject
309577602c0974c402a3f7c9cf1ba3e443e963b5
[ "MIT" ]
null
null
null
#!/usr/bin/env python
"""
Author: Alexander David Leech
Date:   30/09/2015
Rev:    2
Lang:   Python 2.7
Deps:   Pyserial, Pymodbus, logging
"""

import time                                      # For sleep functionality
import logging                                   # For detailed error output
from pymodbus.client.sync import ModbusSerialClient \
    as ModbusClient                              # Import MODBUS support class

comSettings = {
    "method"   : 'rtu',
    "port"     : 'COM3',
    "stopbits" : 1,
    "bytesize" : 8,
    "parity"   : 'N',
    "baudrate" : 9600,
    "timeout"  : 1
}

logging.basicConfig()                            # Setup error logging
log = logging.getLogger()                        # Start logging

client = ModbusClient(**comSettings)             # Setup connection object
client.connect()                                 # Open the MODBUS connection

while(True):
    client.write_register(3,1000,unit=0x01)      # Write valve to 100%
    time.sleep(4)                                # Sleep 4 seconds
    client.write_register(3,0,unit=0x01)         # Write valve to 0%
    time.sleep(4)                                # Sleep 4 seconds

client.close()                                   # Close the connection
39.621622
84
0.439973
import time
import logging
from pymodbus.client.sync import ModbusSerialClient \
    as ModbusClient

comSettings = {
    "method"   : 'rtu',
    "port"     : 'COM3',
    "stopbits" : 1,
    "bytesize" : 8,
    "parity"   : 'N',
    "baudrate" : 9600,
    "timeout"  : 1
}

logging.basicConfig()
log = logging.getLogger()

client = ModbusClient(**comSettings)
client.connect()

while(True):
    client.write_register(3,1000,unit=0x01)
    time.sleep(4)
    client.write_register(3,0,unit=0x01)
    time.sleep(4)

client.close()
true
true
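
The write test in the record above only pushes setpoints; a read-back sketch using the same pymodbus synchronous serial API follows. Register address 3 and slave id 0x01 come from the record, and the port settings are copied from its comSettings:

from pymodbus.client.sync import ModbusSerialClient as ModbusClient

client = ModbusClient(method='rtu', port='COM3', stopbits=1, bytesize=8,
                      parity='N', baudrate=9600, timeout=1)
client.connect()
result = client.read_holding_registers(3, 1, unit=0x01)  # address, count, slave id
print("Valve setpoint:", result.registers[0])
client.close()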
f70035b606446f284009b95a484ebb6bbfc5b6ed
12,093
py
Python
tests/autofix_lib_test.py
charlievieth/all-repos
279d2910c56567d9518ab41bd8894216b9f649e5
[ "MIT" ]
null
null
null
tests/autofix_lib_test.py
charlievieth/all-repos
279d2910c56567d9518ab41bd8894216b9f649e5
[ "MIT" ]
null
null
null
tests/autofix_lib_test.py
charlievieth/all-repos
279d2910c56567d9518ab41bd8894216b9f649e5
[ "MIT" ]
2
2020-09-03T12:50:13.000Z
2020-10-30T07:45:29.000Z
import os import subprocess from unittest import mock import pytest from pre_commit.constants import VERSION as PRE_COMMIT_VERSION import testing.git from all_repos import autofix_lib from all_repos import clone from all_repos import git from all_repos.config import load_config @pytest.mark.parametrize( ('cli_repos', 'expected'), ( (None, ['found_repo']), ([], []), (['cli_repo'], ['cli_repo']), ), ) def test_filter_repos(file_config, cli_repos, expected): ret = autofix_lib.filter_repos( file_config, cli_repos, lambda _: ['found_repo'], ) assert ret == expected def test_assert_importable_is_importable(): autofix_lib.assert_importable('pre_commit', install='pre-commit') def test_assert_importable_not_importable(): with pytest.raises(SystemExit) as excinfo: autofix_lib.assert_importable('watmodule', install='wat') msg, = excinfo.value.args assert msg == ( 'This tool requires the `watmodule` module to be installed.\n' 'Try installing it via `pip install wat`.' ) def test_require_version_new_enough(): autofix_lib.require_version_gte('pre-commit', '0.17.0') def test_require_version_not_new_enough(): with pytest.raises(SystemExit) as excinfo: autofix_lib.require_version_gte('pre-commit', '999') msg, = excinfo.value.args assert msg == ( f'This tool requires the `pre-commit` package is at least version ' f'999. The currently installed version is {PRE_COMMIT_VERSION}.\n\n' f'Try `pip install --upgrade pre-commit`' ) def test_run(capfd): autofix_lib.run('echo', 'h"i') out, _ = capfd.readouterr() assert out == ( '$ echo \'h"i\'\n' 'h"i\n' ) def test_cwd(tmpdir): orig = os.getcwd() with autofix_lib.cwd(tmpdir): assert os.getcwd() == tmpdir assert os.getcwd() == orig def test_repo_context_success(file_config_files, capsys): expected_rev = testing.git.revparse(file_config_files.dir1) with autofix_lib.repo_context( str(file_config_files.output_dir.join('repo1')), use_color=False, ): assert testing.git.revparse('.') == expected_rev assert git.remote('.') == file_config_files.dir1 out, err = capsys.readouterr() assert err == '' assert 'Errored' not in out def test_repo_context_errors(file_config_files, capsys): with autofix_lib.repo_context( str(file_config_files.output_dir.join('repo1')), use_color=False, ): assert False out, err = capsys.readouterr() assert 'Errored' in out assert 'assert False' in err def test_interactive_control_c(mock_input, capfd): mock_input.set_side_effect(KeyboardInterrupt) with pytest.raises(SystemExit): autofix_lib._interactive_check(use_color=False) out, _ = capfd.readouterr() assert out == ( '***Looks good [y,n,s,q,?]? ^C\n' 'Goodbye!\n' ) def test_interactive_eof(mock_input, capfd): mock_input.set_side_effect(EOFError) with pytest.raises(SystemExit): autofix_lib._interactive_check(use_color=False) out, _ = capfd.readouterr() assert out == ( '***Looks good [y,n,s,q,?]? ^D\n' 'Goodbye!\n' ) def test_interactive_quit(mock_input, capfd): mock_input.set_side_effect('q') with pytest.raises(SystemExit): autofix_lib._interactive_check(use_color=False) out, _ = capfd.readouterr() assert out == ( '***Looks good [y,n,s,q,?]? <<q\n' 'Goodbye!\n' ) def test_interactive_yes(mock_input, capfd): mock_input.set_side_effect('y') assert autofix_lib._interactive_check(use_color=False) is True out, _ = capfd.readouterr() assert out == '***Looks good [y,n,s,q,?]? <<y\n' def test_interactive_no(mock_input, capfd): mock_input.set_side_effect('n') assert autofix_lib._interactive_check(use_color=False) is False out, _ = capfd.readouterr() assert out == '***Looks good [y,n,s,q,?]? 
<<n\n' def test_interactive_shell(mock_input, capfd): mock_input.set_side_effect('s', 'n') with mock.patch.dict(os.environ, {'SHELL': 'echo'}): assert autofix_lib._interactive_check(use_color=False) is False out, _ = capfd.readouterr() assert out == ( '***Looks good [y,n,s,q,?]? <<s\n' 'Opening an interactive shell, type `exit` to continue.\n' 'Any modifications will be committed.\n' # A newline from echo '\n' '***Looks good [y,n,s,q,?]? <<n\n' ) def test_interactive_help(mock_input, capfd): mock_input.set_side_effect('?', 'n') assert autofix_lib._interactive_check(use_color=False) is False out, _ = capfd.readouterr() assert out == ( '***Looks good [y,n,s,q,?]? <<?\n' 'y (yes): yes it looks good, commit and continue.\n' 'n (no): no, do not commit this repository.\n' 's (shell): open an interactive shell in the repo.\n' 'q (quit, ^C): early exit from the autofixer.\n' '? (help): show this help message.\n' '***Looks good [y,n,s,q,?]? <<n\n' ) def test_interactive_garbage(mock_input, capfd): mock_input.set_side_effect('garbage', 'n') assert autofix_lib._interactive_check(use_color=False) is False out, _ = capfd.readouterr() assert out == ( '***Looks good [y,n,s,q,?]? <<garbage\n' 'Unexpected input: garbage\n' 'y (yes): yes it looks good, commit and continue.\n' 'n (no): no, do not commit this repository.\n' 's (shell): open an interactive shell in the repo.\n' 'q (quit, ^C): early exit from the autofixer.\n' '? (help): show this help message.\n' '***Looks good [y,n,s,q,?]? <<n\n' ) def lower_case_f(): f_contents = open('f').read() with open('f', 'w') as f: f.write(f_contents.lower()) def failing_check_fix(): raise AssertionError('nope!') def test_fix_dry_run_no_change(file_config_files, capfd): autofix_lib.fix( ( str(file_config_files.output_dir.join('repo1')), str(file_config_files.output_dir.join('repo2')), ), apply_fix=lower_case_f, config=load_config(file_config_files.cfg), commit=autofix_lib.Commit('message!', 'test-branch', None), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=None, dry_run=True, interactive=False, ), ) out, err = capfd.readouterr() assert err == '' assert 'Errored' not in out # Showed the diff of what would have happened assert '-OHAI\n+ohai\n' in out assert '-OHELLO\n+ohello\n' in out # Didn't actually perform any changes assert file_config_files.dir1.join('f').read() == 'OHAI\n' assert file_config_files.dir2.join('f').read() == 'OHELLO\n' def test_fix_with_limit(file_config_files, capfd): autofix_lib.fix( ( str(file_config_files.output_dir.join('repo1')), str(file_config_files.output_dir.join('repo2')), ), apply_fix=lower_case_f, config=load_config(file_config_files.cfg), commit=autofix_lib.Commit('message!', 'test-branch', None), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=1, dry_run=True, interactive=False, ), ) out, err = capfd.readouterr() assert err == '' assert 'Errored' not in out # Should still see the diff from the first repository assert '-OHAI\n+ohai\n' in out assert '-OHELLO\n+ohello\n' not in out def test_fix_interactive(file_config_files, capfd, mock_input): mock_input.set_side_effect('y', 'n') autofix_lib.fix( ( str(file_config_files.output_dir.join('repo1')), str(file_config_files.output_dir.join('repo2')), ), apply_fix=lower_case_f, config=load_config(file_config_files.cfg), commit=autofix_lib.Commit('message!', 'test-branch', None), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=None, dry_run=False, interactive=True, ), ) assert file_config_files.dir1.join('f').read() 
== 'ohai\n' assert file_config_files.dir2.join('f').read() == 'OHELLO\n' def test_autofix_makes_commits(file_config_files, capfd): autofix_lib.fix( ( str(file_config_files.output_dir.join('repo1')), str(file_config_files.output_dir.join('repo2')), ), apply_fix=lower_case_f, config=load_config(file_config_files.cfg), commit=autofix_lib.Commit('message!', 'test-branch', 'A B <[email protected]>'), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=None, dry_run=False, interactive=False, ), ) out, err = capfd.readouterr() assert err == '' assert 'Errored' not in out assert file_config_files.dir1.join('f').read() == 'ohai\n' assert file_config_files.dir2.join('f').read() == 'ohello\n' # The branch name should be what we specified last_commit_msg = subprocess.check_output(( 'git', '-C', file_config_files.dir1, 'log', '--format=%s', '--first-parent', '-1', )).decode() assert last_commit_msg == "Merge branch 'all-repos_autofix_test-branch'\n" # We should see a commit from the autofix change we made commit = subprocess.check_output(( 'git', '-C', file_config_files.dir1, 'log', '--patch', '--grep', 'message!', '--format=%an %ae\n%B', )).decode() assert commit.startswith( 'A B [email protected]\n' 'message!\n' '\n' 'Committed via https://github.com/asottile/all-repos\n', ) assert commit.endswith('-OHAI\n+ohai\n') def test_fix_failing_check_no_changes(file_config_files, capfd): autofix_lib.fix( ( str(file_config_files.output_dir.join('repo1')), str(file_config_files.output_dir.join('repo2')), ), apply_fix=lower_case_f, check_fix=failing_check_fix, config=load_config(file_config_files.cfg), commit=autofix_lib.Commit('message!', 'test-branch', None), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=None, dry_run=False, interactive=False, ), ) out, err = capfd.readouterr() assert 'nope!' in err assert out.count('Errored') == 2 # An error while checking should not allow the changes assert file_config_files.dir1.join('f').read() == 'OHAI\n' assert file_config_files.dir2.join('f').read() == 'OHELLO\n' def test_noop_does_not_commit(file_config_files): rev_before1 = testing.git.revparse(file_config_files.dir1) rev_before2 = testing.git.revparse(file_config_files.dir2) autofix_lib.fix( ( str(file_config_files.output_dir.join('repo1')), str(file_config_files.output_dir.join('repo2')), ), apply_fix=lambda: None, config=load_config(file_config_files.cfg), commit=autofix_lib.Commit('message!', 'test-branch', None), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=None, dry_run=False, interactive=False, ), ) rev_after1 = testing.git.revparse(file_config_files.dir1) rev_after2 = testing.git.revparse(file_config_files.dir2) assert (rev_before1, rev_before2) == (rev_after1, rev_after2) def test_fix_non_default_branch(file_config_non_default): clone.main(('--config-filename', str(file_config_non_default.cfg))) autofix_lib.fix( ( str(file_config_non_default.output_dir.join('repo1')), ), apply_fix=lower_case_f, config=load_config(file_config_non_default.cfg), commit=autofix_lib.Commit('message!', 'test-branch', 'A B <[email protected]>'), autofix_settings=autofix_lib.AutofixSettings( jobs=1, color=False, limit=None, dry_run=False, interactive=False, ), ) assert file_config_non_default.dir1.join('f').read() == 'ohai\n'
32.772358
78
0.644753
import os
import subprocess
from unittest import mock

import pytest

from pre_commit.constants import VERSION as PRE_COMMIT_VERSION

import testing.git
from all_repos import autofix_lib
from all_repos import clone
from all_repos import git
from all_repos.config import load_config


@pytest.mark.parametrize(
    ('cli_repos', 'expected'),
    (
        (None, ['found_repo']),
        ([], []),
        (['cli_repo'], ['cli_repo']),
    ),
)
def test_filter_repos(file_config, cli_repos, expected):
    ret = autofix_lib.filter_repos(
        file_config, cli_repos, lambda _: ['found_repo'],
    )
    assert ret == expected


def test_assert_importable_is_importable():
    autofix_lib.assert_importable('pre_commit', install='pre-commit')


def test_assert_importable_not_importable():
    with pytest.raises(SystemExit) as excinfo:
        autofix_lib.assert_importable('watmodule', install='wat')
    msg, = excinfo.value.args
    assert msg == (
        'This tool requires the `watmodule` module to be installed.\n'
        'Try installing it via `pip install wat`.'
    )


def test_require_version_new_enough():
    autofix_lib.require_version_gte('pre-commit', '0.17.0')


def test_require_version_not_new_enough():
    with pytest.raises(SystemExit) as excinfo:
        autofix_lib.require_version_gte('pre-commit', '999')
    msg, = excinfo.value.args
    assert msg == (
        f'This tool requires the `pre-commit` package is at least version '
        f'999. The currently installed version is {PRE_COMMIT_VERSION}.\n\n'
        f'Try `pip install --upgrade pre-commit`'
    )


def test_run(capfd):
    autofix_lib.run('echo', 'h"i')
    out, _ = capfd.readouterr()
    assert out == (
        '$ echo \'h"i\'\n'
        'h"i\n'
    )


def test_cwd(tmpdir):
    orig = os.getcwd()
    with autofix_lib.cwd(tmpdir):
        assert os.getcwd() == tmpdir
    assert os.getcwd() == orig


def test_repo_context_success(file_config_files, capsys):
    expected_rev = testing.git.revparse(file_config_files.dir1)
    with autofix_lib.repo_context(
            str(file_config_files.output_dir.join('repo1')), use_color=False,
    ):
        assert testing.git.revparse('.') == expected_rev
        assert git.remote('.') == file_config_files.dir1
    out, err = capsys.readouterr()
    assert err == ''
    assert 'Errored' not in out


def test_repo_context_errors(file_config_files, capsys):
    with autofix_lib.repo_context(
            str(file_config_files.output_dir.join('repo1')), use_color=False,
    ):
        assert False
    out, err = capsys.readouterr()
    assert 'Errored' in out
    assert 'assert False' in err


def test_interactive_control_c(mock_input, capfd):
    mock_input.set_side_effect(KeyboardInterrupt)
    with pytest.raises(SystemExit):
        autofix_lib._interactive_check(use_color=False)
    out, _ = capfd.readouterr()
    assert out == (
        '***Looks good [y,n,s,q,?]? ^C\n'
        'Goodbye!\n'
    )


def test_interactive_eof(mock_input, capfd):
    mock_input.set_side_effect(EOFError)
    with pytest.raises(SystemExit):
        autofix_lib._interactive_check(use_color=False)
    out, _ = capfd.readouterr()
    assert out == (
        '***Looks good [y,n,s,q,?]? ^D\n'
        'Goodbye!\n'
    )


def test_interactive_quit(mock_input, capfd):
    mock_input.set_side_effect('q')
    with pytest.raises(SystemExit):
        autofix_lib._interactive_check(use_color=False)
    out, _ = capfd.readouterr()
    assert out == (
        '***Looks good [y,n,s,q,?]? <<q\n'
        'Goodbye!\n'
    )


def test_interactive_yes(mock_input, capfd):
    mock_input.set_side_effect('y')
    assert autofix_lib._interactive_check(use_color=False) is True
    out, _ = capfd.readouterr()
    assert out == '***Looks good [y,n,s,q,?]? <<y\n'


def test_interactive_no(mock_input, capfd):
    mock_input.set_side_effect('n')
    assert autofix_lib._interactive_check(use_color=False) is False
    out, _ = capfd.readouterr()
    assert out == '***Looks good [y,n,s,q,?]? <<n\n'


def test_interactive_shell(mock_input, capfd):
    mock_input.set_side_effect('s', 'n')
    with mock.patch.dict(os.environ, {'SHELL': 'echo'}):
        assert autofix_lib._interactive_check(use_color=False) is False
    out, _ = capfd.readouterr()
    assert out == (
        '***Looks good [y,n,s,q,?]? <<s\n'
        'Opening an interactive shell, type `exit` to continue.\n'
        'Any modifications will be committed.\n'
        # A newline from echo
        '\n'
        '***Looks good [y,n,s,q,?]? <<n\n'
    )


def test_interactive_help(mock_input, capfd):
    mock_input.set_side_effect('?', 'n')
    assert autofix_lib._interactive_check(use_color=False) is False
    out, _ = capfd.readouterr()
    assert out == (
        '***Looks good [y,n,s,q,?]? <<?\n'
        'y (yes): yes it looks good, commit and continue.\n'
        'n (no): no, do not commit this repository.\n'
        's (shell): open an interactive shell in the repo.\n'
        'q (quit, ^C): early exit from the autofixer.\n'
        '? (help): show this help message.\n'
        '***Looks good [y,n,s,q,?]? <<n\n'
    )


def test_interactive_garbage(mock_input, capfd):
    mock_input.set_side_effect('garbage', 'n')
    assert autofix_lib._interactive_check(use_color=False) is False
    out, _ = capfd.readouterr()
    assert out == (
        '***Looks good [y,n,s,q,?]? <<garbage\n'
        'Unexpected input: garbage\n'
        'y (yes): yes it looks good, commit and continue.\n'
        'n (no): no, do not commit this repository.\n'
        's (shell): open an interactive shell in the repo.\n'
        'q (quit, ^C): early exit from the autofixer.\n'
        '? (help): show this help message.\n'
        '***Looks good [y,n,s,q,?]? <<n\n'
    )


def lower_case_f():
    f_contents = open('f').read()
    with open('f', 'w') as f:
        f.write(f_contents.lower())


def failing_check_fix():
    raise AssertionError('nope!')


def test_fix_dry_run_no_change(file_config_files, capfd):
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=True, interactive=False,
        ),
    )

    out, err = capfd.readouterr()
    assert err == ''
    assert 'Errored' not in out

    # Showed the diff of what would have happened
    assert '-OHAI\n+ohai\n' in out
    assert '-OHELLO\n+ohello\n' in out

    # Didn't actually perform any changes
    assert file_config_files.dir1.join('f').read() == 'OHAI\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'


def test_fix_with_limit(file_config_files, capfd):
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=1, dry_run=True, interactive=False,
        ),
    )

    out, err = capfd.readouterr()
    assert err == ''
    assert 'Errored' not in out

    # Should still see the diff from the first repository
    assert '-OHAI\n+ohai\n' in out
    assert '-OHELLO\n+ohello\n' not in out


def test_fix_interactive(file_config_files, capfd, mock_input):
    mock_input.set_side_effect('y', 'n')
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=True,
        ),
    )

    assert file_config_files.dir1.join('f').read() == 'ohai\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'


def test_autofix_makes_commits(file_config_files, capfd):
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lower_case_f,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', 'A B <[email protected]>'),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )

    out, err = capfd.readouterr()
    assert err == ''
    assert 'Errored' not in out
    assert file_config_files.dir1.join('f').read() == 'ohai\n'
    assert file_config_files.dir2.join('f').read() == 'ohello\n'

    # The branch name should be what we specified
    last_commit_msg = subprocess.check_output((
        'git', '-C', file_config_files.dir1, 'log',
        '--format=%s', '--first-parent', '-1',
    )).decode()
    assert last_commit_msg == "Merge branch 'all-repos_autofix_test-branch'\n"

    # We should see a commit from the autofix change we made
    commit = subprocess.check_output((
        'git', '-C', file_config_files.dir1, 'log',
        '--patch', '--grep', 'message!', '--format=%an %ae\n%B',
    )).decode()
    assert commit.startswith(
        'A B [email protected]\n'
        'message!\n'
        '\n'
        'Committed via https://github.com/asottile/all-repos\n',
    )
    assert commit.endswith('-OHAI\n+ohai\n')


def test_fix_failing_check_no_changes(file_config_files, capfd):
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lower_case_f,
        check_fix=failing_check_fix,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )

    out, err = capfd.readouterr()
    assert 'nope!' in err
    assert out.count('Errored') == 2

    # An error while checking should not allow the changes
    assert file_config_files.dir1.join('f').read() == 'OHAI\n'
    assert file_config_files.dir2.join('f').read() == 'OHELLO\n'


def test_noop_does_not_commit(file_config_files):
    rev_before1 = testing.git.revparse(file_config_files.dir1)
    rev_before2 = testing.git.revparse(file_config_files.dir2)
    autofix_lib.fix(
        (
            str(file_config_files.output_dir.join('repo1')),
            str(file_config_files.output_dir.join('repo2')),
        ),
        apply_fix=lambda: None,
        config=load_config(file_config_files.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', None),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )
    rev_after1 = testing.git.revparse(file_config_files.dir1)
    rev_after2 = testing.git.revparse(file_config_files.dir2)
    assert (rev_before1, rev_before2) == (rev_after1, rev_after2)


def test_fix_non_default_branch(file_config_non_default):
    clone.main(('--config-filename', str(file_config_non_default.cfg)))
    autofix_lib.fix(
        (
            str(file_config_non_default.output_dir.join('repo1')),
        ),
        apply_fix=lower_case_f,
        config=load_config(file_config_non_default.cfg),
        commit=autofix_lib.Commit('message!', 'test-branch', 'A B <[email protected]>'),
        autofix_settings=autofix_lib.AutofixSettings(
            jobs=1, color=False, limit=None, dry_run=False, interactive=False,
        ),
    )

    assert file_config_non_default.dir1.join('f').read() == 'ohai\n'
true
true
f70035de58a42b6f7cb899e0378cdef544b4ada4
426
py
Python
django_quicky/namegen/namegen.py
sametmax/django-quicky
2a87dbdcc6db400aff5a9119533bd3784fc4afb4
[ "Zlib" ]
149
2015-01-02T19:48:47.000Z
2022-02-18T15:43:34.000Z
django_quicky/namegen/namegen.py
keshapps/django-quicky
2a87dbdcc6db400aff5a9119533bd3784fc4afb4
[ "Zlib" ]
3
2015-01-28T18:44:42.000Z
2017-05-23T18:50:02.000Z
django_quicky/namegen/namegen.py
keshapps/django-quicky
2a87dbdcc6db400aff5a9119533bd3784fc4afb4
[ "Zlib" ]
11
2015-01-05T19:22:16.000Z
2021-01-25T13:06:20.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
    Generate random usernames in
"""

import random

from .names import names as default_names


class NameGenerator(object):

    def __init__(self, names=None):
        self.names = names or default_names

    def __call__(self):
        return self.names.pop(random.randrange(len(self.names)))

    def __iter__(self):
        while self.names:
            yield self()
16.384615
64
0.638498
import random

from .names import names as default_names


class NameGenerator(object):

    def __init__(self, names=None):
        self.names = names or default_names

    def __call__(self):
        return self.names.pop(random.randrange(len(self.names)))

    def __iter__(self):
        while self.names:
            yield self()
true
true
f7003651cdfe6161aff5b98a1b8d08b57ca2b62d
718
py
Python
authors/utils/authentication_handlers.py
andela/ah-the-immortals-backend
d0f73bf166ad41f243cff6d82caced2f9facf2f9
[ "BSD-3-Clause" ]
2
2020-03-11T12:50:18.000Z
2020-07-23T13:07:07.000Z
authors/utils/authentication_handlers.py
andela/ah-the-immortals-backend
d0f73bf166ad41f243cff6d82caced2f9facf2f9
[ "BSD-3-Clause" ]
30
2019-04-23T17:00:25.000Z
2022-02-10T08:45:32.000Z
authors/utils/authentication_handlers.py
andela/ah-the-immortals-backend
d0f73bf166ad41f243cff6d82caced2f9facf2f9
[ "BSD-3-Clause" ]
11
2019-06-29T11:52:31.000Z
2022-02-13T02:05:43.000Z
from django.utils import timezone
from rest_framework.authtoken.models import Token


class AuthTokenHandler:
    """
    Handles variations in auth token
    """
    @staticmethod
    def expired_token(auth_token):
        """
        Checks expiry of auth token
        """
        utc_now = timezone.now()
        expired = auth_token.created < utc_now - \
            timezone.timedelta(hours=24)
        return expired

    @staticmethod
    def create_auth_token(user):
        """
        Creates an auth token for a user
        """
        token, created = Token.objects.get_or_create(user=user)
        if not created:
            token.created = timezone.now()
            token.save()
        return token
24.758621
63
0.600279
from django.utils import timezone
from rest_framework.authtoken.models import Token


class AuthTokenHandler:
    @staticmethod
    def expired_token(auth_token):
        utc_now = timezone.now()
        expired = auth_token.created < utc_now - \
            timezone.timedelta(hours=24)
        return expired

    @staticmethod
    def create_auth_token(user):
        token, created = Token.objects.get_or_create(user=user)
        if not created:
            token.created = timezone.now()
            token.save()
        return token
true
true
f700365fe0ecc671437a603b7d5f3a8575dde895
511
py
Python
StemLemPipe/__init__.py
PasaOpasen/Stem-Lem-Pipeline
98f5ab7e0aa019f5403830fc3e6176d06124f365
[ "MIT" ]
1
2020-12-03T23:32:07.000Z
2020-12-03T23:32:07.000Z
StemLemPipe/__init__.py
PasaOpasen/Stem-Lem-Pipeline
98f5ab7e0aa019f5403830fc3e6176d06124f365
[ "MIT" ]
null
null
null
StemLemPipe/__init__.py
PasaOpasen/Stem-Lem-Pipeline
98f5ab7e0aa019f5403830fc3e6176d06124f365
[ "MIT" ]
null
null
null
from .useful_functions import get_ngrams, words_to_ngrams_list, remove_hook_words, remove_words
from .transformers import phrases_transform, phrases2lower, phrases_without_excess_symbols
from .tokenizers import text2sentences, split_by_words, sentence_split
from .stemlem_operators import create_stemmer_lemmer, create_stemmer, create_lemmatizer
from .pipeline import StemLemPipeline
from .simplifiers import sum_phrases, wordlist2set
from .stopwords import stopwords
from .metrics import Levenstein
25.55
95
0.861057
from .useful_functions import get_ngrams, words_to_ngrams_list, remove_hook_words, remove_words
from .transformers import phrases_transform, phrases2lower, phrases_without_excess_symbols
from .tokenizers import text2sentences, split_by_words, sentence_split
from .stemlem_operators import create_stemmer_lemmer, create_stemmer, create_lemmatizer
from .pipeline import StemLemPipeline
from .simplifiers import sum_phrases, wordlist2set
from .stopwords import stopwords
from .metrics import Levenstein
true
true
f700372959084882c409772f7096a8fa1fe06d06
971
py
Python
Vortex/__main__.py
1upCommunity/Vortex
abd7d6de3f66ba91b9b6b1a8c338420d90832b08
[ "MIT" ]
null
null
null
Vortex/__main__.py
1upCommunity/Vortex
abd7d6de3f66ba91b9b6b1a8c338420d90832b08
[ "MIT" ]
null
null
null
Vortex/__main__.py
1upCommunity/Vortex
abd7d6de3f66ba91b9b6b1a8c338420d90832b08
[ "MIT" ]
null
null
null
# internal imports
import dependency_checker
import dependency_installer
import dependency_updater
import logger
from rendering import VortexWindow

# external imports
import pyglet
import sys

# check if python version is too old. If it is, exit.
if sys.version_info < (3, 6):  # if python version is less than 3.6
    logger.critical(
        "Vortex", "Python version is too old. Please use python 3.6 or higher.")
    sys.exit(1)

# check all deps and update them if needed
if not dependency_checker.check_deps():  # if any deps are missing
    dependency_installer.install_deps()  # install them
    if not dependency_checker.check_deps():  # if any deps are still missing
        # warn user and exit
        logger.warn(
            "Vortex", "Dependencies are not installed. Please install them manually.")
        sys.exit(1)
else:
    dependency_updater.update_deps()  # update deps

window = VortexWindow()  # create the window
pyglet.app.run()  # run the app
31.322581
86
0.719876
import dependency_checker
import dependency_installer
import dependency_updater
import logger
from rendering import VortexWindow

import pyglet
import sys

if sys.version_info < (3, 6):
    logger.critical(
        "Vortex", "Python version is too old. Please use python 3.6 or higher.")
    sys.exit(1)

if not dependency_checker.check_deps():
    dependency_installer.install_deps()
    if not dependency_checker.check_deps():
        logger.warn(
            "Vortex", "Dependencies are not installed. Please install them manually.")
        sys.exit(1)
else:
    dependency_updater.update_deps()

window = VortexWindow()
pyglet.app.run()
true
true
f7003754514cd37f51ea1286be38a5043fe6241f
1,219
py
Python
flambe/nlp/transformers/gpt.py
axel-sirota/flambe
15e985ab456973b40e6b75c6b3d153ea9b4e8849
[ "MIT" ]
null
null
null
flambe/nlp/transformers/gpt.py
axel-sirota/flambe
15e985ab456973b40e6b75c6b3d153ea9b4e8849
[ "MIT" ]
null
null
null
flambe/nlp/transformers/gpt.py
axel-sirota/flambe
15e985ab456973b40e6b75c6b3d153ea9b4e8849
[ "MIT" ]
null
null
null
""" Intergation of the pytorch_transformers openai and gpt2 modules. Note that these objects are only to be used to load pretrained models. The pytorch-transformers library wasn't designed to train these models from scratch. """ import pytorch_transformers as pt from flambe.nlp.transformers.utils import TransformerTextField, TransformerEmbedder class GPTTextField(TransformerTextField): """Integrate the pytorch_transformers OpenAIGPTTokenizer. Currently available aliases: . `openai-gpt` """ _cls = pt.OpenAIGPTTokenizer class GPTEmbedder(TransformerEmbedder): """Integrate the pytorch_transformers OpenAIGPTmodel. Currently available aliases: . `openai-gpt` """ _cls = pt.OpenAIGPTModel class GPT2TextField(TransformerTextField): """Integrate the pytorch_transformers GPT2Tokenizer. Currently available aliases: . `gpt2` . `gpt2-medium` . `gpt2-large` """ _cls = pt.GPT2Tokenizer class GPT2Embedder(TransformerEmbedder): """Integrate the pytorch_transformers GPT2Model. Currently available aliases: . `gpt2` . `gpt2-medium` . `gpt2-large` """ _cls = pt.GPT2Model
19.983607
83
0.704676
import pytorch_transformers as pt

from flambe.nlp.transformers.utils import TransformerTextField, TransformerEmbedder


class GPTTextField(TransformerTextField):
    _cls = pt.OpenAIGPTTokenizer


class GPTEmbedder(TransformerEmbedder):
    _cls = pt.OpenAIGPTModel


class GPT2TextField(TransformerTextField):
    _cls = pt.GPT2Tokenizer


class GPT2Embedder(TransformerEmbedder):
    _cls = pt.GPT2Model
true
true
f700384c604ac91f885c84ab6ed838d3ba8c4771
13,878
py
Python
pyzoo/zoo/zouwu/model/Seq2Seq.py
GZHoffie/analytics-zoo
d0258aa113ffd1a5c4927376fb32b09fb0baf73c
[ "Apache-2.0" ]
null
null
null
pyzoo/zoo/zouwu/model/Seq2Seq.py
GZHoffie/analytics-zoo
d0258aa113ffd1a5c4927376fb32b09fb0baf73c
[ "Apache-2.0" ]
null
null
null
pyzoo/zoo/zouwu/model/Seq2Seq.py
GZHoffie/analytics-zoo
d0258aa113ffd1a5c4927376fb32b09fb0baf73c
[ "Apache-2.0" ]
null
null
null
#
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, LSTM, Dense
import tensorflow.keras as keras

from zoo.automl.model.abstract import BaseModel
from zoo.automl.common.util import *
from zoo.automl.common.metrics import Evaluator


class LSTMSeq2Seq(BaseModel):

    def __init__(self, check_optional_config=True, future_seq_len=2):
        """
        Constructor of LSTM Seq2Seq model
        """
        self.model = None
        self.past_seq_len = None
        self.future_seq_len = future_seq_len
        self.feature_num = None
        self.target_col_num = None
        self.metric = None
        self.latent_dim = None
        self.batch_size = None
        self.check_optional_config = check_optional_config

    def _build_train(self, mc=False, **config):
        """
        build LSTM Seq2Seq model
        :param config:
        :return:
        """
        super()._check_config(**config)
        self.metric = config.get('metric', 'mean_squared_error')
        self.latent_dim = config.get('latent_dim', 128)
        self.dropout = config.get('dropout', 0.2)
        self.lr = config.get('lr', 0.001)
        # for restore in continuous training
        self.batch_size = config.get('batch_size', 64)
        training = True if mc else None

        # Define an input sequence and process it.
        self.encoder_inputs = Input(shape=(None, self.feature_num), name="encoder_inputs")
        encoder = LSTM(units=self.latent_dim,
                       dropout=self.dropout,
                       return_state=True,
                       name="encoder_lstm")
        encoder_outputs, state_h, state_c = encoder(self.encoder_inputs, training=training)
        # We discard `encoder_outputs` and only keep the states.
        self.encoder_states = [state_h, state_c]

        # Set up the decoder, using `encoder_states` as initial state.
        self.decoder_inputs = Input(shape=(None, self.target_col_num), name="decoder_inputs")
        # We set up our decoder to return full output sequences,
        # and to return internal states as well. We don't use the
        # return states in the training model, but we will use them in inference.
        self.decoder_lstm = LSTM(self.latent_dim,
                                 dropout=self.dropout,
                                 return_sequences=True,
                                 return_state=True,
                                 name="decoder_lstm")
        decoder_outputs, _, _ = self.decoder_lstm(self.decoder_inputs,
                                                  training=training,
                                                  initial_state=self.encoder_states)
        self.decoder_dense = Dense(self.target_col_num, name="decoder_dense")
        decoder_outputs = self.decoder_dense(decoder_outputs)

        # Define the model that will turn
        # `encoder_input_data` & `decoder_input_data` into `decoder_target_data`
        self.model = Model([self.encoder_inputs, self.decoder_inputs], decoder_outputs)
        self.model.compile(loss='mse',
                           metrics=[self.metric],
                           optimizer=keras.optimizers.RMSprop(lr=self.lr))
        return self.model

    def _restore_model(self):
        self.encoder_inputs = self.model.input[0]  # input_1
        encoder_outputs, state_h_enc, state_c_enc = self.model.layers[2].output  # lstm_1
        self.encoder_states = [state_h_enc, state_c_enc]

        self.decoder_inputs = self.model.input[1]  # input_2
        self.decoder_lstm = self.model.layers[3]
        self.decoder_dense = self.model.layers[4]

    def _build_inference(self, mc=False):
        training = True if mc else None
        # from our previous model - mapping encoder sequence to state vectors
        encoder_model = Model(self.encoder_inputs, self.encoder_states)

        # A modified version of the decoding stage that takes in predicted target inputs
        # and encoded state vectors, returning predicted target outputs and decoder state vectors.
        # We need to hang onto these state vectors to run the next step of the inference loop.
        decoder_state_input_h = Input(shape=(self.latent_dim,))
        decoder_state_input_c = Input(shape=(self.latent_dim,))
        decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]

        decoder_outputs, state_h, state_c = self.decoder_lstm(self.decoder_inputs,
                                                              training=training,
                                                              initial_state=decoder_states_inputs)
        decoder_states = [state_h, state_c]
        decoder_outputs = self.decoder_dense(decoder_outputs)
        decoder_model = Model([self.decoder_inputs] + decoder_states_inputs,
                              [decoder_outputs] + decoder_states)
        return encoder_model, decoder_model

    def _decode_sequence(self, input_seq, mc=False):
        encoder_model, decoder_model = self._build_inference(mc=mc)
        # Encode the input as state vectors.
        states_value = encoder_model.predict(input_seq)

        # Generate empty target sequence of length 1.
        target_seq = np.zeros((len(input_seq), 1, self.target_col_num))

        # Populate the first target sequence with end of encoding series value
        target_seq[:, 0] = input_seq[:, -1, :self.target_col_num]

        # Sampling loop for a batch of sequences - we will fill decoded_seq with predictions
        # (to simplify, here we assume a batch of size 1).
        decoded_seq = np.zeros((len(input_seq), self.future_seq_len, self.target_col_num))

        for i in range(self.future_seq_len):
            output, h, c = decoder_model.predict([target_seq] + states_value)

            decoded_seq[:, i] = output[:, 0]

            # Update the target sequence (of length 1).
            target_seq = np.zeros((len(input_seq), 1, self.target_col_num))
            target_seq[:, 0] = output[:, 0]

            # Update states
            states_value = [h, c]

        return decoded_seq

    def _get_decoder_inputs(self, x, y):
        """
        lagged target series for teacher forcing
        decoder_input data is one timestamp ahead of y
        :param x: 3-d array in format of (sample_num, past_sequence_len, feature_num)
        :param y: 3-d array in format of (sample_num, future_sequence_len, target_col_num)
            Need to expand dimension if y is a 2-d array with one target col
        :return: 3-d array of decoder inputs
        """
        decoder_input_data = np.zeros(y.shape)
        decoder_input_data[1:, ] = y[:-1, ]
        decoder_input_data[0, 0] = x[-1, -1, :self.target_col_num]
        decoder_input_data[0, 1:] = y[0, :-1]

        return decoder_input_data

    def _get_len(self, x, y):
        self.past_seq_len = x.shape[1]
        self.feature_num = x.shape[2]
        # self.future_seq_len = y.shape[1]
        self.target_col_num = y.shape[2]

    def _expand_y(self, y):
        """
        expand dims for y.
        :param y:
        :return:
        """
        while len(y.shape) < 3:
            y = np.expand_dims(y, axis=2)
        return y

    def _pre_processing(self, x, y, validation_data):
        """
        pre_process input data.
        1. expand dims for y and val_y
        2. get decoder inputs for train data
        3. get decoder inputs for validation data
        :param x: train_x
        :param y: train_y
        :param validation_data:
        :return: network input
        """
        y = self._expand_y(y)
        self._get_len(x, y)
        decoder_input_data = self._get_decoder_inputs(x, y)
        if validation_data is not None:
            val_x, val_y = validation_data
            val_y = self._expand_y(val_y)
            val_decoder_input = self._get_decoder_inputs(val_x, val_y)
            validation_data = ([val_x, val_decoder_input], val_y)
        return x, y, decoder_input_data, validation_data

    def fit_eval(self, data, validation_data=None, mc=False, verbose=0, **config):
        """
        fit for one iteration
        :param data: could be a tuple with numpy ndarray with form (x, y)
        x: 3-d array in format (no. of samples, past sequence length, 2+feature length),
        in the last dimension, the 1st col is the time index (data type needs to be numpy
        datetime type, e.g. "datetime64"),
        the 2nd col is the target value (data type should be numeric)
        y: 2-d numpy array in format (no. of samples, future sequence length)
        if future sequence length > 1,
        or 1-d numpy array in format (no. of samples, ) if future sequence length = 1
        :param validation_data: tuple in format (x_test,y_test), data used for validation.
        If this is specified, validation result will be the optimization target for automl.
        Otherwise, train metric will be the optimization target.
        :param config: optimization hyper parameters
        :return: the resulting metric
        """
        x, y = data[0], data[1]
        x, y, decoder_input_data, validation_data = self._pre_processing(x, y, validation_data)

        # if model is not initialized, __build the model
        if self.model is None:
            self._build_train(mc=mc, **config)

        # batch_size = config.get('batch_size', 64)
        # lr = self.lr
        # name = "seq2seq-batch_size-{}-epochs-{}-lr-{}-time-{}"\
        #     .format(batch_size, epochs, lr, time())
        # tensorboard = TensorBoard(log_dir="logs/" + name)

        hist = self.model.fit([x, decoder_input_data], y,
                              validation_data=validation_data,
                              batch_size=self.batch_size,
                              epochs=config.get("epochs", 10),
                              verbose=verbose,
                              # callbacks=[tensorboard]
                              )
        # print(hist.history)
        if validation_data is None:
            # get train metrics
            # results = self.model.evaluate(x, y)
            result = hist.history.get(self.metric)[-1]
        else:
            result = hist.history.get('val_' + str(self.metric))[-1]
        return result

    def evaluate(self, x, y, metric=['mse']):
        """
        Evaluate on x, y
        :param x: input
        :param y: target
        :param metric: a list of metrics in string format
        :return: a list of metric evaluation results
        """
        y_pred = self.predict(x)
        # y = np.squeeze(y, axis=2)
        if self.target_col_num == 1:
            return [Evaluator.evaluate(m, y, y_pred) for m in metric]
        else:
            return [np.array([Evaluator.evaluate(m, y[:, i, :], y_pred[:, i, :])
                              for i in range(self.future_seq_len)])
                    for m in metric]

    def predict(self, x, mc=False):
        """
        Prediction on x.
        :param x: input
        :return: predicted y (expected dimension = 2)
        """
        y_pred = self._decode_sequence(x, mc=mc)
        if self.target_col_num == 1:
            y_pred = np.squeeze(y_pred, axis=2)
        return y_pred

    def predict_with_uncertainty(self, x, n_iter=100):
        result = np.array([self.predict(x, mc=True) for i in range(n_iter)])
        prediction = result.mean(axis=0)
        uncertainty = result.var(axis=0)
        return prediction, uncertainty

    def save(self, model_path, config_path):
        """
        save model to file.
        :param model_path: the model file path to be saved to.
        :param config_path: the config file path to be saved to.
        :return:
        """
        self.model.save(model_path)

        config_to_save = {"past_seq_len": self.past_seq_len,
                          "feature_num": self.feature_num,
                          "future_seq_len": self.future_seq_len,
                          "target_col_num": self.target_col_num,
                          "metric": self.metric,
                          "latent_dim": self.latent_dim,
                          "batch_size": self.batch_size}
        save_config(config_path, config_to_save)

    def restore(self, model_path, **config):
        """
        restore model from file
        :param model_path: the model file
        :param config: the trial config
        :return: the restored model
        """
        self.past_seq_len = config["past_seq_len"]
        self.feature_num = config["feature_num"]
        self.future_seq_len = config["future_seq_len"]
        self.target_col_num = config["target_col_num"]
        self.metric = config["metric"]
        self.latent_dim = config["latent_dim"]
        self.batch_size = config["batch_size"]

        self.model = keras.models.load_model(model_path)
        self._restore_model()
        # self.model.load_weights(file_path)

    def _get_required_parameters(self):
        return {
            # 'input_shape_x',
            # 'input_shape_y',
            # 'out_units'
        }

    def _get_optional_parameters(self):
        return {
            'past_seq_len'
            'latent_dim'
            'dropout',
            'metric',
            'lr',
            'epochs',
            'batch_size'
        }
40.343023
98
0.602392
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, LSTM, Dense
import tensorflow.keras as keras

from zoo.automl.model.abstract import BaseModel
from zoo.automl.common.util import *
from zoo.automl.common.metrics import Evaluator


class LSTMSeq2Seq(BaseModel):

    def __init__(self, check_optional_config=True, future_seq_len=2):
        self.model = None
        self.past_seq_len = None
        self.future_seq_len = future_seq_len
        self.feature_num = None
        self.target_col_num = None
        self.metric = None
        self.latent_dim = None
        self.batch_size = None
        self.check_optional_config = check_optional_config

    def _build_train(self, mc=False, **config):
        super()._check_config(**config)
        self.metric = config.get('metric', 'mean_squared_error')
        self.latent_dim = config.get('latent_dim', 128)
        self.dropout = config.get('dropout', 0.2)
        self.lr = config.get('lr', 0.001)
        self.batch_size = config.get('batch_size', 64)
        training = True if mc else None

        self.encoder_inputs = Input(shape=(None, self.feature_num), name="encoder_inputs")
        encoder = LSTM(units=self.latent_dim,
                       dropout=self.dropout,
                       return_state=True,
                       name="encoder_lstm")
        encoder_outputs, state_h, state_c = encoder(self.encoder_inputs, training=training)
        self.encoder_states = [state_h, state_c]

        self.decoder_inputs = Input(shape=(None, self.target_col_num), name="decoder_inputs")
        # return states in the training model, but we will use them in inference.
        self.decoder_lstm = LSTM(self.latent_dim,
                                 dropout=self.dropout,
                                 return_sequences=True,
                                 return_state=True,
                                 name="decoder_lstm")
        decoder_outputs, _, _ = self.decoder_lstm(self.decoder_inputs,
                                                  training=training,
                                                  initial_state=self.encoder_states)
        self.decoder_dense = Dense(self.target_col_num, name="decoder_dense")
        decoder_outputs = self.decoder_dense(decoder_outputs)

        # Define the model that will turn
        # `encoder_input_data` & `decoder_input_data` into `decoder_target_data`
        self.model = Model([self.encoder_inputs, self.decoder_inputs], decoder_outputs)
        self.model.compile(loss='mse',
                           metrics=[self.metric],
                           optimizer=keras.optimizers.RMSprop(lr=self.lr))
        return self.model

    def _restore_model(self):
        self.encoder_inputs = self.model.input[0]  # input_1
        encoder_outputs, state_h_enc, state_c_enc = self.model.layers[2].output  # lstm_1
        self.encoder_states = [state_h_enc, state_c_enc]

        self.decoder_inputs = self.model.input[1]  # input_2
        self.decoder_lstm = self.model.layers[3]
        self.decoder_dense = self.model.layers[4]

    def _build_inference(self, mc=False):
        training = True if mc else None
        # from our previous model - mapping encoder sequence to state vectors
        encoder_model = Model(self.encoder_inputs, self.encoder_states)

        # A modified version of the decoding stage that takes in predicted target inputs
        # and encoded state vectors, returning predicted target outputs and decoder state vectors.
        # We need to hang onto these state vectors to run the next step of the inference loop.
        decoder_state_input_h = Input(shape=(self.latent_dim,))
        decoder_state_input_c = Input(shape=(self.latent_dim,))
        decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c]

        decoder_outputs, state_h, state_c = self.decoder_lstm(self.decoder_inputs,
                                                              training=training,
                                                              initial_state=decoder_states_inputs)
        decoder_states = [state_h, state_c]
        decoder_outputs = self.decoder_dense(decoder_outputs)
        decoder_model = Model([self.decoder_inputs] + decoder_states_inputs,
                              [decoder_outputs] + decoder_states)
        return encoder_model, decoder_model

    def _decode_sequence(self, input_seq, mc=False):
        encoder_model, decoder_model = self._build_inference(mc=mc)
        # Encode the input as state vectors.
        states_value = encoder_model.predict(input_seq)

        # Generate empty target sequence of length 1.
        target_seq = np.zeros((len(input_seq), 1, self.target_col_num))

        # Populate the first target sequence with end of encoding series value
        target_seq[:, 0] = input_seq[:, -1, :self.target_col_num]

        # Sampling loop for a batch of sequences - we will fill decoded_seq with predictions
        # (to simplify, here we assume a batch of size 1).
        decoded_seq = np.zeros((len(input_seq), self.future_seq_len, self.target_col_num))

        for i in range(self.future_seq_len):
            output, h, c = decoder_model.predict([target_seq] + states_value)

            decoded_seq[:, i] = output[:, 0]

            # Update the target sequence (of length 1).
            target_seq = np.zeros((len(input_seq), 1, self.target_col_num))
            target_seq[:, 0] = output[:, 0]

            # Update states
            states_value = [h, c]

        return decoded_seq

    def _get_decoder_inputs(self, x, y):
        decoder_input_data = np.zeros(y.shape)
        decoder_input_data[1:, ] = y[:-1, ]
        decoder_input_data[0, 0] = x[-1, -1, :self.target_col_num]
        decoder_input_data[0, 1:] = y[0, :-1]

        return decoder_input_data

    def _get_len(self, x, y):
        self.past_seq_len = x.shape[1]
        self.feature_num = x.shape[2]
        # self.future_seq_len = y.shape[1]
        self.target_col_num = y.shape[2]

    def _expand_y(self, y):
        while len(y.shape) < 3:
            y = np.expand_dims(y, axis=2)
        return y

    def _pre_processing(self, x, y, validation_data):
        y = self._expand_y(y)
        self._get_len(x, y)
        decoder_input_data = self._get_decoder_inputs(x, y)
        if validation_data is not None:
            val_x, val_y = validation_data
            val_y = self._expand_y(val_y)
            val_decoder_input = self._get_decoder_inputs(val_x, val_y)
            validation_data = ([val_x, val_decoder_input], val_y)
        return x, y, decoder_input_data, validation_data

    def fit_eval(self, data, validation_data=None, mc=False, verbose=0, **config):
        x, y = data[0], data[1]
        x, y, decoder_input_data, validation_data = self._pre_processing(x, y, validation_data)

        # if model is not initialized, __build the model
        if self.model is None:
            self._build_train(mc=mc, **config)

        # batch_size = config.get('batch_size', 64)
        # lr = self.lr
        # name = "seq2seq-batch_size-{}-epochs-{}-lr-{}-time-{}"\
        #     .format(batch_size, epochs, lr, time())
        # tensorboard = TensorBoard(log_dir="logs/" + name)

        hist = self.model.fit([x, decoder_input_data], y,
                              validation_data=validation_data,
                              batch_size=self.batch_size,
                              epochs=config.get("epochs", 10),
                              verbose=verbose,
                              # callbacks=[tensorboard]
                              )
        # print(hist.history)
        if validation_data is None:
            # get train metrics
            # results = self.model.evaluate(x, y)
            result = hist.history.get(self.metric)[-1]
        else:
            result = hist.history.get('val_' + str(self.metric))[-1]
        return result

    def evaluate(self, x, y, metric=['mse']):
        y_pred = self.predict(x)
        # y = np.squeeze(y, axis=2)
        if self.target_col_num == 1:
            return [Evaluator.evaluate(m, y, y_pred) for m in metric]
        else:
            return [np.array([Evaluator.evaluate(m, y[:, i, :], y_pred[:, i, :])
                              for i in range(self.future_seq_len)])
                    for m in metric]

    def predict(self, x, mc=False):
        y_pred = self._decode_sequence(x, mc=mc)
        if self.target_col_num == 1:
            y_pred = np.squeeze(y_pred, axis=2)
        return y_pred

    def predict_with_uncertainty(self, x, n_iter=100):
        result = np.array([self.predict(x, mc=True) for i in range(n_iter)])
        prediction = result.mean(axis=0)
        uncertainty = result.var(axis=0)
        return prediction, uncertainty

    def save(self, model_path, config_path):
        self.model.save(model_path)

        config_to_save = {"past_seq_len": self.past_seq_len,
                          "feature_num": self.feature_num,
                          "future_seq_len": self.future_seq_len,
                          "target_col_num": self.target_col_num,
                          "metric": self.metric,
                          "latent_dim": self.latent_dim,
                          "batch_size": self.batch_size}
        save_config(config_path, config_to_save)

    def restore(self, model_path, **config):
        self.past_seq_len = config["past_seq_len"]
        self.feature_num = config["feature_num"]
        self.future_seq_len = config["future_seq_len"]
        self.target_col_num = config["target_col_num"]
        self.metric = config["metric"]
        self.latent_dim = config["latent_dim"]
        self.batch_size = config["batch_size"]

        self.model = keras.models.load_model(model_path)
        self._restore_model()
        # self.model.load_weights(file_path)

    def _get_required_parameters(self):
        return {
            # 'input_shape_x',
            # 'input_shape_y',
            # 'out_units'
        }

    def _get_optional_parameters(self):
        return {
            'past_seq_len'
            'latent_dim'
            'dropout',
            'metric',
            'lr',
            'epochs',
            'batch_size'
        }
true
true
f70038c0705176be86c930b583ff2896d4eb6f71
1,547
py
Python
lib/src/layers/RNN.py
ILoveRedEd55/AIML_Detection_System
b2fdd8475f069884060f7bb31f41953bae057d7b
[ "BSD-3-Clause" ]
null
null
null
lib/src/layers/RNN.py
ILoveRedEd55/AIML_Detection_System
b2fdd8475f069884060f7bb31f41953bae057d7b
[ "BSD-3-Clause" ]
null
null
null
lib/src/layers/RNN.py
ILoveRedEd55/AIML_Detection_System
b2fdd8475f069884060f7bb31f41953bae057d7b
[ "BSD-3-Clause" ]
null
null
null
from src.layers.LayerHelper import *
from settings import LayerSettings as layerSettings
import tensorflow as tf
import os
CUDA_VISIBLE_DEVICES=0
os.environ["CUDA_VISIBLE_DEVICES"] = "0"  # set gpu number

def LSTM(name_, inputTensor_, numberOfOutputs_, isTraining_, dropoutProb_=None):
    with tf.name_scope(name_):
        cell = tf.nn.rnn_cell.LSTMCell(num_units=numberOfOutputs_,
                                       use_peepholes=True,
                                       initializer=layerSettings.LSTM_INITIALIZER,
                                       forget_bias=1.0,
                                       state_is_tuple=True,
                                       activation=tf.nn.tanh,
                                       name=name_+"_cell")
        if dropoutProb_ != None:
            dropoutProbTensor = tf.cond(isTraining_, lambda: 0.5, lambda: 1.0)
            cell = tf.nn.rnn_cell.DropoutWrapper(cell,
                                                 input_keep_prob=dropoutProbTensor,
                                                 output_keep_prob=dropoutProbTensor)

        statePlaceHolder = tf.nn.rnn_cell.LSTMStateTuple(
                                tf.placeholder(layerSettings.FLOAT_TYPE, [None, numberOfOutputs_]),
                                tf.placeholder(layerSettings.FLOAT_TYPE, [None, numberOfOutputs_]) )

        outputTensor, stateTensor = tf.nn.dynamic_rnn(  cell=cell,
                                                        initial_state=statePlaceHolder,
                                                        inputs=inputTensor_)

        # Add Regularization Loss
        for eachVariable in tf.trainable_variables():
            if name_ in eachVariable.name:
                if ('bias' not in eachVariable.name)and(layerSettings.REGULARIZER_WEIGHTS_DECAY != None):
                    regularizationLoss = L2_Regularizer(eachVariable)
                    tf.losses.add_loss(regularizationLoss, loss_collection=tf.GraphKeys.REGULARIZATION_LOSSES)

    return outputTensor, stateTensor, statePlaceHolder
37.731707
119
0.752424
from src.layers.LayerHelper import *
from settings import LayerSettings as layerSettings
import tensorflow as tf
import os
CUDA_VISIBLE_DEVICES=0
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

def LSTM(name_, inputTensor_, numberOfOutputs_, isTraining_, dropoutProb_=None):
    with tf.name_scope(name_):
        cell = tf.nn.rnn_cell.LSTMCell(num_units=numberOfOutputs_,
                                       use_peepholes=True,
                                       initializer=layerSettings.LSTM_INITIALIZER,
                                       forget_bias=1.0,
                                       state_is_tuple=True,
                                       activation=tf.nn.tanh,
                                       name=name_+"_cell")
        if dropoutProb_ != None:
            dropoutProbTensor = tf.cond(isTraining_, lambda: 0.5, lambda: 1.0)
            cell = tf.nn.rnn_cell.DropoutWrapper(cell,
                                                 input_keep_prob=dropoutProbTensor,
                                                 output_keep_prob=dropoutProbTensor)

        statePlaceHolder = tf.nn.rnn_cell.LSTMStateTuple(
                                tf.placeholder(layerSettings.FLOAT_TYPE, [None, numberOfOutputs_]),
                                tf.placeholder(layerSettings.FLOAT_TYPE, [None, numberOfOutputs_]) )

        outputTensor, stateTensor = tf.nn.dynamic_rnn(  cell=cell,
                                                        initial_state=statePlaceHolder,
                                                        inputs=inputTensor_)

        for eachVariable in tf.trainable_variables():
            if name_ in eachVariable.name:
                if ('bias' not in eachVariable.name)and(layerSettings.REGULARIZER_WEIGHTS_DECAY != None):
                    regularizationLoss = L2_Regularizer(eachVariable)
                    tf.losses.add_loss(regularizationLoss, loss_collection=tf.GraphKeys.REGULARIZATION_LOSSES)

    return outputTensor, stateTensor, statePlaceHolder
true
true
f700393f1927165c9cf9f30f328806087c1c1101
12,366
py
Python
lib/enthought/traits/ui/tk/menu.py
mattfoster/matplotlib
0b47697b19b77226c633ec6a3d74a2199a153315
[ "PSF-2.0", "BSD-3-Clause" ]
1
2016-05-08T18:33:12.000Z
2016-05-08T18:33:12.000Z
lib/enthought/traits/ui/tk/menu.py
mattfoster/matplotlib
0b47697b19b77226c633ec6a3d74a2199a153315
[ "PSF-2.0", "BSD-3-Clause" ]
null
null
null
lib/enthought/traits/ui/tk/menu.py
mattfoster/matplotlib
0b47697b19b77226c633ec6a3d74a2199a153315
[ "PSF-2.0", "BSD-3-Clause" ]
null
null
null
#------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: David C. Morrill
# Date: 02/02/2004
# Description: Dynamically construct Tkinter Menus or MenuBars from a supplied
#              string string description of the menu.
#------------------------------------------------------------------------------
#
# Menu Description Syntax:
#
#   submenu_label {help_string}
#   menuitem_label | accelerator {help_string} [~/-name]: code
#   -
#
# where:
#   submenu_label  = Label of a sub menu
#   menuitem_label = Label of a menu item
#   help_string    = Help string to display on the status line (optional)
#   accelerator    = Accelerator key (e.g. Ctrl-C) (| and key are optional)
#   [~]            = Menu item checkable, but not checked initially (optional)
#   [/]            = Menu item checkable, and checked initially (optional)
#   [-]            = Menu item disabled initially (optional)
#   [name]         = Symbolic name used to refer to menu item (optional)
#   code           = Python code invoked when menu item is selected
#
#-------------------------------------------------------------------------------

#-------------------------------------------------------------------------------
# Imports:
#-------------------------------------------------------------------------------

import wx
import re
import string

#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------

help_pat = re.compile( r'(.*){(.*)}(.*)' )
options_pat = re.compile( r'(.*)\[(.*)\](.*)' )

key_map = {
    'F1': wx.WXK_F1, 'F2': wx.WXK_F2, 'F3': wx.WXK_F3, 'F4': wx.WXK_F4,
    'F5': wx.WXK_F5, 'F6': wx.WXK_F6, 'F7': wx.WXK_F7, 'F8': wx.WXK_F8,
    'F9': wx.WXK_F9, 'F10': wx.WXK_F10, 'F11': wx.WXK_F11, 'F12': wx.WXK_F12
}

#-------------------------------------------------------------------------------
# 'MakeMenu' class:
#-------------------------------------------------------------------------------

class MakeMenu:

    # Initialize the globally unique menu ID:
    cur_id = 1000

    #---------------------------------------------------------------------------
    # Initializes the object:
    #---------------------------------------------------------------------------

    def __init__ ( self, desc, owner, popup = False, window = None ):
        """ Initializes the object.
        """
        self.owner = owner
        if window is None:
            window = owner
        self.window = window
        self.indirect = getattr( owner, 'call_menu', None )
        self.names = {}
        self.desc = desc.split( '\n' )
        self.index = 0
        self.keys = []
        if popup:
            self.menu = menu = wx.Menu()
            self.parse( menu, -1 )
        else:
            self.menu = menu = wx.MenuBar()
            self.parse( menu, -1 )
            window.SetMenuBar( menu )
            if len( self.keys ) > 0:
                window.SetAcceleratorTable( wx.AcceleratorTable( self.keys ) )

    #---------------------------------------------------------------------------
    # Recursively parses menu items from the description:
    #---------------------------------------------------------------------------

    def parse ( self, menu, indent ):
        """ Recursively parses menu items from the description.
        """
        while True:

            # Make sure we have not reached the end of the menu description yet:
            if self.index >= len( self.desc ):
                return

            # Get the next menu description line and check its indentation:
            dline = self.desc[ self.index ]
            line = dline.lstrip()
            indented = len( dline ) - len( line )
            if indented <= indent:
                return

            # Indicate that the current line has been processed:
            self.index += 1

            # Check for a blank or comment line:
            if (line == '') or (line[0:1] == '#'):
                continue

            # Check for a menu separator:
            if line[0:1] == '-':
                menu.AppendSeparator()
                continue

            # Allocate a new menu ID:
            MakeMenu.cur_id += 1
            cur_id = MakeMenu.cur_id

            # Extract the help string (if any):
            help = ''
            match = help_pat.search( line )
            if match:
                help = ' ' + match.group(2).strip()
                line = match.group(1) + match.group(3)

            # Check for a menu item:
            col = line.find( ':' )
            if col >= 0:
                handler = line[ col + 1: ].strip()
                if handler != '':
                    if self.indirect:
                        self.indirect( cur_id, handler )
                        handler = self.indirect
                    else:
                        try:
                            exec ('def handler(event,self=self.owner):\n %s\n' % handler)
                        except:
                            handler = null_handler
                else:
                    try:
                        exec 'def handler(event,self=self.owner):\n%s\n' % (
                             self.get_body( indented ), ) in globals()
                    except:
                        handler = null_handler
                wx.EVT_MENU( self.window, cur_id, handler )
                not_checked = checked = disabled = False
                line = line[ : col ]
                match = options_pat.search( line )
                if match:
                    line = match.group(1) + match.group(3)
                    not_checked, checked, disabled, name = option_check( '~/-',
                                                       match.group(2).strip() )
                    if name != '':
                        self.names[ name ] = cur_id
                        setattr( self.owner, name, MakeMenuItem( self, cur_id ) )

                label = line.strip()
                col = label.find( '|' )
                if col >= 0:
                    key = label[ col + 1: ].strip()
                    label = '%s%s%s' % ( label[ : col ].strip(), '\t', key )
                    key = key.upper()
                    flag = wx.ACCEL_NORMAL
                    col = key.find( '-' )
                    if col >= 0:
                        flag = { 'CTRL': wx.ACCEL_CTRL,
                                 'SHIFT': wx.ACCEL_SHIFT,
                                 'ALT': wx.ACCEL_ALT
                               }.get( key[ : col ].strip(), wx.ACCEL_CTRL )
                        key = key[ col + 1: ].strip()
                    code = key_map.get( key, None )
                    try:
                        if code is None:
                            code = ord( key )
                        self.keys.append( wx.AcceleratorEntry( flag, code, cur_id ) )
                    except:
                        pass
                menu.Append( cur_id, label, help, not_checked or checked )
                if checked:
                    menu.Check( cur_id, True )
                if disabled:
                    menu.Enable( cur_id, False )
                continue

            # Else must be the start of a sub menu:
            submenu = wx.Menu()
            label = line.strip()

            # Recursively parse the sub-menu:
            self.parse( submenu, indented )

            # Add the menu to its parent:
            try:
                menu.AppendMenu( cur_id, label, submenu, help )
            except:
                # Handle the case where 'menu' is really a 'MenuBar' (which does
                # not understand 'MenuAppend'):
                menu.Append( submenu, label )

    #---------------------------------------------------------------------------
    # Returns the body of an inline method:
    #---------------------------------------------------------------------------

    def get_body ( self, indent ):
        """ Returns the body of an inline method.
        """
        result = []
        while self.index < len( self.desc ):
            line = self.desc[ self.index ]
            if (len( line ) - len( line.lstrip() )) <= indent:
                break
            result.append( line )
            self.index += 1
        result = '\n'.join( result ).rstrip()
        if result != '':
            return result
        return ' pass'

    #---------------------------------------------------------------------------
    # Returns the id associated with a specified name:
    #---------------------------------------------------------------------------

    def get_id ( self, name ):
        """ Returns the id associated with a specified name.
        """
        if isinstance(name, basestring):
            return self.names[ name ]
        return name

    #---------------------------------------------------------------------------
    # Checks (or unchecks) a menu item specified by name:
    #---------------------------------------------------------------------------

    def checked ( self, name, check = None ):
        """ Checks (or unchecks) a menu item specified by name.
        """
        if check is None:
            return self.menu.IsChecked( self.get_id( name ) )
        self.menu.Check( self.get_id( name ), check )

    #---------------------------------------------------------------------------
    # Enables (or disables) a menu item specified by name:
    #---------------------------------------------------------------------------

    def enabled ( self, name, enable = None ):
        """ Enables (or disables) a menu item specified by name.
        """
        if enable is None:
            return self.menu.IsEnabled( self.get_id( name ) )
        self.menu.Enable( self.get_id( name ), enable )

    #---------------------------------------------------------------------------
    # Gets/Sets the label for a menu item:
    #---------------------------------------------------------------------------

    def label ( self, name, label = None ):
        """ Gets/Sets the label for a menu item.
        """
        if label is None:
            return self.menu.GetLabel( self.get_id( name ) )
        self.menu.SetLabel( self.get_id( name ), label )

#-------------------------------------------------------------------------------
# 'MakeMenuItem' class:
#-------------------------------------------------------------------------------

class MakeMenuItem:

    def __init__ ( self, menu, id ):
        self.menu = menu
        self.id = id

    def checked ( self, check = None ):
        return self.menu.checked( self.id, check )

    def toggle ( self ):
        checked = not self.checked()
        self.checked( checked )
        return checked

    def enabled ( self, enable = None ):
        return self.menu.enabled( self.id, enable )

    def label ( self, label = None ):
        return self.menu.label( self.id, label )

#-------------------------------------------------------------------------------
# Determine whether a string contains any specified option characters, and
# remove them if it does:
#-------------------------------------------------------------------------------

def option_check ( test, string ):
    result = []
    for char in test:
        col = string.find( char )
        result.append( col >= 0 )
        if col >= 0:
            string = string[ : col ] + string[ col + 1: ]
    return result + [ string.strip() ]

#-------------------------------------------------------------------------------
# Null menu option selection handler:
#-------------------------------------------------------------------------------

def null_handler ( event ):
    print 'null_handler invoked'
37.70122
81
0.404334
import wx
import re
import string

help_pat = re.compile( r'(.*){(.*)}(.*)' )
options_pat = re.compile( r'(.*)\[(.*)\](.*)' )

key_map = {
    'F1': wx.WXK_F1, 'F2': wx.WXK_F2, 'F3': wx.WXK_F3, 'F4': wx.WXK_F4,
    'F5': wx.WXK_F5, 'F6': wx.WXK_F6, 'F7': wx.WXK_F7, 'F8': wx.WXK_F8,
    'F9': wx.WXK_F9, 'F10': wx.WXK_F10, 'F11': wx.WXK_F11, 'F12': wx.WXK_F12
}

class MakeMenu:

    cur_id = 1000

    def __init__ ( self, desc, owner, popup = False, window = None ):
        """ Initializes the object.
        """
        self.owner = owner
        if window is None:
            window = owner
        self.window = window
        self.indirect = getattr( owner, 'call_menu', None )
        self.names = {}
        self.desc = desc.split( '\n' )
        self.index = 0
        self.keys = []
        if popup:
            self.menu = menu = wx.Menu()
            self.parse( menu, -1 )
        else:
            self.menu = menu = wx.MenuBar()
            self.parse( menu, -1 )
            window.SetMenuBar( menu )
            if len( self.keys ) > 0:
                window.SetAcceleratorTable( wx.AcceleratorTable( self.keys ) )

    def parse ( self, menu, indent ):
        """ Recursively parses menu items from the description.
        """
        while True:
            if self.index >= len( self.desc ):
                return
            dline = self.desc[ self.index ]
            line = dline.lstrip()
            indented = len( dline ) - len( line )
            if indented <= indent:
                return
            self.index += 1
            if (line == '') or (line[0:1] == '#'):
                continue
            if line[0:1] == '-':
                menu.AppendSeparator()
                continue
            MakeMenu.cur_id += 1
            cur_id = MakeMenu.cur_id
            help = ''
            match = help_pat.search( line )
            if match:
                help = ' ' + match.group(2).strip()
                line = match.group(1) + match.group(3)
            col = line.find( ':' )
            if col >= 0:
                handler = line[ col + 1: ].strip()
                if handler != '':
                    if self.indirect:
                        self.indirect( cur_id, handler )
                        handler = self.indirect
                    else:
                        try:
                            exec ('def handler(event,self=self.owner):\n %s\n' % handler)
                        except:
                            handler = null_handler
                else:
                    try:
                        exec 'def handler(event,self=self.owner):\n%s\n' % (
                             self.get_body( indented ), ) in globals()
                    except:
                        handler = null_handler
                wx.EVT_MENU( self.window, cur_id, handler )
                not_checked = checked = disabled = False
                line = line[ : col ]
                match = options_pat.search( line )
                if match:
                    line = match.group(1) + match.group(3)
                    not_checked, checked, disabled, name = option_check( '~/-',
                                                       match.group(2).strip() )
                    if name != '':
                        self.names[ name ] = cur_id
                        setattr( self.owner, name, MakeMenuItem( self, cur_id ) )

                label = line.strip()
                col = label.find( '|' )
                if col >= 0:
                    key = label[ col + 1: ].strip()
                    label = '%s%s%s' % ( label[ : col ].strip(), '\t', key )
                    key = key.upper()
                    flag = wx.ACCEL_NORMAL
                    col = key.find( '-' )
                    if col >= 0:
                        flag = { 'CTRL': wx.ACCEL_CTRL,
                                 'SHIFT': wx.ACCEL_SHIFT,
                                 'ALT': wx.ACCEL_ALT
                               }.get( key[ : col ].strip(), wx.ACCEL_CTRL )
                        key = key[ col + 1: ].strip()
                    code = key_map.get( key, None )
                    try:
                        if code is None:
                            code = ord( key )
                        self.keys.append( wx.AcceleratorEntry( flag, code, cur_id ) )
                    except:
                        pass
                menu.Append( cur_id, label, help, not_checked or checked )
                if checked:
                    menu.Check( cur_id, True )
                if disabled:
                    menu.Enable( cur_id, False )
                continue

            submenu = wx.Menu()
            label = line.strip()
            self.parse( submenu, indented )
            try:
                menu.AppendMenu( cur_id, label, submenu, help )
            except:
                menu.Append( submenu, label )

    def get_body ( self, indent ):
        """ Returns the body of an inline method.
        """
        result = []
        while self.index < len( self.desc ):
            line = self.desc[ self.index ]
            if (len( line ) - len( line.lstrip() )) <= indent:
                break
            result.append( line )
            self.index += 1
        result = '\n'.join( result ).rstrip()
        if result != '':
            return result
        return ' pass'

    def get_id ( self, name ):
        """ Returns the id associated with a specified name.
        """
        if isinstance(name, basestring):
            return self.names[ name ]
        return name

    def checked ( self, name, check = None ):
        """ Checks (or unchecks) a menu item specified by name.
        """
        if check is None:
            return self.menu.IsChecked( self.get_id( name ) )
        self.menu.Check( self.get_id( name ), check )

    def enabled ( self, name, enable = None ):
        """ Enables (or disables) a menu item specified by name.
        """
        if enable is None:
            return self.menu.IsEnabled( self.get_id( name ) )
        self.menu.Enable( self.get_id( name ), enable )

    def label ( self, name, label = None ):
        """ Gets/Sets the label for a menu item.
        """
        if label is None:
            return self.menu.GetLabel( self.get_id( name ) )
        self.menu.SetLabel( self.get_id( name ), label )

class MakeMenuItem:

    def __init__ ( self, menu, id ):
        self.menu = menu
        self.id = id

    def checked ( self, check = None ):
        return self.menu.checked( self.id, check )

    def toggle ( self ):
        checked = not self.checked()
        self.checked( checked )
        return checked

    def enabled ( self, enable = None ):
        return self.menu.enabled( self.id, enable )

    def label ( self, label = None ):
        return self.menu.label( self.id, label )

def option_check ( test, string ):
    result = []
    for char in test:
        col = string.find( char )
        result.append( col >= 0 )
        if col >= 0:
            string = string[ : col ] + string[ col + 1: ]
    return result + [ string.strip() ]

def null_handler ( event ):
    print 'null_handler invoked'
false
true
f70039735ffe11d74efe8cef9ed002422d74d66c
1,651
py
Python
externals/libbot/bot2-param/lcmtypes/python/bot_param/request_t.py
ericmanzi/double_pendulum_lqr
76bba3091295abb7d412c4a3156258918f280c96
[ "BSD-3-Clause" ]
null
null
null
externals/libbot/bot2-param/lcmtypes/python/bot_param/request_t.py
ericmanzi/double_pendulum_lqr
76bba3091295abb7d412c4a3156258918f280c96
[ "BSD-3-Clause" ]
null
null
null
externals/libbot/bot2-param/lcmtypes/python/bot_param/request_t.py
ericmanzi/double_pendulum_lqr
76bba3091295abb7d412c4a3156258918f280c96
[ "BSD-3-Clause" ]
null
null
null
"""LCM type definitions This file automatically generated by lcm. DO NOT MODIFY BY HAND!!!! """ import cStringIO as StringIO import struct class request_t(object): __slots__ = ["utime"] def __init__(self): self.utime = 0 def encode(self): buf = StringIO.StringIO() buf.write(request_t._get_packed_fingerprint()) self._encode_one(buf) return buf.getvalue() def _encode_one(self, buf): buf.write(struct.pack(">q", self.utime)) def decode(data): if hasattr(data, 'read'): buf = data else: buf = StringIO.StringIO(data) if buf.read(8) != request_t._get_packed_fingerprint(): raise ValueError("Decode error") return request_t._decode_one(buf) decode = staticmethod(decode) def _decode_one(buf): self = request_t() self.utime = struct.unpack(">q", buf.read(8))[0] return self _decode_one = staticmethod(_decode_one) _hash = None def _get_hash_recursive(parents): if request_t in parents: return 0 tmphash = (0xa686a0e0f882d897) & 0xffffffffffffffff tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff return tmphash _get_hash_recursive = staticmethod(_get_hash_recursive) _packed_fingerprint = None def _get_packed_fingerprint(): if request_t._packed_fingerprint is None: request_t._packed_fingerprint = struct.pack(">Q", request_t._get_hash_recursive([])) return request_t._packed_fingerprint _get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
30.018182
96
0.658389
import cStringIO as StringIO
import struct

class request_t(object):
    __slots__ = ["utime"]

    def __init__(self):
        self.utime = 0

    def encode(self):
        buf = StringIO.StringIO()
        buf.write(request_t._get_packed_fingerprint())
        self._encode_one(buf)
        return buf.getvalue()

    def _encode_one(self, buf):
        buf.write(struct.pack(">q", self.utime))

    def decode(data):
        if hasattr(data, 'read'):
            buf = data
        else:
            buf = StringIO.StringIO(data)
        if buf.read(8) != request_t._get_packed_fingerprint():
            raise ValueError("Decode error")
        return request_t._decode_one(buf)
    decode = staticmethod(decode)

    def _decode_one(buf):
        self = request_t()
        self.utime = struct.unpack(">q", buf.read(8))[0]
        return self
    _decode_one = staticmethod(_decode_one)

    _hash = None
    def _get_hash_recursive(parents):
        if request_t in parents: return 0
        tmphash = (0xa686a0e0f882d897) & 0xffffffffffffffff
        tmphash = (((tmphash<<1)&0xffffffffffffffff) + (tmphash>>63)) & 0xffffffffffffffff
        return tmphash
    _get_hash_recursive = staticmethod(_get_hash_recursive)
    _packed_fingerprint = None

    def _get_packed_fingerprint():
        if request_t._packed_fingerprint is None:
            request_t._packed_fingerprint = struct.pack(">Q", request_t._get_hash_recursive([]))
        return request_t._packed_fingerprint
    _get_packed_fingerprint = staticmethod(_get_packed_fingerprint)
true
true
f70039ac21b0c913b647473f2df763d599defccb
670
py
Python
test_package/conanfile.py
amrayn/conan-easyloggingpp
e8964070859af0fe5164e2b0a56d58265a99f14e
[ "MIT" ]
null
null
null
test_package/conanfile.py
amrayn/conan-easyloggingpp
e8964070859af0fe5164e2b0a56d58265a99f14e
[ "MIT" ]
null
null
null
test_package/conanfile.py
amrayn/conan-easyloggingpp
e8964070859af0fe5164e2b0a56d58265a99f14e
[ "MIT" ]
null
null
null
from conans import ConanFile, CMake
import os

channel = os.getenv("CONAN_CHANNEL", "testing")
username = os.getenv("CONAN_USERNAME", "memsharded")


class EasyLoggingTestConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "easyloggingpp/9.94.1@%s/%s" % (username, channel)
    generators = "cmake"

    def build(self):
        cmake = CMake(self.settings)
        self.run('cmake "%s" %s' % (self.conanfile_directory, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def imports(self):
        self.copy("*.cc")

    def test(self):
        os.chdir("bin")
        self.run(".%sexample" % os.sep)
29.130435
82
0.632836
from conans import ConanFile, CMake
import os

channel = os.getenv("CONAN_CHANNEL", "testing")
username = os.getenv("CONAN_USERNAME", "memsharded")


class EasyLoggingTestConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "easyloggingpp/9.94.1@%s/%s" % (username, channel)
    generators = "cmake"

    def build(self):
        cmake = CMake(self.settings)
        self.run('cmake "%s" %s' % (self.conanfile_directory, cmake.command_line))
        self.run("cmake --build . %s" % cmake.build_config)

    def imports(self):
        self.copy("*.cc")

    def test(self):
        os.chdir("bin")
        self.run(".%sexample" % os.sep)
true
true
f7003a232d6ec1fee548b1e75d560f765252f333
2,108
py
Python
itea/inspection/__init__.py
gAldeia/itea-python
689cd1eff61cd475277d63ca9387c6f9b6b7ee53
[ "BSD-3-Clause" ]
null
null
null
itea/inspection/__init__.py
gAldeia/itea-python
689cd1eff61cd475277d63ca9387c6f9b6b7ee53
[ "BSD-3-Clause" ]
null
null
null
itea/inspection/__init__.py
gAldeia/itea-python
689cd1eff61cd475277d63ca9387c6f9b6b7ee53
[ "BSD-3-Clause" ]
null
null
null
# Author:  Guilherme Aldeia
# Contact: [email protected]
# Version: 1.0.1
# Last modified: 06-07-2021 by Guilherme Aldeia

"""Interaction Transformation expression's **Inspector**

Sub-module containing three classes to help inspect and explain the results
obtained with the itea.

- ``ITExpr_explainer``: Implementations of feature importances methods
  specific to the Interaction-Transformation representation, and several
  visualization tools to help interpret the final expression;

- ``ITExpr_inspector``: Based on a more statistical approach, this class
  implements methods to measure the quality of the final expression by
  calculating information between individual terms;

- ``ITExpr_texifier``: Creation of latex representations of the final
  expression and its derivatives. In cases where the final expression is
  simple enough, the analysis of the expression can provide useful insights.

All the modules are designed to work with `ITExpr`s. After the evolutionary
process is performed (by calling `fit()` on the `ITEA_classifier` or
`ITEA_regressor`), the best final expression can be accessed by
`itea.bestsol_`, and those classes are specialized in different ways of
inspecting the final model.

Additionally, there is one class designed to work with the ``itea``, instead
of ``ITExpr`` expressions. The class ``ITEA_summarizer`` implements a method
to automatically create a pdf file containing information generated with all
the inspection classes, in an attempt to automate the task of generating an
interpretability report.
"""

from itea.inspection._ITExpr_explainer import ITExpr_explainer
from itea.inspection._ITExpr_inspector import ITExpr_inspector
from itea.inspection._ITExpr_texifier import ITExpr_texifier
from itea.inspection._ITEA_summarizer import ITEA_summarizer

import jax

# Must be used at startup. We'll perform lightweight usage with jax
jax.config.update('jax_platform_name', 'cpu')

__all__ = [
    'ITExpr_explainer',
    'ITExpr_inspector',
    'ITExpr_texifier',
    'ITEA_summarizer'
]
39.773585
81
0.776091
from itea.inspection._ITExpr_explainer import ITExpr_explainer
from itea.inspection._ITExpr_inspector import ITExpr_inspector
from itea.inspection._ITExpr_texifier import ITExpr_texifier
from itea.inspection._ITEA_summarizer import ITEA_summarizer

import jax

jax.config.update('jax_platform_name', 'cpu')

__all__ = [
    'ITExpr_explainer',
    'ITExpr_inspector',
    'ITExpr_texifier',
    'ITEA_summarizer'
]
true
true
f7003b91bb4ee77fca845b180c133744331feb0a
365
py
Python
test/plugin_tests.py
builderjer/skill-ovos-timer
b7d15c9ca102d2de0a514402599db1bb542189e8
[ "Apache-2.0" ]
2
2021-11-18T16:34:12.000Z
2021-11-20T14:52:54.000Z
test/plugin_tests.py
builderjer/skill-ovos-timer
b7d15c9ca102d2de0a514402599db1bb542189e8
[ "Apache-2.0" ]
null
null
null
test/plugin_tests.py
builderjer/skill-ovos-timer
b7d15c9ca102d2de0a514402599db1bb542189e8
[ "Apache-2.0" ]
1
2022-03-11T22:30:23.000Z
2022-03-11T22:30:23.000Z
# write your first unittest!
import unittest
from ovos_plugin_manager.skills import find_skill_plugins


class TestPlugin(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        self.skill_id = "ovos-skill-timer.OpenVoiceOS"

    def test_find_plugin(self):
        plugins = find_skill_plugins()
        self.assertIn(self.skill_id, list(plugins))
24.333333
57
0.734247
import unittest
from ovos_plugin_manager.skills import find_skill_plugins


class TestPlugin(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        self.skill_id = "ovos-skill-timer.OpenVoiceOS"

    def test_find_plugin(self):
        plugins = find_skill_plugins()
        self.assertIn(self.skill_id, list(plugins))
true
true
f7003ce868d990006e15c94a79022efc04576c27
6,869
py
Python
sdc/detection/cnn_classifier.py
tadasdanielius/P5-Vehicle-Detection-And-Tracking
38513e91d863f7fff50703349aacbe5d5bbfae39
[ "MIT" ]
null
null
null
sdc/detection/cnn_classifier.py
tadasdanielius/P5-Vehicle-Detection-And-Tracking
38513e91d863f7fff50703349aacbe5d5bbfae39
[ "MIT" ]
null
null
null
sdc/detection/cnn_classifier.py
tadasdanielius/P5-Vehicle-Detection-And-Tracking
38513e91d863f7fff50703349aacbe5d5bbfae39
[ "MIT" ]
null
null
null
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense, Lambda, ELU
from keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from keras.models import model_from_json
from sklearn.preprocessing import normalize
import cv2
import numpy as np
import glob
import json

from keras.layers import merge
from keras.layers.core import Lambda
from keras.models import Model

import tensorflow as tf


def make_parallel(model, gpu_count):
    def get_slice(data, idx, parts):
        shape = tf.shape(data)
        size = tf.concat(0, [shape[:1] // parts, shape[1:]])
        stride = tf.concat(0, [shape[:1] // parts, shape[1:] * 0])
        start = stride * idx
        return tf.slice(data, start, size)

    outputs_all = []
    for i in range(len(model.outputs)):
        outputs_all.append([])

    # Place a copy of the model on each GPU, each getting a slice of the batch
    for i in range(gpu_count):
        with tf.device('/gpu:%d' % i):
            with tf.name_scope('tower_%d' % i) as scope:
                inputs = []

                # Slice each input into a piece for processing on this GPU
                for x in model.inputs:
                    input_shape = tuple(x.get_shape().as_list())[1:]
                    slice_n = Lambda(get_slice, output_shape=input_shape, arguments={'idx': i, 'parts': gpu_count})(x)
                    inputs.append(slice_n)

                outputs = model(inputs)
                if not isinstance(outputs, list):
                    outputs = [outputs]

                # Save all the outputs for merging back together later
                for l in range(len(outputs)):
                    outputs_all[l].append(outputs[l])

    # merge outputs on CPU
    with tf.device('/cpu:0'):
        merged = []
        for outputs in outputs_all:
            merged.append(merge(outputs, mode='concat', concat_axis=0))

        return Model(input=model.inputs, output=merged)


class CNNClassifier:
    def __init__(self):
        self.classifier = None

    def get_model(self, parallel=False):
        model = Sequential()
        #model.add(Lambda(lambda x: x / 127.5 - 1., input_shape=(64, 64, 3)))
        model.add(Convolution2D(8, 8, 8, subsample=(4, 4), border_mode="same", activation='elu', name='Conv1'))
        model.add(Convolution2D(16, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv2'))
        model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv3'))
        model.add(Flatten())
        model.add(ELU())
        model.add(Dense(1024, activation='elu'))
        model.add(Dropout(.5))
        model.add(ELU())
        model.add(Dense(512, activation='elu'))
        model.add(Dropout(.5))
        model.add(Dense(1, name='output'))
        model.add(Activation('sigmoid'))
        if parallel:
            model = make_parallel(model, 2)
        #model.compile(optimizer='sgd', loss='binary_crossentropy', metrics=['accuracy'])
        self.model = model
        return model

    def _model(self):
        img_width, img_height = 64, 64
        model = Sequential()
        model.add(Convolution2D(8, 3, 3, input_shape=(img_width, img_height, 3)))
        model.add(Activation('elu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))

        #model.add(Convolution2D(16, 3, 3))
        #model.add(Activation('elu'))
        #model.add(MaxPooling2D(pool_size=(2, 2)))

        #model.add(Convolution2D(32, 3, 3))
        #model.add(Activation('elu'))
        #model.add(MaxPooling2D(pool_size=(2, 2)))

        model.add(Flatten())
        model.add(Dense(512))
        model.add(Dropout(0.5))
        model.add(Dense(1, activation='sigmoid'))
        #model = make_parallel(model, 2)
        self.model = model

    def compile(self):
        self.model.compile(loss='binary_crossentropy', optimizer='rmsprop', class_mode='binary', metrics=['accuracy'])

    def save(self):
        model_json = self.model.to_json()
        with open("./model.json", "w") as json_file:
            json.dump(model_json, json_file)
        self.model.save_weights("./model.h5")
        print("Saved model to disk")

    def load(self):
        with open('./model.json', 'r') as jfile:
            self.model = model_from_json(json.load(jfile))
        self.compile()
        self.model.load_weights('./model.h5')

    def get_list(self):
        vehicles = np.array(glob.glob('training_data/vehicles/*/*'))
        y_vehicles = np.zeros(vehicles.shape) + 1
        non_vehicles = np.array(glob.glob('training_data/non-vehicles/*/*'))
        y_non_vehicles = np.zeros(non_vehicles.shape)
        X_data = np.concatenate((vehicles, non_vehicles))
        Y_data = np.concatenate((y_vehicles, y_non_vehicles))
        return X_data, Y_data

    def predict(self, image):
        #img = np.copy(image)
        #img = cv2.resize(img, (64, 64))
        x = image[None, :, :, :]
        result = self.model.predict(x, 1)
        return result

    def train(self, file_list, labels, test_size=0.2, nb_epoch=30, batch_size=128):
        X_train, X_test, Y_train, Y_test = train_test_split(file_list, labels, test_size=test_size, random_state=100)
        test_images = build_images(X_test)
        train_images = build_images(X_train)

        train_datagen = ImageDataGenerator(
            rescale=1. / 255,
            shear_range=0.05,
            zoom_range=0.05,
            width_shift_range=0.1,
            height_shift_range=0.1,
            rotation_range=5,
            horizontal_flip=True)

        test_datagen = ImageDataGenerator(rescale=1. / 255)

        train_generator = train_datagen.flow(train_images, Y_train, batch_size)
        test_generator = test_datagen.flow(test_images, Y_test, batch_size)

        nb_train_samples = (batch_size-1)*100
        nb_validation_samples = (batch_size-1)*20

        #self.get_model(parallel=False)
        self._model()
        self.compile()
        self.model.fit_generator(
            train_generator,
            samples_per_epoch=nb_train_samples,
            nb_epoch=nb_epoch, show_accuracy=True,
            validation_data=test_generator,
            nb_val_samples=nb_validation_samples)


def build_images(x):
    images = np.zeros((len(x), 64, 64, 3))
    for idx, img_fname in enumerate(x):
        im = cv2.imread(img_fname)
        im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
        im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_AREA)
        images[idx] = im
    return images


def do_all(nb_epoch=30, batch_size=256):
    clf = CNNClassifier()
    x, y = clf.get_list()
    clf.train(x, y, nb_epoch=nb_epoch, batch_size=batch_size)
    clf.save()
35.963351
118
0.615519
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D
from keras.layers import Activation, Dropout, Flatten, Dense, Lambda, ELU
from keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from keras.models import model_from_json
from sklearn.preprocessing import normalize
import cv2
import numpy as np
import glob
import json

from keras.layers import merge
from keras.layers.core import Lambda
from keras.models import Model

import tensorflow as tf


def make_parallel(model, gpu_count):
    def get_slice(data, idx, parts):
        shape = tf.shape(data)
        size = tf.concat(0, [shape[:1] // parts, shape[1:]])
        stride = tf.concat(0, [shape[:1] // parts, shape[1:] * 0])
        start = stride * idx
        return tf.slice(data, start, size)

    outputs_all = []
    for i in range(len(model.outputs)):
        outputs_all.append([])

    for i in range(gpu_count):
        with tf.device('/gpu:%d' % i):
            with tf.name_scope('tower_%d' % i) as scope:
                inputs = []

                for x in model.inputs:
                    input_shape = tuple(x.get_shape().as_list())[1:]
                    slice_n = Lambda(get_slice, output_shape=input_shape, arguments={'idx': i, 'parts': gpu_count})(x)
                    inputs.append(slice_n)

                outputs = model(inputs)
                if not isinstance(outputs, list):
                    outputs = [outputs]

                for l in range(len(outputs)):
                    outputs_all[l].append(outputs[l])

    with tf.device('/cpu:0'):
        merged = []
        for outputs in outputs_all:
            merged.append(merge(outputs, mode='concat', concat_axis=0))

        return Model(input=model.inputs, output=merged)


class CNNClassifier:
    def __init__(self):
        self.classifier = None

    def get_model(self, parallel=False):
        model = Sequential()
        model.add(Convolution2D(8, 8, 8, subsample=(4, 4), border_mode="same", activation='elu', name='Conv1'))
        model.add(Convolution2D(16, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv2'))
        model.add(Convolution2D(32, 5, 5, subsample=(2, 2), border_mode="same", activation='elu', name='Conv3'))
        model.add(Flatten())
        model.add(ELU())
        model.add(Dense(1024, activation='elu'))
        model.add(Dropout(.5))
        model.add(ELU())
        model.add(Dense(512, activation='elu'))
        model.add(Dropout(.5))
        model.add(Dense(1, name='output'))
        model.add(Activation('sigmoid'))
        if parallel:
            model = make_parallel(model, 2)
        self.model = model
        return model

    def _model(self):
        img_width, img_height = 64, 64
        model = Sequential()
        model.add(Convolution2D(8, 3, 3, input_shape=(img_width, img_height, 3)))
        model.add(Activation('elu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))

        model.add(Flatten())
        model.add(Dense(512))
        model.add(Dropout(0.5))
        model.add(Dense(1, activation='sigmoid'))
        self.model = model

    def compile(self):
        self.model.compile(loss='binary_crossentropy', optimizer='rmsprop', class_mode='binary', metrics=['accuracy'])

    def save(self):
        model_json = self.model.to_json()
        with open("./model.json", "w") as json_file:
            json.dump(model_json, json_file)
        self.model.save_weights("./model.h5")
        print("Saved model to disk")

    def load(self):
        with open('./model.json', 'r') as jfile:
            self.model = model_from_json(json.load(jfile))
        self.compile()
        self.model.load_weights('./model.h5')

    def get_list(self):
        vehicles = np.array(glob.glob('training_data/vehicles/*/*'))
        y_vehicles = np.zeros(vehicles.shape) + 1
        non_vehicles = np.array(glob.glob('training_data/non-vehicles/*/*'))
        y_non_vehicles = np.zeros(non_vehicles.shape)
        X_data = np.concatenate((vehicles, non_vehicles))
        Y_data = np.concatenate((y_vehicles, y_non_vehicles))
        return X_data, Y_data

    def predict(self, image):
        x = image[None, :, :, :]
        result = self.model.predict(x, 1)
        return result

    def train(self, file_list, labels, test_size=0.2, nb_epoch=30, batch_size=128):
        X_train, X_test, Y_train, Y_test = train_test_split(file_list, labels, test_size=test_size, random_state=100)
        test_images = build_images(X_test)
        train_images = build_images(X_train)

        train_datagen = ImageDataGenerator(
            rescale=1. / 255,
            shear_range=0.05,
            zoom_range=0.05,
            width_shift_range=0.1,
            height_shift_range=0.1,
            rotation_range=5,
            horizontal_flip=True)

        test_datagen = ImageDataGenerator(rescale=1. / 255)

        train_generator = train_datagen.flow(train_images, Y_train, batch_size)
        test_generator = test_datagen.flow(test_images, Y_test, batch_size)

        nb_train_samples = (batch_size-1)*100
        nb_validation_samples = (batch_size-1)*20

        self._model()
        self.compile()
        self.model.fit_generator(
            train_generator,
            samples_per_epoch=nb_train_samples,
            nb_epoch=nb_epoch, show_accuracy=True,
            validation_data=test_generator,
            nb_val_samples=nb_validation_samples)


def build_images(x):
    images = np.zeros((len(x), 64, 64, 3))
    for idx, img_fname in enumerate(x):
        im = cv2.imread(img_fname)
        im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
        im = cv2.resize(im, (64, 64), interpolation=cv2.INTER_AREA)
        images[idx] = im
    return images


def do_all(nb_epoch=30, batch_size=256):
    clf = CNNClassifier()
    x, y = clf.get_list()
    clf.train(x, y, nb_epoch=nb_epoch, batch_size=batch_size)
    clf.save()
true
true
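Aside: make_parallel in the record above shards each input batch along axis 0, one contiguous slice per GPU tower, then concatenates the tower outputs back together on the CPU. A minimal NumPy sketch of just that slicing arithmetic (chunk = n // parts, start = idx * chunk), independent of the old TensorFlow/Keras APIs used in the record:

import numpy as np

def get_slice(data, idx, parts):
    # Each tower receives a contiguous chunk of floor(n / parts) rows,
    # starting at idx * chunk, the same arithmetic as the Lambda above.
    chunk = data.shape[0] // parts
    return data[idx * chunk:(idx + 1) * chunk]

batch = np.arange(8 * 3).reshape(8, 3)
towers = [get_slice(batch, i, 2) for i in range(2)]
assert [t.shape for t in towers] == [(4, 3), (4, 3)]
# Concatenating the per-tower results restores the original batch order,
# which is what the merge(..., concat_axis=0) step relies on.
assert (np.concatenate(towers, axis=0) == batch).all()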
f7003f183c485860f122f79dba419f856151c5fc
2,935
py
Python
tests/test_uniswap_add.py
Dahlia-Finance/dahlia_contracts
3cf35103b2341a2ca07f8b5653946c74cedf3a78
[ "MIT" ]
1
2022-01-14T11:04:09.000Z
2022-01-14T11:04:09.000Z
tests/test_uniswap_add.py
Pinnata/pinnata-contracts
3cf35103b2341a2ca07f8b5653946c74cedf3a78
[ "MIT" ]
3
2021-12-16T19:54:57.000Z
2021-12-20T18:32:21.000Z
tests/test_uniswap_add.py
Dahlia-Finance/pinnata_contracts
3cf35103b2341a2ca07f8b5653946c74cedf3a78
[ "MIT" ]
2
2021-11-24T05:01:56.000Z
2021-12-04T00:27:54.000Z
import pytest
from brownie import interface


def test_uniswap_add_two_tokens(
    admin, alice, chain, bank, werc20, ufactory, urouter, simple_oracle,
    oracle, celo, cusd, ceur, UniswapV2SpellV1, UniswapV2Oracle, core_oracle
):
    spell = UniswapV2SpellV1.deploy(bank, werc20, urouter, celo, {'from': admin})

    cusd.mint(admin, 10000000 * 10**6, {'from': admin})
    ceur.mint(admin, 10000000 * 10**6, {'from': admin})
    cusd.approve(urouter, 2**256-1, {'from': admin})
    ceur.approve(urouter, 2**256-1, {'from': admin})

    urouter.addLiquidity(
        cusd,
        ceur,
        1000000 * 10**6,
        1000000 * 10**6,
        0,
        0,
        admin,
        chain.time() + 60,
        {'from': admin},
    )

    lp = ufactory.getPair(cusd, ceur)
    print('admin lp bal', interface.IERC20(lp).balanceOf(admin))

    uniswap_lp_oracle = UniswapV2Oracle.deploy(core_oracle, {'from': admin})

    print('ceur Px', simple_oracle.getCELOPx(ceur))
    print('cusd Px', simple_oracle.getCELOPx(cusd))

    core_oracle.setRoute([cusd, ceur, lp], [simple_oracle, simple_oracle, uniswap_lp_oracle])

    print('lp Px', uniswap_lp_oracle.getCELOPx(lp))

    oracle.setTokenFactors(
        [cusd, ceur, lp],
        [
            [10000, 10000, 10000],
            [10000, 10000, 10000],
            [10000, 10000, 10000],
        ],
        {'from': admin},
    )

    cusd.mint(alice, 10000000 * 10**6, {'from': admin})
    ceur.mint(alice, 10000000 * 10**6, {'from': admin})
    cusd.approve(bank, 2**256-1, {'from': alice})
    ceur.approve(bank, 2**256-1, {'from': alice})

    spell.getAndApprovePair(cusd, ceur, {'from': admin})
    lp = ufactory.getPair(cusd, ceur)
    spell.setWhitelistLPTokens([lp], [True], {'from': admin})
    bank.setWhitelistSpells([spell], [True], {'from': admin})
    bank.setWhitelistTokens([cusd, ceur], [True, True], {'from': admin})

    tx = bank.execute(
        0,
        spell,
        spell.addLiquidityWERC20.encode_input(
            ceur,  # token 0
            cusd,  # token 1
            [
                40000 * 10**6,  # 40000 ceur
                50000 * 10**6,  # 50000 cusd
                0,
                1000 * 10**6,  # 1000 ceur
                200 * 10**6,  # 200 cusd
                0,  # borrow LP tokens
                0,  # min ceur
                0,  # min cusd
            ],
        ),
        {'from': alice}
    )

    position_id = tx.return_value

    print('tx gas used', tx.gas_used)
    print('bank collateral size', bank.getPositionInfo(position_id))
    print('bank collateral value', bank.getCollateralCELOValue(position_id))
    print('bank borrow value', bank.getBorrowCELOValue(position_id))
    print('bank ceur', bank.getBankInfo(ceur))
    print('bank cusd', bank.getBankInfo(cusd))
    print('ceur Px', simple_oracle.getCELOPx(ceur))
    print('cusd Px', simple_oracle.getCELOPx(cusd))
    print('lp Px', uniswap_lp_oracle.getCELOPx(lp))
34.127907
145
0.586712
import pytest
from brownie import interface


def test_uniswap_add_two_tokens(
    admin, alice, chain, bank, werc20, ufactory, urouter, simple_oracle,
    oracle, celo, cusd, ceur, UniswapV2SpellV1, UniswapV2Oracle, core_oracle
):
    spell = UniswapV2SpellV1.deploy(bank, werc20, urouter, celo, {'from': admin})

    cusd.mint(admin, 10000000 * 10**6, {'from': admin})
    ceur.mint(admin, 10000000 * 10**6, {'from': admin})
    cusd.approve(urouter, 2**256-1, {'from': admin})
    ceur.approve(urouter, 2**256-1, {'from': admin})

    urouter.addLiquidity(
        cusd,
        ceur,
        1000000 * 10**6,
        1000000 * 10**6,
        0,
        0,
        admin,
        chain.time() + 60,
        {'from': admin},
    )

    lp = ufactory.getPair(cusd, ceur)
    print('admin lp bal', interface.IERC20(lp).balanceOf(admin))

    uniswap_lp_oracle = UniswapV2Oracle.deploy(core_oracle, {'from': admin})

    print('ceur Px', simple_oracle.getCELOPx(ceur))
    print('cusd Px', simple_oracle.getCELOPx(cusd))

    core_oracle.setRoute([cusd, ceur, lp], [simple_oracle, simple_oracle, uniswap_lp_oracle])

    print('lp Px', uniswap_lp_oracle.getCELOPx(lp))

    oracle.setTokenFactors(
        [cusd, ceur, lp],
        [
            [10000, 10000, 10000],
            [10000, 10000, 10000],
            [10000, 10000, 10000],
        ],
        {'from': admin},
    )

    cusd.mint(alice, 10000000 * 10**6, {'from': admin})
    ceur.mint(alice, 10000000 * 10**6, {'from': admin})
    cusd.approve(bank, 2**256-1, {'from': alice})
    ceur.approve(bank, 2**256-1, {'from': alice})

    spell.getAndApprovePair(cusd, ceur, {'from': admin})
    lp = ufactory.getPair(cusd, ceur)
    spell.setWhitelistLPTokens([lp], [True], {'from': admin})
    bank.setWhitelistSpells([spell], [True], {'from': admin})
    bank.setWhitelistTokens([cusd, ceur], [True, True], {'from': admin})

    tx = bank.execute(
        0,
        spell,
        spell.addLiquidityWERC20.encode_input(
            ceur,
            cusd,
            [
                40000 * 10**6,
                50000 * 10**6,
                0,
                1000 * 10**6,
                200 * 10**6,
                0,
                0,
                0,
            ],
        ),
        {'from': alice}
    )

    position_id = tx.return_value

    print('tx gas used', tx.gas_used)
    print('bank collateral size', bank.getPositionInfo(position_id))
    print('bank collateral value', bank.getCollateralCELOValue(position_id))
    print('bank borrow value', bank.getBorrowCELOValue(position_id))
    print('bank ceur', bank.getBankInfo(ceur))
    print('bank cusd', bank.getBankInfo(cusd))
    print('ceur Px', simple_oracle.getCELOPx(ceur))
    print('cusd Px', simple_oracle.getCELOPx(cusd))
    print('lp Px', uniswap_lp_oracle.getCELOPx(lp))
true
true
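Aside: every token amount in the record above is written in base units of 6-decimal tokens; the fixtures mint 10000000 * 10**6 and the inline comments read 40000 * 10**6 as "40000 ceur". A tiny sanity check of that convention (the helper name is ours, not from the record):

DECIMALS = 6

def to_base_units(whole_tokens, decimals=DECIMALS):
    # A 6-decimal token: 40000 whole tokens -> 40000 * 10**6 base units.
    return whole_tokens * 10**decimals

assert to_base_units(40000) == 40000 * 10**6
assert to_base_units(10000000) == 10000000 * 10**6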
f700404190ae8f9854ae05ee091bc6c1b9adc263
3,424
py
Python
verbify_service_websockets/patched_websocket.py
Verbify/verbify-service-websockets
08ae7b0b930c183a55bb96635d7d7c73faf5f629
[ "BSD-3-Clause" ]
94
2016-04-19T06:13:22.000Z
2018-04-19T11:33:28.000Z
verbify_service_websockets/patched_websocket.py
Verbify/verbify-service-websockets
08ae7b0b930c183a55bb96635d7d7c73faf5f629
[ "BSD-3-Clause" ]
10
2016-05-18T22:11:55.000Z
2017-11-17T11:03:24.000Z
verbify_service_websockets/patched_websocket.py
Verbify/verbify-service-websockets
08ae7b0b930c183a55bb96635d7d7c73faf5f629
[ "BSD-3-Clause" ]
17
2016-04-19T06:13:24.000Z
2018-03-24T19:25:53.000Z
""" This module patches a few core functions to add compression capabilities, since gevent-websocket does not appear to be maintained anymore. """ from socket import error from zlib import ( decompressobj, MAX_WBITS, Z_FULL_FLUSH, ) from geventwebsocket.exceptions import ( ProtocolError, WebSocketError, ) from geventwebsocket.websocket import ( MSG_SOCKET_DEAD, Header, WebSocket, ) DECOMPRESSOR = decompressobj(-MAX_WBITS) def _encode_bytes(text): if isinstance(text, str): return text if not isinstance(text, unicode): text = unicode(text or '') return text.encode('utf-8') def make_compressed_frame(message, compressor): """ Make a compressed websocket frame from a message and compressor. Generates header and a compressed message which can then be used on any websocket connection where `no_context_takeover` has been negotiated. This prevents the need to re-compress a broadcast-style message for every websocket connection. `compressor` is a zlib compressor object. """ binary = not isinstance(message, (str, unicode)) opcode = WebSocket.OPCODE_BINARY if binary else WebSocket.OPCODE_TEXT if binary: message = str(message) else: message = _encode_bytes(message) message = compressor.compress(message) # We use Z_FULL_FLUSH (rather than Z_SYNC_FLUSH) here when # server_no_context_takeover has been passed, to reset the context at # the end of every frame. Patches to the actual gevent-websocket # library should probably be able to support both. message += compressor.flush(Z_FULL_FLUSH) # See https://tools.ietf.org/html/rfc7692#page-19 if message.endswith('\x00\x00\xff\xff'): message = message[:-4] # Generate header. The RSV0 bit indicates the payload is compressed. flags = Header.RSV0_MASK header = Header.encode_header( fin=True, opcode=opcode, mask='', length=len(message), flags=flags) return header + message def send_raw_frame(websocket, raw_message): """ `raw_message` includes both the header and the encoded message. """ try: websocket.raw_write(raw_message) except error: websocket.current_app.on_close(MSG_SOCKET_DEAD) raise WebSocketError(MSG_SOCKET_DEAD) def read_frame(websocket): # Patched `read_frame` method that supports decompression header = Header.decode_header(websocket.stream) # Start patched lines compressed = header.flags & header.RSV0_MASK if compressed: header.flags &= ~header.RSV0_MASK # End patched lines if header.flags: raise ProtocolError if not header.length: return header, '' try: payload = websocket.raw_read(header.length) except error: payload = '' except Exception: # Start patched lines raise WebSocketError('Could not read payload') # End patched lines if len(payload) != header.length: raise WebSocketError('Unexpected EOF reading frame payload') if header.mask: payload = header.unmask_payload(payload) # Start patched lines if compressed: payload = ''.join(( DECOMPRESSOR.decompress(payload), DECOMPRESSOR.decompress('\0\0\xff\xff'), DECOMPRESSOR.flush(), )) # End patched lines return header, payload
27.392
77
0.684871
from socket import error
from zlib import (
    decompressobj,
    MAX_WBITS,
    Z_FULL_FLUSH,
)

from geventwebsocket.exceptions import (
    ProtocolError,
    WebSocketError,
)
from geventwebsocket.websocket import (
    MSG_SOCKET_DEAD,
    Header,
    WebSocket,
)

DECOMPRESSOR = decompressobj(-MAX_WBITS)


def _encode_bytes(text):
    if isinstance(text, str):
        return text

    if not isinstance(text, unicode):
        text = unicode(text or '')

    return text.encode('utf-8')


def make_compressed_frame(message, compressor):
    binary = not isinstance(message, (str, unicode))
    opcode = WebSocket.OPCODE_BINARY if binary else WebSocket.OPCODE_TEXT

    if binary:
        message = str(message)
    else:
        message = _encode_bytes(message)

    message = compressor.compress(message)
    message += compressor.flush(Z_FULL_FLUSH)

    if message.endswith('\x00\x00\xff\xff'):
        message = message[:-4]

    flags = Header.RSV0_MASK
    header = Header.encode_header(
        fin=True, opcode=opcode, mask='', length=len(message), flags=flags)

    return header + message


def send_raw_frame(websocket, raw_message):
    try:
        websocket.raw_write(raw_message)
    except error:
        websocket.current_app.on_close(MSG_SOCKET_DEAD)
        raise WebSocketError(MSG_SOCKET_DEAD)


def read_frame(websocket):
    header = Header.decode_header(websocket.stream)

    compressed = header.flags & header.RSV0_MASK

    if compressed:
        header.flags &= ~header.RSV0_MASK

    if header.flags:
        raise ProtocolError

    if not header.length:
        return header, ''

    try:
        payload = websocket.raw_read(header.length)
    except error:
        payload = ''
    except Exception:
        raise WebSocketError('Could not read payload')

    if len(payload) != header.length:
        raise WebSocketError('Unexpected EOF reading frame payload')

    if header.mask:
        payload = header.unmask_payload(payload)

    if compressed:
        payload = ''.join((
            DECOMPRESSOR.decompress(payload),
            DECOMPRESSOR.decompress('\0\0\xff\xff'),
            DECOMPRESSOR.flush(),
        ))

    return header, payload
true
true
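Aside: the record above relies on a detail of RFC 7692 permessage-deflate framing: a DEFLATE stream flushed with a sync/full flush ends in the empty stored block \x00\x00\xff\xff, which the sender strips from the frame and the receiver re-appends before inflating. A standalone Python 3 sketch of that round trip, using only the standard zlib module (no gevent-websocket required):

import zlib

raw = b"hello websocket" * 10

# Sender side: raw DEFLATE (negative wbits = no zlib header), full flush to
# reset the context per frame, then strip the trailing empty stored block.
comp = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
frame_payload = comp.compress(raw) + comp.flush(zlib.Z_FULL_FLUSH)
assert frame_payload.endswith(b"\x00\x00\xff\xff")
frame_payload = frame_payload[:-4]

# Receiver side: re-append the tail, then inflate, mirroring read_frame above.
decomp = zlib.decompressobj(-zlib.MAX_WBITS)
out = decomp.decompress(frame_payload)
out += decomp.decompress(b"\x00\x00\xff\xff")
out += decomp.flush()
assert out == raw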