code | repo_name | path | language | license | size
---|---|---|---|---|---
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from permabots.models import TelegramUpdate
from telegram import User
from permabots.test import factories
from django.core.urlresolvers import reverse
from rest_framework import status
from telegram.replykeyboardhide import ReplyKeyboardHide
from permabots.models import KikMessage
from permabots.models import MessengerMessage
from messengerbot.elements import PostbackButton, WebUrlButton
import json
try:
from unittest import mock
except ImportError:
import mock # noqa
class BaseTestBot(TestCase):
def _gen_token(self, token):
return 'Token %s' % str(token)
def _create_kik_api_message(self):
self.kik_message = factories.KikTextMessageLibFactory()
self.kik_message.participants = [self.kik_message.from_user]
self.kik_messages = {'messages': [self.kik_message]}
def _create_messenger_api_message(self):
self.messenger_text_message = factories.MessengerMessagingFactory()
self.messenger_entry = factories.MessengerEntryFactory()
self.messenger_entry.messaging = [self.messenger_text_message]
self.messenger_webhook_message = factories.MessengerWebhookFactory()
self.messenger_webhook_message.entries = [self.messenger_entry]
def setUp(self):
with mock.patch("telegram.bot.Bot.set_webhook", callable=mock.MagicMock()):
with mock.patch("kik.api.KikApi.set_configuration", callable=mock.MagicMock()):
with mock.patch("messengerbot.MessengerClient.subscribe_app", callable=mock.MagicMock()):
with mock.patch("telegram.bot.Bot.get_me", callable=mock.MagicMock()) as mock_get_me:
user_dict = {'username': u'Microbot_test_bot', 'first_name': u'Microbot_test', 'id': 204840063}
mock_get_me.return_value = User(**user_dict)
self.bot = factories.BotFactory()
self.telegram_webhook_url = reverse('permabots:telegrambot', kwargs={'hook_id': self.bot.telegram_bot.hook_id})
self.kik_webhook_url = reverse('permabots:kikbot', kwargs={'hook_id': self.bot.kik_bot.hook_id})
self.messenger_webhook_url = reverse('permabots:messengerbot', kwargs={'hook_id': self.bot.messenger_bot.hook_id})
self.telegram_update = factories.TelegramUpdateLibFactory()
self._create_kik_api_message()
self._create_messenger_api_message()
self.kwargs = {'content_type': 'application/json', }
def _test_message(self, action, message_api=None, number=1, no_handler=False):
if not message_api:
message_api = self.message_api
        with mock.patch(self.send_message_to_patch, new_callable=mock.MagicMock) as mock_send:
if 'in' in action:
self.set_text(action['in'], message_api)
response = self.client.post(self.webhook_url, self.to_send(message_api), **self.kwargs)
# Check response 200 OK
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Check
if no_handler:
self.assertEqual(0, mock_send.call_count)
else:
self.assertBotResponse(mock_send, action, number)
self.assertAPI(number, message_api)
    def _test_hook(self, action, data, no_hook=False, num_recipients=1, recipients=None, auth=None, status_to_check=None,
                   error_to_check=None):
        with mock.patch(self.send_message_to_patch, new_callable=mock.MagicMock) as mock_send:
hook_url = reverse('permabots:hook', kwargs={'key': action['in']})
if auth:
response = self.client.post(hook_url, data, HTTP_AUTHORIZATION=auth, **self.kwargs)
else:
response = self.client.post(hook_url, data, **self.kwargs)
if no_hook:
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
else:
if status_to_check:
self.assertEqual(response.status_code, status_to_check)
if error_to_check:
self.assertIn(error_to_check, response.data)
else:
# Check response 200 OK
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertBotResponse(mock_send, action, num=num_recipients, recipients=recipients)
class TelegramTestBot(BaseTestBot):
def setUp(self):
super(TelegramTestBot, self).setUp()
self.send_message_to_patch = 'telegram.bot.Bot.send_message'
self.webhook_url = self.telegram_webhook_url
self.message_api = self.telegram_update
def set_text(self, text, update):
if update.message:
update.message.text = text
elif update.callback_query:
update.callback_query.data = text
def to_send(self, update):
if update.callback_query:
update_dict = update.to_dict()
user = update_dict['callback_query'].pop('from_user')
update_dict['callback_query']['from'] = user
return json.dumps(update_dict)
return update.to_json()
def assertTelegramUser(self, model_user, user):
self.assertEqual(model_user.id, user.id)
self.assertEqual(model_user.first_name, user.first_name)
self.assertEqual(model_user.last_name, user.last_name)
self.assertEqual(model_user.username, user.username)
def assertTelegramChat(self, model_chat, chat):
self.assertEqual(model_chat.id, chat.id)
self.assertEqual(model_chat.type, chat.type)
self.assertEqual(model_chat.title, chat.title)
self.assertEqual(model_chat.username, chat.username)
self.assertEqual(model_chat.first_name, chat.first_name)
self.assertEqual(model_chat.last_name, chat.last_name)
def assertTelegramMessage(self, model_message, message):
self.assertEqual(model_message.message_id, message.message_id)
self.assertTelegramUser(model_message.from_user, message.from_user)
self.assertTelegramChat(model_message.chat, message.chat)
# TODO: problems with UTCs
# self.assertEqual(model_message.date, message.date)
self.assertEqual(model_message.text, message.text)
def assertTelegramCallbackQuery(self, model_callback_query, callback_query):
self.assertEqual(model_callback_query.callback_id, callback_query.id)
self.assertTelegramUser(model_callback_query.from_user, callback_query.from_user)
self.assertTelegramChat(model_callback_query.message.chat, callback_query.message.chat)
# TODO: problems with UTCs
# self.assertEqual(model_message.date, message.date)
self.assertEqual(model_callback_query.data, callback_query.data)
def assertTelegramUpdate(self, model_update, update):
self.assertEqual(model_update.update_id, update.update_id)
if update.message:
self.assertTelegramMessage(model_update.message, update.message)
elif update.callback_query:
self.assertTelegramCallbackQuery(model_update.callback_query, update.callback_query)
def assertInTelegramKeyboard(self, button, keyboard):
found = False
for line in keyboard:
if button in line[0].text:
found = True
break
elif line[0].url:
if button in line[0].url:
found = True
break
elif line[0].callback_data:
if button in line[0].callback_data:
found = True
break
self.assertTrue(found)
    def assertBotResponse(self, mock_send, command, num=1, recipients=None):
        # Copy so the caller's list is not mutated (and no shared mutable default).
        recipients = list(recipients) if recipients else []
        self.assertEqual(num, mock_send.call_count)
for call_args in mock_send.call_args_list:
args, kwargs = call_args
if not recipients:
if self.telegram_update.message:
self.assertEqual(kwargs['chat_id'], self.telegram_update.message.chat.id)
elif self.telegram_update.callback_query:
self.assertEqual(kwargs['chat_id'], self.telegram_update.callback_query.message.chat.id)
else:
recipients.remove(kwargs['chat_id'])
self.assertEqual(kwargs['parse_mode'], command['out']['parse_mode'])
if not command['out']['reply_markup']:
self.assertTrue(isinstance(kwargs['reply_markup'], ReplyKeyboardHide))
else:
if isinstance(command['out']['reply_markup'], list):
for keyboard in command['out']['reply_markup']:
self.assertInTelegramKeyboard(keyboard, kwargs['reply_markup'].inline_keyboard)
else:
self.assertInTelegramKeyboard(command['out']['reply_markup'], kwargs['reply_markup'].inline_keyboard)
self.assertIn(command['out']['text'], kwargs['text'])
self.assertEqual([], recipients)
def assertAPI(self, number, message_api):
self.assertEqual(number, TelegramUpdate.objects.count())
self.assertTelegramUpdate(TelegramUpdate.objects.get(update_id=message_api.update_id), message_api)
class KikTestBot(BaseTestBot):
def setUp(self):
super(KikTestBot, self).setUp()
self.send_message_to_patch = 'kik.api.KikApi.send_messages'
self.webhook_url = self.kik_webhook_url
self.message_api = self.kik_messages
def set_text(self, text, update):
update['messages'][0].body = text
def to_send(self, messages):
from time import mktime
message = messages['messages'][0]
if message.timestamp:
message.timestamp = int(mktime(message.timestamp.timetuple()))
message.id = str(message.id)
return json.dumps({'messages': [message.to_json()]})
def assertKikMessage(self, model_message, message):
self.assertEqual(str(model_message.message_id), message.id)
self.assertEqual(model_message.from_user.username, message.from_user)
self.assertEqual(model_message.chat.id, message.chat_id)
if message.participants:
self.assertEqual([participant.username for participant in model_message.chat.participants.all()], message.participants)
else:
self.assertEqual(model_message.chat.participants.count(), 0)
# TODO: problems with UTCs
# self.assertEqual(model_message.date, message.date)
if message.type == "text":
body = message.body
if message.type == "start-chatting":
body = "/start"
self.assertEqual(model_message.body, body)
def assertInKikKeyboard(self, button, keyboard):
found = False
for response in keyboard.responses:
if button in response.body:
found = True
break
self.assertTrue(found)
    def assertBotResponse(self, mock_send, command, num=1, recipients=None):
        # Copy so the caller's list is not mutated (and no shared mutable default).
        recipients = list(recipients) if recipients else []
        self.assertEqual(num, mock_send.call_count)
for call_args in mock_send.call_args_list:
args, kwargs = call_args
message = args[0][0]
if not recipients:
self.assertEqual(message.chat_id, self.kik_message.chat_id)
else:
                recipients.remove(message.chat_id)
if not command['out']['reply_markup']:
self.assertEqual(message.keyboards, [])
else:
if isinstance(command['out']['reply_markup'], list):
for keyboard in command['out']['reply_markup']:
self.assertInKikKeyboard(keyboard, message.keyboards[0])
else:
self.assertInKikKeyboard(command['out']['reply_markup'], message.keyboards[0])
self.assertIn(command['out']['body'], message.body)
self.assertEqual([], recipients)
def assertAPI(self, number, message_api):
self.assertEqual(number, KikMessage.objects.count())
self.assertKikMessage(KikMessage.objects.get(message_id=message_api['messages'][0].id), message_api['messages'][0])
class MessengerTestBot(BaseTestBot):
def setUp(self):
super(MessengerTestBot, self).setUp()
self.send_message_to_patch = 'messengerbot.MessengerClient.send'
self.webhook_url = self.messenger_webhook_url
self.message_api = self.messenger_webhook_message
def set_text(self, text, update):
if update.entries[0].messaging[0].type == 'message':
update.entries[0].messaging[0].message.text = text
else:
update.entries[0].messaging[0].message.payload = text
def to_send(self, update):
return json.dumps(update.to_json())
def assertMessengerMessage(self, model_message, message):
message = message.entries[0].messaging[0]
self.assertEqual(model_message.sender, message.sender)
self.assertEqual(model_message.recipient, message.recipient)
if model_message.type == MessengerMessage.MESSAGE:
self.assertEqual(model_message.text, message.message.text)
else:
self.assertEqual(model_message.postback, message.message.payload)
def assertInMessengerKeyboard(self, button, keyboard):
found = False
for element in keyboard.elements:
for keyboard_button in element.buttons:
if button in keyboard_button.title:
found = True
break
elif isinstance(keyboard_button, PostbackButton):
if button in keyboard_button.payload:
found = True
break
elif isinstance(keyboard_button, WebUrlButton):
if button in keyboard_button.url:
found = True
break
self.assertTrue(found)
    def assertBotResponse(self, mock_send, command, num=1, recipients=None):
        # Copy so the caller's list is not mutated (and no shared mutable default).
        recipients = list(recipients) if recipients else []
        self.assertEqual(num, mock_send.call_count)
for call_args in mock_send.call_args_list:
args, kwargs = call_args
message = args[0]
if not recipients:
self.assertEqual(message.recipient.recipient_id, self.messenger_entry.messaging[0].sender)
else:
recipients.remove(message.recipient.recipient_id)
if not command['out']['reply_markup']:
self.assertEqual(message.message.attachment, None)
text = message.message.text
self.assertIn(command['out']['body'], text)
else:
if isinstance(command['out']['reply_markup'], list):
for keyboard in command['out']['reply_markup']:
self.assertInMessengerKeyboard(keyboard, message.message.attachment.template)
else:
self.assertInMessengerKeyboard(command['out']['reply_markup'], message.message.attachment.template)
self.assertIn(message.message.attachment.template.elements[0].title, command['out']['body'])
self.assertEqual([], recipients)
def assertAPI(self, number, message_api):
self.assertEqual(number, MessengerMessage.objects.count())
self.assertMessengerMessage(MessengerMessage.objects.all()[0], message_api)
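# Illustrative only (not part of the original module): a project-specific test
# case could drive the helpers above with an action dict, where 'in' is the
# incoming text and 'out' describes the expected reply. The handler text and
# reply fields below are hypothetical.
#
#     class MyTelegramTests(TelegramTestBot):
#         def test_start(self):
#             action = {'in': '/start',
#                       'out': {'parse_mode': 'HTML',
#                               'reply_markup': '',
#                               'text': 'Welcome'}}
#             self._test_message(action)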
| jlmadurga/permabots | permabots/test/testcases.py | Python | bsd-3-clause | 16,025 |
from django.urls import include, path, register_converter
from django.urls.converters import StringConverter
from django.contrib import admin
from django.contrib.auth.views import LogoutView
from django.views.generic import RedirectView, TemplateView
from pontoon.teams.views import team
class LocaleConverter(StringConverter):
regex = r"[A-Za-z0-9\-\@\.]+"
register_converter(LocaleConverter, "locale")
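# With this converter registered, a route such as "<locale:locale>/" (used by
# the team page pattern at the end of urlpatterns below) matches locale codes
# like "en-US" or "sr@latin" and passes them to the view as a string; the
# example codes are illustrative.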
pontoon_js_view = TemplateView.as_view(
template_name="js/pontoon.js", content_type="text/javascript"
)
permission_denied_view = TemplateView.as_view(template_name="403.html")
page_not_found_view = TemplateView.as_view(template_name="404.html")
server_error_view = TemplateView.as_view(template_name="500.html")
urlpatterns = [
# Accounts
path("accounts/", include("pontoon.allauth_urls")),
# Admin
path("admin/", include("pontoon.administration.urls")),
# Django admin: Disable the login form
path("a/login/", permission_denied_view),
# Django admin
path("a/", admin.site.urls),
# Logout
path("signout/", logout, {"next_page": "/"}, name="signout"),
# Error pages
path("403/", permission_denied_view),
path("404/", page_not_found_view),
path("500/", server_error_view),
# Robots.txt
path(
"robots.txt",
TemplateView.as_view(template_name="robots.txt", content_type="text/plain"),
),
# contribute.json
path(
"contribute.json",
TemplateView.as_view(
template_name="contribute.json", content_type="text/plain"
),
),
# Favicon
path(
"favicon.ico",
RedirectView.as_view(url="/static/img/favicon.ico", permanent=True),
),
# Include script
path("pontoon.js", pontoon_js_view),
path("static/js/pontoon.js", pontoon_js_view),
# Include URL configurations from installed apps
path("terminology/", include("pontoon.terminology.urls")),
path("translations/", include("pontoon.translations.urls")),
path("", include("pontoon.teams.urls")),
path("", include("pontoon.tour.urls")),
path("", include("pontoon.tags.urls")),
path("", include("pontoon.sync.urls")),
path("", include("pontoon.projects.urls")),
path("", include("pontoon.machinery.urls")),
path("", include("pontoon.contributors.urls")),
path("", include("pontoon.localizations.urls")),
path("", include("pontoon.base.urls")),
path("", include("pontoon.translate.urls")),
path("", include("pontoon.batch.urls")),
path("", include("pontoon.api.urls")),
path("", include("pontoon.homepage.urls")),
path("", include("pontoon.in_context.urls")),
path("", include("pontoon.uxactionlog.urls")),
# Team page: Must be at the end
path("<locale:locale>/", team, name="pontoon.teams.team"),
]
| mathjazz/pontoon | pontoon/urls.py | Python | bsd-3-clause | 2,791 |
from django.conf.urls.defaults import *
from corehq.apps.adm.dispatcher import ADMAdminInterfaceDispatcher, ADMSectionDispatcher
from corehq.apps.adm.views import ADMAdminCRUDFormView
adm_admin_interface_urls = patterns('corehq.apps.adm.views',
url(r'^$', 'default_adm_admin', name="default_adm_admin_interface"),
    url(r'^form/(?P<form_type>[\w_]+)/(?P<action>update|new|delete)/((?P<item_id>[\w_]+)/)?$',
ADMAdminCRUDFormView.as_view(), name="adm_item_form"),
ADMAdminInterfaceDispatcher.url_pattern(),
)
urlpatterns = patterns('corehq.apps.adm.views',
url(r'^$', 'default_adm_report', name="default_adm_report"),
ADMSectionDispatcher.url_pattern(),
)
| gmimano/commcaretest | corehq/apps/adm/urls.py | Python | bsd-3-clause | 691 |
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
            'Name': 'Get-KeePassConfigTrigger',
# list of one or more authors for the module
'Author': ['@tifkin_', '@harmj0y'],
# more verbose multi-line description of the module
            'Description': ('This module extracts the trigger specifications from a KeePass 2.X configuration XML file.'),
# True if the module needs to run in the background
'Background' : True,
# File extension to save the file as
'OutputExtension' : '',
# True if the module needs admin rights to run
'NeedsAdmin' : False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
# list of any references/other comments
'Comments': [
'https://github.com/adaptivethreat/KeeThief'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to run the module on.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
moduleName = self.info["Name"]
        # read in the KeePassConfig.ps1 module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/collection/vaults/KeePassConfig.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
# get just the code needed for the specified function
script = moduleCode
script += "\nFind-KeePassconfig | Get-KeePassConfigTrigger "
script += ' | Format-List | Out-String | %{$_ + \"`n\"};"`n'+str(moduleName)+' completed!"'
return script
| Hackplayers/Empire-mod-Hpys-tests | lib/modules/powershell/collection/vaults/get_keepass_config_trigger.py | Python | bsd-3-clause | 3,104 |
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Adapted from portage/getbinpkg.py -- Portage binary-package helper functions
# Copyright 2003-2004 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
"""Helpers dealing with binpkg Packages index files"""
import collections
import cStringIO
import operator
import os
import tempfile
import time
import urllib2
from chromite.lib import cros_build_lib
from chromite.lib import gs
from chromite.lib import parallel
TWO_WEEKS = 60 * 60 * 24 * 7 * 2
HTTP_FORBIDDEN_CODES = (401, 403)
HTTP_NOT_FOUND_CODES = (404, 410)
_Package = collections.namedtuple('_Package', ['mtime', 'uri'])
class PackageIndex(object):
"""A parser for the Portage Packages index file.
The Portage Packages index file serves to keep track of what packages are
included in a tree. It contains the following sections:
1) The header. The header tracks general key/value pairs that don't apply
to any specific package. E.g., it tracks the base URL of the packages
file, and the number of packages included in the file. The header is
terminated by a blank line.
2) The body. The body is a list of packages. Each package contains a list
of key/value pairs. Packages are either terminated by a blank line or
by the end of the file. Every package has a CPV entry, which serves as
a unique identifier for the package.
"""
def __init__(self):
"""Constructor."""
# The header tracks general key/value pairs that don't apply to any
# specific package. E.g., it tracks the base URL of the packages.
self.header = {}
# A list of packages (stored as a list of dictionaries).
self.packages = []
# Whether or not the PackageIndex has been modified since the last time it
# was written.
self.modified = False
def _PopulateDuplicateDB(self, db, expires):
"""Populate db with SHA1 -> URL mapping for packages.
Args:
db: Dictionary to populate with SHA1 -> URL mapping for packages.
expires: The time at which prebuilts expire from the binhost.
"""
uri = gs.CanonicalizeURL(self.header['URI'])
for pkg in self.packages:
cpv, sha1, mtime = pkg['CPV'], pkg.get('SHA1'), pkg.get('MTIME')
oldpkg = db.get(sha1, _Package(0, None))
if sha1 and mtime and int(mtime) > max(expires, oldpkg.mtime):
path = pkg.get('PATH', cpv + '.tbz2')
db[sha1] = _Package(int(mtime), '%s/%s' % (uri.rstrip('/'), path))
def _ReadPkgIndex(self, pkgfile):
"""Read a list of key/value pairs from the Packages file into a dictionary.
Both header entries and package entries are lists of key/value pairs, so
they can both be read by this function. Entries can be terminated by empty
lines or by the end of the file.
    This function will read lines from the specified file until it encounters
    a blank line or the end of the file.
Keys and values in the Packages file are separated by a colon and a space.
Keys may contain capital letters, numbers, and underscores, but may not
contain colons. Values may contain any character except a newline. In
particular, it is normal for values to contain colons.
Lines that have content, and do not contain a valid key/value pair, are
ignored. This is for compatibility with the Portage package parser, and
to allow for future extensions to the Packages file format.
    All entries must contain at least one key/value pair. If the end of the
    file is reached, an empty dictionary is returned.
Args:
pkgfile: A python file object.
Returns:
The dictionary of key-value pairs that was read from the file.
"""
d = {}
for line in pkgfile:
line = line.rstrip('\n')
if not line:
assert d, 'Packages entry must contain at least one key/value pair'
break
line = line.split(': ', 1)
if len(line) == 2:
k, v = line
d[k] = v
return d
def _WritePkgIndex(self, pkgfile, entry):
"""Write header entry or package entry to packages file.
The keys and values will be separated by a colon and a space. The entry
will be terminated by a blank line.
Args:
pkgfile: A python file object.
entry: A dictionary of the key/value pairs to write.
"""
lines = ['%s: %s' % (k, v) for k, v in sorted(entry.items()) if v]
pkgfile.write('%s\n\n' % '\n'.join(lines))
def _ReadHeader(self, pkgfile):
"""Read header of packages file.
Args:
pkgfile: A python file object.
"""
assert not self.header, 'Should only read header once.'
self.header = self._ReadPkgIndex(pkgfile)
def _ReadBody(self, pkgfile):
"""Read body of packages file.
Before calling this function, you must first read the header (using
_ReadHeader).
Args:
pkgfile: A python file object.
"""
assert self.header, 'Should read header first.'
assert not self.packages, 'Should only read body once.'
# Read all of the sections in the body by looping until we reach the end
# of the file.
while True:
d = self._ReadPkgIndex(pkgfile)
if not d:
break
if 'CPV' in d:
self.packages.append(d)
def Read(self, pkgfile):
"""Read the entire packages file.
Args:
pkgfile: A python file object.
"""
self._ReadHeader(pkgfile)
self._ReadBody(pkgfile)
def RemoveFilteredPackages(self, filter_fn):
"""Remove packages which match filter_fn.
Args:
filter_fn: A function which operates on packages. If it returns True,
the package should be removed.
"""
filtered = [p for p in self.packages if not filter_fn(p)]
if filtered != self.packages:
self.modified = True
self.packages = filtered
def ResolveDuplicateUploads(self, pkgindexes):
"""Point packages at files that have already been uploaded.
For each package in our index, check if there is an existing package that
has already been uploaded to the same base URI, and that is no older than
two weeks. If so, point that package at the existing file, so that we don't
have to upload the file.
Args:
pkgindexes: A list of PackageIndex objects containing info about packages
that have already been uploaded.
Returns:
A list of the packages that still need to be uploaded.
"""
db = {}
now = int(time.time())
expires = now - TWO_WEEKS
base_uri = gs.CanonicalizeURL(self.header['URI'])
for pkgindex in pkgindexes:
if gs.CanonicalizeURL(pkgindex.header['URI']) == base_uri:
# pylint: disable=W0212
pkgindex._PopulateDuplicateDB(db, expires)
uploads = []
base_uri = self.header['URI']
for pkg in self.packages:
sha1 = pkg.get('SHA1')
dup = db.get(sha1)
if sha1 and dup and dup.uri.startswith(base_uri):
pkg['PATH'] = dup.uri[len(base_uri):].lstrip('/')
pkg['MTIME'] = str(dup.mtime)
else:
pkg['MTIME'] = str(now)
uploads.append(pkg)
return uploads
def SetUploadLocation(self, base_uri, path_prefix):
"""Set upload location to base_uri + path_prefix.
Args:
base_uri: Base URI for all packages in the file. We set
self.header['URI'] to this value, so all packages must live under
this directory.
path_prefix: Path prefix to use for all current packages in the file.
This will be added to the beginning of the path for every package.
"""
self.header['URI'] = base_uri
for pkg in self.packages:
path = pkg['CPV'] + '.tbz2'
pkg['PATH'] = '%s/%s' % (path_prefix.rstrip('/'), path)
def Write(self, pkgfile):
"""Write a packages file to disk.
If 'modified' flag is set, the TIMESTAMP and PACKAGES fields in the header
will be updated before writing to disk.
Args:
pkgfile: A python file object.
"""
if self.modified:
self.header['TIMESTAMP'] = str(long(time.time()))
self.header['PACKAGES'] = str(len(self.packages))
self.modified = False
self._WritePkgIndex(pkgfile, self.header)
for metadata in sorted(self.packages, key=operator.itemgetter('CPV')):
self._WritePkgIndex(pkgfile, metadata)
def WriteToNamedTemporaryFile(self):
"""Write pkgindex to a temporary file.
Args:
pkgindex: The PackageIndex object.
Returns:
A temporary file containing the packages from pkgindex.
"""
f = tempfile.NamedTemporaryFile(prefix='chromite.binpkg.pkgidx.')
self.Write(f)
f.flush()
f.seek(0)
return f
def _RetryUrlOpen(url, tries=3):
"""Open the specified url, retrying if we run into temporary errors.
We retry for both network errors and 5xx Server Errors. We do not retry
for HTTP errors with a non-5xx code.
Args:
url: The specified url.
tries: The number of times to try.
Returns:
The result of urllib2.urlopen(url).
"""
for i in range(tries):
try:
return urllib2.urlopen(url)
except urllib2.HTTPError as e:
if i + 1 >= tries or e.code < 500:
e.msg += ('\nwhile processing %s' % url)
raise
else:
print 'Cannot GET %s: %s' % (url, str(e))
except urllib2.URLError as e:
if i + 1 >= tries:
raise
else:
print 'Cannot GET %s: %s' % (url, str(e))
print 'Sleeping for 10 seconds before retrying...'
time.sleep(10)
def GrabRemotePackageIndex(binhost_url):
"""Grab the latest binary package database from the specified URL.
Args:
binhost_url: Base URL of remote packages (PORTAGE_BINHOST).
Returns:
A PackageIndex object, if the Packages file can be retrieved. If the
packages file cannot be retrieved, then None is returned.
"""
url = '%s/Packages' % binhost_url.rstrip('/')
pkgindex = PackageIndex()
if binhost_url.startswith('http'):
try:
f = _RetryUrlOpen(url)
except urllib2.HTTPError as e:
if e.code in HTTP_FORBIDDEN_CODES:
cros_build_lib.PrintBuildbotStepWarnings()
cros_build_lib.Error('Cannot GET %s: %s' % (url, str(e)))
return None
# Not found errors are normal if old prebuilts were cleaned out.
if e.code in HTTP_NOT_FOUND_CODES:
return None
raise
elif binhost_url.startswith('gs://'):
try:
gs_context = gs.GSContext()
output = gs_context.Cat(url).output
except (cros_build_lib.RunCommandError, gs.GSNoSuchKey) as e:
cros_build_lib.PrintBuildbotStepWarnings()
cros_build_lib.Error('Cannot GET %s: %s' % (url, str(e)))
return None
f = cStringIO.StringIO(output)
else:
return None
pkgindex.Read(f)
pkgindex.header.setdefault('URI', binhost_url)
f.close()
return pkgindex
def GrabLocalPackageIndex(package_path):
"""Read a local packages file from disk into a PackageIndex() object.
Args:
package_path: Directory containing Packages file.
Returns:
A PackageIndex object.
"""
packages_file = file(os.path.join(package_path, 'Packages'))
pkgindex = PackageIndex()
pkgindex.Read(packages_file)
packages_file.close()
return pkgindex
def _DownloadURLs(urls, dest_dir):
"""Copy URLs into the specified |dest_dir|.
Args:
urls: List of URLs to fetch.
dest_dir: Destination directory.
"""
gs_ctx = gs.GSContext()
cmd = ['cp'] + urls + [dest_dir]
gs_ctx.DoCommand(cmd, parallel=len(urls) > 1)
def FetchTarballs(binhost_urls, pkgdir):
"""Prefetch the specified |binhost_urls| to the specified |pkgdir|.
This function fetches the tarballs from the specified list of binhost
URLs to disk. It does not populate the Packages file -- we leave that
to Portage.
Args:
binhost_urls: List of binhost URLs to fetch.
pkgdir: Location to store the fetched packages.
"""
categories = {}
for binhost_url in binhost_urls:
pkgindex = GrabRemotePackageIndex(binhost_url)
base_uri = pkgindex.header['URI']
for pkg in pkgindex.packages:
cpv = pkg['CPV']
path = pkg.get('PATH', '%s.tbz2' % cpv)
uri = '/'.join([base_uri, path])
category = cpv.partition('/')[0]
fetches = categories.setdefault(category, {})
fetches[cpv] = uri
with parallel.BackgroundTaskRunner(_DownloadURLs) as queue:
for category, urls in categories.iteritems():
category_dir = os.path.join(pkgdir, category)
if not os.path.exists(category_dir):
os.makedirs(category_dir)
queue.put((urls.values(), category_dir))
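# Illustrative usage sketch (not part of the original module); the paths and
# URLs are hypothetical:
#
#   pkgindex = GrabLocalPackageIndex('/build/board/packages')
#   pkgindex.SetUploadLocation('gs://example-binhost', 'board/packages')
#   uploads = pkgindex.ResolveDuplicateUploads([])
#   with open('/tmp/Packages', 'w') as f:
#     pkgindex.Write(f)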
| bpsinc-native/src_third_party_chromite | lib/binpkg.py | Python | bsd-3-clause | 12,642 |
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='dataflow',
version='0.1.1',
description='a dataflow library for python',
author='Tim Cuthbertson',
author_email='[email protected]',
url='http://github.com/gfxmonk/py-dataflow/tree',
packages=find_packages(exclude=["test"]),
long_description=open('readme.rst').read(),
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='dataflow concurrent concurrency',
license='BSD',
install_requires=[
'setuptools',
],
)
| gfxmonk/py-dataflow | setup.py | Python | bsd-3-clause | 694 |
import unittest
from hrt import input_handler
class TestCLIInput(unittest.TestCase):
def setUp(self):
pass
def test_stdin_input(self):
pass
def test_interactive_input(self):
pass
def test_file_input(self):
pass
def test_inline_input(self):
pass
def test_when_input_unicode(self):
pass
def test_multiple_input_methods_chosen(self):
pass
def test_empty_input(self):
pass
if __name__ == '__main__':
unittest.main()
| dhruvagarwal/http-request-translator | tests/test_input.py | Python | bsd-3-clause | 524 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from bokeh.command.bootstrap import main
# Module under test
import bokeh.command.subcommands.secret as scsecret
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
def test_create():
import argparse
from bokeh.command.subcommand import Subcommand
obj = scsecret.Secret(parser=argparse.ArgumentParser())
assert isinstance(obj, Subcommand)
def test_name():
assert scsecret.Secret.name == "secret"
def test_help():
assert scsecret.Secret.help == "Create a Bokeh secret key for use with Bokeh server"
def test_args():
assert scsecret.Secret.args == (
)
def test_run(capsys):
main(["bokeh", "secret"])
out, err = capsys.readouterr()
assert err == ""
assert len(out) == 45
assert out[-1] == '\n'
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| stonebig/bokeh | bokeh/command/subcommands/tests/test_secret.py | Python | bsd-3-clause | 2,391 |
# User Instructions
#
# Write a function 'sub1' that, given a
# string, embeds that string in
# the string:
# "I think X is a perfectly normal thing to do in public."
# where X is replaced by the given
# string.
#
given_string = "I think %s is a perfectly normal thing to do in public."
def sub1(s):
return given_string.replace("%s", s)
def sub2(s):
return given_string % s
print sub1("running")
# => "I think running is a perfectly normal thing to do in public."
print sub1("sleeping")
# => "I think sleeping is a perfectly normal thing to do in public."
| KellyChan/python-examples | web/gae/python/b_words_replaced.py | Python | mit | 581 |
from .downloader_base import DownloaderBase
from ... import logger
log = logger.get(__name__)
import traceback
import subprocess
import shutil
from ... import settings
class ExternalDownloader(DownloaderBase):
"""Abstract Base class for downloading through an external utility"""
program = None
args = []
@classmethod
def is_available(cls):
return not settings.is_windows() and shutil.which(cls.program)
def get(self, url):
try:
log.debug('%s downloader getting url %s', self.program, url)
call = [self.program] + self.args + [url]
result = subprocess.check_output(call)
except subprocess.CalledProcessError:
log.error('%s downloader failed.', self.program)
traceback.print_exc()
result = False
return result
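# A minimal concrete subclass sketch (illustrative, not part of the original
# module): curl is present on most non-Windows systems and writes the response
# body to stdout, which is what the check_output() call in get() captures.
class CurlDownloader(ExternalDownloader):
    """Example downloader that shells out to curl."""
    program = 'curl'
    # -s silences the progress meter, -L follows redirects.
    args = ['-s', '-L']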
| Colorsublime/Colorsublime-Plugin | colorsublime/http/downloaders/external.py | Python | mit | 843 |
class Solution:
# @param s, a string
# @param dict, a set of string
# @return a boolean
def wordBreak(self, s, dict):
solved = [False for i in range(len(s) + 1)]
solved[0] = True
for i in range(len(s)):
for j in range(i + 1):
if s[j : i + 1] in dict:
solved[i + 1] |= solved[j]
return solved[len(s)]
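# Example (illustrative): with s = "leetcode" and dict = {"leet", "code"},
# solved[0] is True, "leet" marks solved[4], and "code" then marks solved[8].
#
#   print Solution().wordBreak("leetcode", {"leet", "code"})  # => True
#   print Solution().wordBreak("sand", {"sa", "an"})          # => False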
| happylixue/LeetCodeSol | problems/word-break/sol.py | Python | mit | 396 |
# mako/codegen.py
# Copyright 2006-2019 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""provides functionality for rendering a parsetree constructing into module
source code."""
import json
import re
import time
from mako import ast
from mako import compat
from mako import exceptions
from mako import filters
from mako import parsetree
from mako import util
from mako.pygen import PythonPrinter
MAGIC_NUMBER = 10
# names which are hardwired into the
# template and are not accessed via the
# context itself
TOPLEVEL_DECLARED = set(["UNDEFINED", "STOP_RENDERING"])
RESERVED_NAMES = set(["context", "loop"]).union(TOPLEVEL_DECLARED)
def compile( # noqa
node,
uri,
filename=None,
default_filters=None,
buffer_filters=None,
imports=None,
future_imports=None,
source_encoding=None,
generate_magic_comment=True,
disable_unicode=False,
strict_undefined=False,
enable_loop=True,
reserved_names=frozenset(),
):
"""Generate module source code given a parsetree node,
uri, and optional source filename"""
# if on Py2K, push the "source_encoding" string to be
# a bytestring itself, as we will be embedding it into
# the generated source and we don't want to coerce the
# result into a unicode object, in "disable_unicode" mode
if not compat.py3k and isinstance(source_encoding, compat.text_type):
source_encoding = source_encoding.encode(source_encoding)
buf = util.FastEncodingBuffer()
printer = PythonPrinter(buf)
_GenerateRenderMethod(
printer,
_CompileContext(
uri,
filename,
default_filters,
buffer_filters,
imports,
future_imports,
source_encoding,
generate_magic_comment,
disable_unicode,
strict_undefined,
enable_loop,
reserved_names,
),
node,
)
return buf.getvalue()
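# Illustrative only: a minimal sketch of driving this compiler by hand,
# assuming mako.lexer.Lexer (mako.template normally does this for you):
#
#   from mako.lexer import Lexer
#   node = Lexer(u"hello ${name}", "memory:0").parse()
#   print(compile(node, uri="memory:0"))  # the generated module source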
class _CompileContext(object):
def __init__(
self,
uri,
filename,
default_filters,
buffer_filters,
imports,
future_imports,
source_encoding,
generate_magic_comment,
disable_unicode,
strict_undefined,
enable_loop,
reserved_names,
):
self.uri = uri
self.filename = filename
self.default_filters = default_filters
self.buffer_filters = buffer_filters
self.imports = imports
self.future_imports = future_imports
self.source_encoding = source_encoding
self.generate_magic_comment = generate_magic_comment
self.disable_unicode = disable_unicode
self.strict_undefined = strict_undefined
self.enable_loop = enable_loop
self.reserved_names = reserved_names
class _GenerateRenderMethod(object):
"""A template visitor object which generates the
full module source for a template.
"""
def __init__(self, printer, compiler, node):
self.printer = printer
self.compiler = compiler
self.node = node
self.identifier_stack = [None]
self.in_def = isinstance(node, (parsetree.DefTag, parsetree.BlockTag))
if self.in_def:
name = "render_%s" % node.funcname
args = node.get_argument_expressions()
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get("buffered", "False"))
cached = eval(node.attributes.get("cached", "False"))
defs = None
pagetag = None
if node.is_block and not node.is_anonymous:
args += ["**pageargs"]
else:
defs = self.write_toplevel()
pagetag = self.compiler.pagetag
name = "render_body"
if pagetag is not None:
args = pagetag.body_decl.get_argument_expressions()
if not pagetag.body_decl.kwargs:
args += ["**pageargs"]
cached = eval(pagetag.attributes.get("cached", "False"))
self.compiler.enable_loop = self.compiler.enable_loop or eval(
pagetag.attributes.get("enable_loop", "False")
)
else:
args = ["**pageargs"]
cached = False
buffered = filtered = False
if args is None:
args = ["context"]
else:
args = [a for a in ["context"] + args]
self.write_render_callable(
pagetag or node, name, args, buffered, filtered, cached
)
if defs is not None:
for node in defs:
_GenerateRenderMethod(printer, compiler, node)
if not self.in_def:
self.write_metadata_struct()
def write_metadata_struct(self):
self.printer.source_map[self.printer.lineno] = max(
self.printer.source_map
)
struct = {
"filename": self.compiler.filename,
"uri": self.compiler.uri,
"source_encoding": self.compiler.source_encoding,
"line_map": self.printer.source_map,
}
self.printer.writelines(
'"""',
"__M_BEGIN_METADATA",
json.dumps(struct),
"__M_END_METADATA\n" '"""',
)
@property
def identifiers(self):
return self.identifier_stack[-1]
def write_toplevel(self):
"""Traverse a template structure for module-level directives and
generate the start of module-level code.
"""
inherit = []
namespaces = {}
module_code = []
self.compiler.pagetag = None
class FindTopLevel(object):
def visitInheritTag(s, node):
inherit.append(node)
def visitNamespaceTag(s, node):
namespaces[node.name] = node
def visitPageTag(s, node):
self.compiler.pagetag = node
def visitCode(s, node):
if node.ismodule:
module_code.append(node)
f = FindTopLevel()
for n in self.node.nodes:
n.accept_visitor(f)
self.compiler.namespaces = namespaces
module_ident = set()
for n in module_code:
module_ident = module_ident.union(n.declared_identifiers())
module_identifiers = _Identifiers(self.compiler)
module_identifiers.declared = module_ident
# module-level names, python code
if (
self.compiler.generate_magic_comment
and self.compiler.source_encoding
):
self.printer.writeline(
"# -*- coding:%s -*-" % self.compiler.source_encoding
)
if self.compiler.future_imports:
self.printer.writeline(
"from __future__ import %s"
% (", ".join(self.compiler.future_imports),)
)
self.printer.writeline("from mako import runtime, filters, cache")
self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
self.printer.writeline("STOP_RENDERING = runtime.STOP_RENDERING")
self.printer.writeline("__M_dict_builtin = dict")
self.printer.writeline("__M_locals_builtin = locals")
self.printer.writeline("_magic_number = %r" % MAGIC_NUMBER)
self.printer.writeline("_modified_time = %r" % time.time())
self.printer.writeline("_enable_loop = %r" % self.compiler.enable_loop)
self.printer.writeline(
"_template_filename = %r" % self.compiler.filename
)
self.printer.writeline("_template_uri = %r" % self.compiler.uri)
self.printer.writeline(
"_source_encoding = %r" % self.compiler.source_encoding
)
if self.compiler.imports:
buf = ""
for imp in self.compiler.imports:
buf += imp + "\n"
self.printer.writeline(imp)
impcode = ast.PythonCode(
buf,
source="",
lineno=0,
pos=0,
filename="template defined imports",
)
else:
impcode = None
main_identifiers = module_identifiers.branch(self.node)
mit = module_identifiers.topleveldefs
module_identifiers.topleveldefs = mit.union(
main_identifiers.topleveldefs
)
module_identifiers.declared.update(TOPLEVEL_DECLARED)
if impcode:
module_identifiers.declared.update(impcode.declared_identifiers)
self.compiler.identifiers = module_identifiers
self.printer.writeline(
"_exports = %r"
% [n.name for n in main_identifiers.topleveldefs.values()]
)
self.printer.write_blanks(2)
if len(module_code):
self.write_module_code(module_code)
if len(inherit):
self.write_namespaces(namespaces)
self.write_inherit(inherit[-1])
elif len(namespaces):
self.write_namespaces(namespaces)
return list(main_identifiers.topleveldefs.values())
def write_render_callable(
self, node, name, args, buffered, filtered, cached
):
"""write a top-level render callable.
this could be the main render() method or that of a top-level def."""
if self.in_def:
decorator = node.decorator
if decorator:
self.printer.writeline(
"@runtime._decorate_toplevel(%s)" % decorator
)
self.printer.start_source(node.lineno)
self.printer.writelines(
"def %s(%s):" % (name, ",".join(args)),
# push new frame, assign current frame to __M_caller
"__M_caller = context.caller_stack._push_frame()",
"try:",
)
if buffered or filtered or cached:
self.printer.writeline("context._push_buffer()")
self.identifier_stack.append(
self.compiler.identifiers.branch(self.node)
)
if (not self.in_def or self.node.is_block) and "**pageargs" in args:
self.identifier_stack[-1].argument_declared.add("pageargs")
if not self.in_def and (
len(self.identifiers.locally_assigned) > 0
or len(self.identifiers.argument_declared) > 0
):
self.printer.writeline(
"__M_locals = __M_dict_builtin(%s)"
% ",".join(
[
"%s=%s" % (x, x)
for x in self.identifiers.argument_declared
]
)
)
self.write_variable_declares(self.identifiers, toplevel=True)
for n in self.node.nodes:
n.accept_visitor(self)
self.write_def_finish(self.node, buffered, filtered, cached)
self.printer.writeline(None)
self.printer.write_blanks(2)
if cached:
self.write_cache_decorator(
node, name, args, buffered, self.identifiers, toplevel=True
)
def write_module_code(self, module_code):
"""write module-level template code, i.e. that which
is enclosed in <%! %> tags in the template."""
for n in module_code:
self.printer.write_indented_block(n.text, starting_lineno=n.lineno)
def write_inherit(self, node):
"""write the module-level inheritance-determination callable."""
self.printer.writelines(
"def _mako_inherit(template, context):",
"_mako_generate_namespaces(context)",
"return runtime._inherit_from(context, %s, _template_uri)"
% (node.parsed_attributes["file"]),
None,
)
def write_namespaces(self, namespaces):
"""write the module-level namespace-generating callable."""
self.printer.writelines(
"def _mako_get_namespace(context, name):",
"try:",
"return context.namespaces[(__name__, name)]",
"except KeyError:",
"_mako_generate_namespaces(context)",
"return context.namespaces[(__name__, name)]",
None,
None,
)
self.printer.writeline("def _mako_generate_namespaces(context):")
for node in namespaces.values():
if "import" in node.attributes:
self.compiler.has_ns_imports = True
self.printer.start_source(node.lineno)
if len(node.nodes):
self.printer.writeline("def make_namespace():")
export = []
identifiers = self.compiler.identifiers.branch(node)
self.in_def = True
class NSDefVisitor(object):
def visitDefTag(s, node):
s.visitDefOrBase(node)
def visitBlockTag(s, node):
s.visitDefOrBase(node)
def visitDefOrBase(s, node):
if node.is_anonymous:
raise exceptions.CompileException(
"Can't put anonymous blocks inside "
"<%namespace>",
**node.exception_kwargs
)
self.write_inline_def(node, identifiers, nested=False)
export.append(node.funcname)
vis = NSDefVisitor()
for n in node.nodes:
n.accept_visitor(vis)
self.printer.writeline("return [%s]" % (",".join(export)))
self.printer.writeline(None)
self.in_def = False
callable_name = "make_namespace()"
else:
callable_name = "None"
if "file" in node.parsed_attributes:
self.printer.writeline(
"ns = runtime.TemplateNamespace(%r,"
" context._clean_inheritance_tokens(),"
" templateuri=%s, callables=%s, "
" calling_uri=_template_uri)"
% (
node.name,
node.parsed_attributes.get("file", "None"),
callable_name,
)
)
elif "module" in node.parsed_attributes:
self.printer.writeline(
"ns = runtime.ModuleNamespace(%r,"
" context._clean_inheritance_tokens(),"
" callables=%s, calling_uri=_template_uri,"
" module=%s)"
% (
node.name,
callable_name,
node.parsed_attributes.get("module", "None"),
)
)
else:
self.printer.writeline(
"ns = runtime.Namespace(%r,"
" context._clean_inheritance_tokens(),"
" callables=%s, calling_uri=_template_uri)"
% (node.name, callable_name)
)
if eval(node.attributes.get("inheritable", "False")):
self.printer.writeline("context['self'].%s = ns" % (node.name))
self.printer.writeline(
"context.namespaces[(__name__, %s)] = ns" % repr(node.name)
)
self.printer.write_blanks(1)
if not len(namespaces):
self.printer.writeline("pass")
self.printer.writeline(None)
def write_variable_declares(self, identifiers, toplevel=False, limit=None):
"""write variable declarations at the top of a function.
the variable declarations are in the form of callable
definitions for defs and/or name lookup within the
function's context argument. the names declared are based
on the names that are referenced in the function body,
which don't otherwise have any explicit assignment
operation. names that are assigned within the body are
assumed to be locally-scoped variables and are not
separately declared.
for def callable definitions, if the def is a top-level
callable then a 'stub' callable is generated which wraps
the current Context into a closure. if the def is not
top-level, it is fully rendered as a local closure.
"""
# collection of all defs available to us in this scope
comp_idents = dict([(c.funcname, c) for c in identifiers.defs])
to_write = set()
# write "context.get()" for all variables we are going to
# need that arent in the namespace yet
to_write = to_write.union(identifiers.undeclared)
# write closure functions for closures that we define
# right here
to_write = to_write.union(
[c.funcname for c in identifiers.closuredefs.values()]
)
# remove identifiers that are declared in the argument
# signature of the callable
to_write = to_write.difference(identifiers.argument_declared)
# remove identifiers that we are going to assign to.
# in this way we mimic Python's behavior,
# i.e. assignment to a variable within a block
# means that variable is now a "locally declared" var,
# which cannot be referenced beforehand.
to_write = to_write.difference(identifiers.locally_declared)
if self.compiler.enable_loop:
has_loop = "loop" in to_write
to_write.discard("loop")
else:
has_loop = False
# if a limiting set was sent, constraint to those items in that list
# (this is used for the caching decorator)
if limit is not None:
to_write = to_write.intersection(limit)
if toplevel and getattr(self.compiler, "has_ns_imports", False):
self.printer.writeline("_import_ns = {}")
self.compiler.has_imports = True
for ident, ns in self.compiler.namespaces.items():
if "import" in ns.attributes:
self.printer.writeline(
"_mako_get_namespace(context, %r)."
"_populate(_import_ns, %r)"
% (
ident,
re.split(r"\s*,\s*", ns.attributes["import"]),
)
)
if has_loop:
self.printer.writeline("loop = __M_loop = runtime.LoopStack()")
for ident in to_write:
if ident in comp_idents:
comp = comp_idents[ident]
if comp.is_block:
if not comp.is_anonymous:
self.write_def_decl(comp, identifiers)
else:
self.write_inline_def(comp, identifiers, nested=True)
else:
if comp.is_root():
self.write_def_decl(comp, identifiers)
else:
self.write_inline_def(comp, identifiers, nested=True)
elif ident in self.compiler.namespaces:
self.printer.writeline(
"%s = _mako_get_namespace(context, %r)" % (ident, ident)
)
else:
if getattr(self.compiler, "has_ns_imports", False):
if self.compiler.strict_undefined:
self.printer.writelines(
"%s = _import_ns.get(%r, UNDEFINED)"
% (ident, ident),
"if %s is UNDEFINED:" % ident,
"try:",
"%s = context[%r]" % (ident, ident),
"except KeyError:",
"raise NameError(\"'%s' is not defined\")" % ident,
None,
None,
)
else:
self.printer.writeline(
"%s = _import_ns.get"
"(%r, context.get(%r, UNDEFINED))"
% (ident, ident, ident)
)
else:
if self.compiler.strict_undefined:
self.printer.writelines(
"try:",
"%s = context[%r]" % (ident, ident),
"except KeyError:",
"raise NameError(\"'%s' is not defined\")" % ident,
None,
)
else:
self.printer.writeline(
"%s = context.get(%r, UNDEFINED)" % (ident, ident)
)
self.printer.writeline("__M_writer = context.writer()")
def write_def_decl(self, node, identifiers):
"""write a locally-available callable referencing a top-level def"""
funcname = node.funcname
namedecls = node.get_argument_expressions()
nameargs = node.get_argument_expressions(as_call=True)
if not self.in_def and (
len(self.identifiers.locally_assigned) > 0
or len(self.identifiers.argument_declared) > 0
):
nameargs.insert(0, "context._locals(__M_locals)")
else:
nameargs.insert(0, "context")
self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
self.printer.writeline(
"return render_%s(%s)" % (funcname, ",".join(nameargs))
)
self.printer.writeline(None)
def write_inline_def(self, node, identifiers, nested):
"""write a locally-available def callable inside an enclosing def."""
namedecls = node.get_argument_expressions()
decorator = node.decorator
if decorator:
self.printer.writeline(
"@runtime._decorate_inline(context, %s)" % decorator
)
self.printer.writeline(
"def %s(%s):" % (node.funcname, ",".join(namedecls))
)
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get("buffered", "False"))
cached = eval(node.attributes.get("cached", "False"))
self.printer.writelines(
# push new frame, assign current frame to __M_caller
"__M_caller = context.caller_stack._push_frame()",
"try:",
)
if buffered or filtered or cached:
self.printer.writelines("context._push_buffer()")
identifiers = identifiers.branch(node, nested=nested)
self.write_variable_declares(identifiers)
self.identifier_stack.append(identifiers)
for n in node.nodes:
n.accept_visitor(self)
self.identifier_stack.pop()
self.write_def_finish(node, buffered, filtered, cached)
self.printer.writeline(None)
if cached:
self.write_cache_decorator(
node,
node.funcname,
namedecls,
False,
identifiers,
inline=True,
toplevel=False,
)
def write_def_finish(
self, node, buffered, filtered, cached, callstack=True
):
"""write the end section of a rendering function, either outermost or
inline.
this takes into account if the rendering function was filtered,
buffered, etc. and closes the corresponding try: block if any, and
writes code to retrieve captured content, apply filters, send proper
return value."""
if not buffered and not cached and not filtered:
self.printer.writeline("return ''")
if callstack:
self.printer.writelines(
"finally:", "context.caller_stack._pop_frame()", None
)
if buffered or filtered or cached:
if buffered or cached:
# in a caching scenario, don't try to get a writer
# from the context after popping; assume the caching
                # implementation might be using a context with no
# extra buffers
self.printer.writelines(
"finally:", "__M_buf = context._pop_buffer()"
)
else:
self.printer.writelines(
"finally:",
"__M_buf, __M_writer = context._pop_buffer_and_writer()",
)
if callstack:
self.printer.writeline("context.caller_stack._pop_frame()")
s = "__M_buf.getvalue()"
if filtered:
s = self.create_filter_callable(
node.filter_args.args, s, False
)
self.printer.writeline(None)
if buffered and not cached:
s = self.create_filter_callable(
self.compiler.buffer_filters, s, False
)
if buffered or cached:
self.printer.writeline("return %s" % s)
else:
self.printer.writelines("__M_writer(%s)" % s, "return ''")
def write_cache_decorator(
self,
node_or_pagetag,
name,
args,
buffered,
identifiers,
inline=False,
toplevel=False,
):
"""write a post-function decorator to replace a rendering
callable with a cached version of itself."""
self.printer.writeline("__M_%s = %s" % (name, name))
cachekey = node_or_pagetag.parsed_attributes.get(
"cache_key", repr(name)
)
cache_args = {}
if self.compiler.pagetag is not None:
cache_args.update(
(pa[6:], self.compiler.pagetag.parsed_attributes[pa])
for pa in self.compiler.pagetag.parsed_attributes
if pa.startswith("cache_") and pa != "cache_key"
)
cache_args.update(
(pa[6:], node_or_pagetag.parsed_attributes[pa])
for pa in node_or_pagetag.parsed_attributes
if pa.startswith("cache_") and pa != "cache_key"
)
if "timeout" in cache_args:
cache_args["timeout"] = int(eval(cache_args["timeout"]))
self.printer.writeline("def %s(%s):" % (name, ",".join(args)))
# form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
pass_args = [
"%s=%s" % ((a.split("=")[0],) * 2) if "=" in a else a for a in args
]
self.write_variable_declares(
identifiers,
toplevel=toplevel,
limit=node_or_pagetag.undeclared_identifiers(),
)
if buffered:
s = (
"context.get('local')."
"cache._ctx_get_or_create("
"%s, lambda:__M_%s(%s), context, %s__M_defname=%r)"
% (
cachekey,
name,
",".join(pass_args),
"".join(
["%s=%s, " % (k, v) for k, v in cache_args.items()]
),
name,
)
)
# apply buffer_filters
s = self.create_filter_callable(
self.compiler.buffer_filters, s, False
)
self.printer.writelines("return " + s, None)
else:
self.printer.writelines(
"__M_writer(context.get('local')."
"cache._ctx_get_or_create("
"%s, lambda:__M_%s(%s), context, %s__M_defname=%r))"
% (
cachekey,
name,
",".join(pass_args),
"".join(
["%s=%s, " % (k, v) for k, v in cache_args.items()]
),
name,
),
"return ''",
None,
)
def create_filter_callable(self, args, target, is_expression):
"""write a filter-applying expression based on the filters
present in the given filter names, adjusting for the global
'default' filter aliases as needed."""
def locate_encode(name):
if re.match(r"decode\..+", name):
return "filters." + name
elif self.compiler.disable_unicode:
return filters.NON_UNICODE_ESCAPES.get(name, name)
else:
return filters.DEFAULT_ESCAPES.get(name, name)
if "n" not in args:
if is_expression:
if self.compiler.pagetag:
args = self.compiler.pagetag.filter_args.args + args
if self.compiler.default_filters and "n" not in args:
args = self.compiler.default_filters + args
for e in args:
# if filter given as a function, get just the identifier portion
if e == "n":
continue
m = re.match(r"(.+?)(\(.*\))", e)
if m:
ident, fargs = m.group(1, 2)
f = locate_encode(ident)
e = f + fargs
else:
e = locate_encode(e)
assert e is not None
target = "%s(%s)" % (e, target)
return target
def visitExpression(self, node):
self.printer.start_source(node.lineno)
if (
len(node.escapes)
or (
self.compiler.pagetag is not None
and len(self.compiler.pagetag.filter_args.args)
)
or len(self.compiler.default_filters)
):
s = self.create_filter_callable(
node.escapes_code.args, "%s" % node.text, True
)
self.printer.writeline("__M_writer(%s)" % s)
else:
self.printer.writeline("__M_writer(%s)" % node.text)
def visitControlLine(self, node):
if node.isend:
self.printer.writeline(None)
if node.has_loop_context:
self.printer.writeline("finally:")
self.printer.writeline("loop = __M_loop._exit()")
self.printer.writeline(None)
else:
self.printer.start_source(node.lineno)
if self.compiler.enable_loop and node.keyword == "for":
text = mangle_mako_loop(node, self.printer)
else:
text = node.text
self.printer.writeline(text)
children = node.get_children()
# this covers the three situations where we want to insert a pass:
# 1) a ternary control line with no children,
# 2) a primary control line with nothing but its own ternary
# and end control lines, and
# 3) any control line with no content other than comments
if not children or (
compat.all(
isinstance(c, (parsetree.Comment, parsetree.ControlLine))
for c in children
)
and compat.all(
(node.is_ternary(c.keyword) or c.isend)
for c in children
if isinstance(c, parsetree.ControlLine)
)
):
self.printer.writeline("pass")
def visitText(self, node):
self.printer.start_source(node.lineno)
self.printer.writeline("__M_writer(%s)" % repr(node.content))
def visitTextTag(self, node):
filtered = len(node.filter_args.args) > 0
if filtered:
self.printer.writelines(
"__M_writer = context._push_writer()", "try:"
)
for n in node.nodes:
n.accept_visitor(self)
if filtered:
self.printer.writelines(
"finally:",
"__M_buf, __M_writer = context._pop_buffer_and_writer()",
"__M_writer(%s)"
% self.create_filter_callable(
node.filter_args.args, "__M_buf.getvalue()", False
),
None,
)
def visitCode(self, node):
if not node.ismodule:
self.printer.write_indented_block(
node.text, starting_lineno=node.lineno
)
if not self.in_def and len(self.identifiers.locally_assigned) > 0:
# if we are the "template" def, fudge locally
# declared/modified variables into the "__M_locals" dictionary,
# which is used for def calls within the same template,
# to simulate "enclosing scope"
self.printer.writeline(
"__M_locals_builtin_stored = __M_locals_builtin()"
)
self.printer.writeline(
"__M_locals.update(__M_dict_builtin([(__M_key,"
" __M_locals_builtin_stored[__M_key]) for __M_key in"
" [%s] if __M_key in __M_locals_builtin_stored]))"
% ",".join([repr(x) for x in node.declared_identifiers()])
)
def visitIncludeTag(self, node):
self.printer.start_source(node.lineno)
args = node.attributes.get("args")
if args:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri, %s)"
% (node.parsed_attributes["file"], args)
)
else:
self.printer.writeline(
"runtime._include_file(context, %s, _template_uri)"
% (node.parsed_attributes["file"])
)
def visitNamespaceTag(self, node):
pass
def visitDefTag(self, node):
pass
def visitBlockTag(self, node):
if node.is_anonymous:
self.printer.writeline("%s()" % node.funcname)
else:
nameargs = node.get_argument_expressions(as_call=True)
nameargs += ["**pageargs"]
self.printer.writeline(
"if 'parent' not in context._data or "
"not hasattr(context._data['parent'], '%s'):" % node.funcname
)
self.printer.writeline(
"context['self'].%s(%s)" % (node.funcname, ",".join(nameargs))
)
self.printer.writeline("\n")
def visitCallNamespaceTag(self, node):
# TODO: we can put namespace-specific checks here, such
# as ensure the given namespace will be imported,
# pre-import the namespace, etc.
self.visitCallTag(node)
def visitCallTag(self, node):
self.printer.writeline("def ccall(caller):")
export = ["body"]
callable_identifiers = self.identifiers.branch(node, nested=True)
body_identifiers = callable_identifiers.branch(node, nested=False)
# we want the 'caller' passed to ccall to be used
# for the body() function, but for other non-body()
# <%def>s within <%call> we want the current caller
# off the call stack (if any)
body_identifiers.add_declared("caller")
self.identifier_stack.append(body_identifiers)
class DefVisitor(object):
def visitDefTag(s, node):
s.visitDefOrBase(node)
def visitBlockTag(s, node):
s.visitDefOrBase(node)
def visitDefOrBase(s, node):
self.write_inline_def(node, callable_identifiers, nested=False)
if not node.is_anonymous:
export.append(node.funcname)
                # remove defs that are within the <%call> from the
                # "closuredefs" defined in the body, so they don't render twice
if node.funcname in body_identifiers.closuredefs:
del body_identifiers.closuredefs[node.funcname]
vis = DefVisitor()
for n in node.nodes:
n.accept_visitor(vis)
self.identifier_stack.pop()
bodyargs = node.body_decl.get_argument_expressions()
self.printer.writeline("def body(%s):" % ",".join(bodyargs))
# TODO: figure out best way to specify
# buffering/nonbuffering (at call time would be better)
buffered = False
if buffered:
self.printer.writelines("context._push_buffer()", "try:")
self.write_variable_declares(body_identifiers)
self.identifier_stack.append(body_identifiers)
for n in node.nodes:
n.accept_visitor(self)
self.identifier_stack.pop()
self.write_def_finish(node, buffered, False, False, callstack=False)
self.printer.writelines(None, "return [%s]" % (",".join(export)), None)
self.printer.writelines(
# push on caller for nested call
"context.caller_stack.nextcaller = "
"runtime.Namespace('caller', context, "
"callables=ccall(__M_caller))",
"try:",
)
self.printer.start_source(node.lineno)
self.printer.writelines(
"__M_writer(%s)"
% self.create_filter_callable([], node.expression, True),
"finally:",
"context.caller_stack.nextcaller = None",
None,
)
class _Identifiers(object):
"""tracks the status of identifier names as template code is rendered."""
def __init__(self, compiler, node=None, parent=None, nested=False):
if parent is not None:
# if we are the branch created in write_namespaces(),
# we don't share any context from the main body().
if isinstance(node, parsetree.NamespaceTag):
self.declared = set()
self.topleveldefs = util.SetLikeDict()
else:
# things that have already been declared
# in an enclosing namespace (i.e. names we can just use)
self.declared = (
set(parent.declared)
.union([c.name for c in parent.closuredefs.values()])
.union(parent.locally_declared)
.union(parent.argument_declared)
)
# if these identifiers correspond to a "nested"
# scope, it means whatever the parent identifiers
# had as undeclared will have been declared by that parent,
# and therefore we have them in our scope.
if nested:
self.declared = self.declared.union(parent.undeclared)
# top level defs that are available
self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
else:
self.declared = set()
self.topleveldefs = util.SetLikeDict()
self.compiler = compiler
# things within this level that are referenced before they
# are declared (e.g. assigned to)
self.undeclared = set()
# things that are declared locally. some of these things
# could be in the "undeclared" list as well if they are
# referenced before declared
self.locally_declared = set()
# assignments made in explicit python blocks.
# these will be propagated to
# the context of local def calls.
self.locally_assigned = set()
# things that are declared in the argument
# signature of the def callable
self.argument_declared = set()
# closure defs that are defined in this level
self.closuredefs = util.SetLikeDict()
self.node = node
if node is not None:
node.accept_visitor(self)
illegal_names = self.compiler.reserved_names.intersection(
self.locally_declared
)
if illegal_names:
raise exceptions.NameConflictError(
"Reserved words declared in template: %s"
% ", ".join(illegal_names)
)
def branch(self, node, **kwargs):
"""create a new Identifiers for a new Node, with
this Identifiers as the parent."""
return _Identifiers(self.compiler, node, self, **kwargs)
@property
def defs(self):
return set(self.topleveldefs.union(self.closuredefs).values())
def __repr__(self):
return (
"Identifiers(declared=%r, locally_declared=%r, "
"undeclared=%r, topleveldefs=%r, closuredefs=%r, "
"argumentdeclared=%r)"
% (
list(self.declared),
list(self.locally_declared),
list(self.undeclared),
[c.name for c in self.topleveldefs.values()],
[c.name for c in self.closuredefs.values()],
self.argument_declared,
)
)
def check_declared(self, node):
"""update the state of this Identifiers with the undeclared
and declared identifiers of the given node."""
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
for ident in node.declared_identifiers():
self.locally_declared.add(ident)
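    # Informal illustration: given a block <% y = 10 %> followed by the
    # expression ${x + y}, 'y' lands in locally_declared (and, via
    # visitCode, in locally_assigned), while 'x' is collected in undeclared
    # because it is referenced without ever being declared at this level.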
def add_declared(self, ident):
self.declared.add(ident)
if ident in self.undeclared:
self.undeclared.remove(ident)
def visitExpression(self, node):
self.check_declared(node)
def visitControlLine(self, node):
self.check_declared(node)
def visitCode(self, node):
if not node.ismodule:
self.check_declared(node)
self.locally_assigned = self.locally_assigned.union(
node.declared_identifiers()
)
def visitNamespaceTag(self, node):
# only traverse into the sub-elements of a
# <%namespace> tag if we are the branch created in
# write_namespaces()
if self.node is node:
for n in node.nodes:
n.accept_visitor(self)
def _check_name_exists(self, collection, node):
existing = collection.get(node.funcname)
collection[node.funcname] = node
if (
existing is not None
and existing is not node
and (node.is_block or existing.is_block)
):
raise exceptions.CompileException(
"%%def or %%block named '%s' already "
"exists in this template." % node.funcname,
**node.exception_kwargs
)
def visitDefTag(self, node):
if node.is_root() and not node.is_anonymous:
self._check_name_exists(self.topleveldefs, node)
elif node is not self.node:
self._check_name_exists(self.closuredefs, node)
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
# visit defs only one level deep
if node is self.node:
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
def visitBlockTag(self, node):
if node is not self.node and not node.is_anonymous:
if isinstance(self.node, parsetree.DefTag):
raise exceptions.CompileException(
"Named block '%s' not allowed inside of def '%s'"
% (node.name, self.node.name),
**node.exception_kwargs
)
elif isinstance(
self.node, (parsetree.CallTag, parsetree.CallNamespaceTag)
):
raise exceptions.CompileException(
"Named block '%s' not allowed inside of <%%call> tag"
% (node.name,),
**node.exception_kwargs
)
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
if not node.is_anonymous:
self._check_name_exists(self.topleveldefs, node)
self.undeclared.add(node.funcname)
elif node is not self.node:
self._check_name_exists(self.closuredefs, node)
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
def visitTextTag(self, node):
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
def visitIncludeTag(self, node):
self.check_declared(node)
def visitPageTag(self, node):
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
self.check_declared(node)
def visitCallNamespaceTag(self, node):
self.visitCallTag(node)
def visitCallTag(self, node):
if node is self.node:
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
else:
for ident in node.undeclared_identifiers():
if ident != "context" and ident not in self.declared.union(
self.locally_declared
):
self.undeclared.add(ident)
_FOR_LOOP = re.compile(
r"^for\s+((?:\(?)\s*[A-Za-z_][A-Za-z_0-9]*"
r"(?:\s*,\s*(?:[A-Za-z_][A-Za-z0-9_]*),??)*\s*(?:\)?))\s+in\s+(.*):"
)
def mangle_mako_loop(node, printer):
"""converts a for loop into a context manager wrapped around a for loop
when access to the `loop` variable has been detected in the for loop body
"""
loop_variable = LoopVariable()
node.accept_visitor(loop_variable)
if loop_variable.detected:
node.nodes[-1].has_loop_context = True
match = _FOR_LOOP.match(node.text)
if match:
printer.writelines(
"loop = __M_loop._enter(%s)" % match.group(2),
"try:"
# 'with __M_loop(%s) as loop:' % match.group(2)
)
text = "for %s in loop:" % match.group(1)
else:
raise SyntaxError("Couldn't apply loop context: %s" % node.text)
else:
text = node.text
return text
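# Sketch of the rewrite performed above when the body references ``loop``:
# a template line
#     % for item in items:
# is emitted as
#     loop = __M_loop._enter(items)
#     try:
#         for item in loop:
# with the matching "finally: loop = __M_loop._exit()" produced later by
# visitControlLine when the corresponding end line is reached.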
class LoopVariable(object):
"""A node visitor which looks for the name 'loop' within undeclared
identifiers."""
def __init__(self):
self.detected = False
def _loop_reference_detected(self, node):
if "loop" in node.undeclared_identifiers():
self.detected = True
else:
for n in node.get_children():
n.accept_visitor(self)
def visitControlLine(self, node):
self._loop_reference_detected(node)
def visitCode(self, node):
self._loop_reference_detected(node)
def visitExpression(self, node):
self._loop_reference_detected(node)
| wujuguang/mako | mako/codegen.py | Python | mit | 47,892 |
#!/usr/bin/env python
from circuits.web import Server, JSONController
class Root(JSONController):
def index(self):
return {"success": True, "message": "Hello World!"}
app = Server(("0.0.0.0", 8000))
Root().register(app)
app.run()
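# With the server running, a GET request to http://localhost:8000/ should
# return an application/json response along the lines of
#     {"success": true, "message": "Hello World!"}
# since JSONController serializes the dict returned by the handler.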
| nizox/circuits | examples/web/jsoncontroller.py | Python | mit | 247 |
from __future__ import print_function
import time
import pickle
import numpy as np
import scipy.optimize, scipy.ndimage
from acq4.util import Qt
import acq4.pyqtgraph as pg
from acq4.Manager import getManager
class PipetteTracker(object):
"""Provides functionality for automated tracking and recalibration of pipette tip position
based on camera feedback.
The current implementation uses normalized cross-correlation to do template matching against
a stack of reference images collected with `takeReferenceFrames()`.
"""
def __init__(self, pipette):
self.dev = pipette
fileName = self.dev.configFileName('ref_frames.pk')
try:
self.reference = pickle.load(open(fileName, 'rb'))
except Exception:
self.reference = {}
def takeFrame(self, imager=None):
"""Acquire one frame from an imaging device.
        This method guarantees that the frame's exposure begins *after* the call is made.
"""
imager = self._getImager(imager)
restart = False
if imager.isRunning():
restart = True
imager.stop()
frame = imager.acquireFrames(1)
if restart:
imager.start()
return frame
def getNextFrame(self, imager=None):
"""Return the next frame available from the imager.
Note: the frame may have been exposed before this method was called.
"""
imager = self._getImager(imager)
self.__nextFrame = None
def newFrame(newFrame):
self.__nextFrame = newFrame
imager.sigNewFrame.connect(newFrame)
try:
start = pg.ptime.time()
while pg.ptime.time() < start + 5.0:
Qt.QApplication.processEvents()
frame = self.__nextFrame
if frame is not None:
self.__nextFrame = None
return frame
time.sleep(0.01)
raise RuntimeError("Did not receive frame from imager.")
finally:
pg.disconnect(imager.sigNewFrame, newFrame)
def _getImager(self, imager=None):
if imager is None:
imager = 'Camera'
        if isinstance(imager, str):
            man = getManager()
            # look up the requested device by name instead of always
            # returning 'Camera' (the previous hard-coded lookup ignored
            # the *imager* argument)
            imager = man.getDevice(imager)
return imager
def getTipImageArea(self, frame, padding, pos=None, tipLength=None):
"""Generate coordinates needed to clip a camera frame to include just the
tip of the pipette and some padding.
By default, images will include the tip of the pipette to a length of 100 pixels.
Return a tuple (minImgPos, maxImgPos, tipRelPos), where the first two
items are (x,y) coordinate pairs giving the corners of the image region to
be extracted, and tipRelPos is the subpixel location of the pipette tip
within this region.
"""
img = frame.data()
if img.ndim == 3:
img = img[0]
if tipLength is None:
tipLength = self.suggestTipLength(frame)
# determine bounding rectangle that we would like to acquire from the tip
if pos is not None:
tipPos = pos
else:
tipPos = self.dev.globalPosition()
tipPos = np.array([tipPos[0], tipPos[1]])
angle = self.dev.getYawAngle() * np.pi / 180.
da = 10 * np.pi / 180 # half-angle of the tip
pxw = frame.info()['pixelSize'][0]
# compute back points of a triangle that circumscribes the tip
backPos1 = np.array([-tipLength * np.cos(angle+da), -tipLength * np.sin(angle+da)])
backPos2 = np.array([-tipLength * np.cos(angle-da), -tipLength * np.sin(angle-da)])
# convert to image coordinates
tr = frame.globalTransform().inverted()[0]
originImgPos = tr.map(pg.Vector([0, 0]))
backImgPos1 = tr.map(pg.Vector(backPos1)) - originImgPos
backImgPos2 = tr.map(pg.Vector(backPos2)) - originImgPos
backImgPos1 = np.array([backImgPos1.x(), backImgPos1.y()])
backImgPos2 = np.array([backImgPos2.x(), backImgPos2.y()])
# Pixel positions of bounding corners in the image relative to tip, including padding.
# Note this is all calculated without actual tip position; this ensures the image
# size is constant even as the tip moves.
allPos = np.vstack([[0, 0], backImgPos1, backImgPos2]).astype('int')
padding = int(padding / pxw)
minRelPos = allPos.min(axis=0) - padding
maxRelPos = allPos.max(axis=0) + padding
# Get absolute pixel position of tip within image
tipImgPos = tr.map(pg.Vector(tipPos))
tipImgPos = np.array([tipImgPos.x(), tipImgPos.y()])
tipImgPx = tipImgPos.astype('int')
# clip bounding coordinates
minRelPos = [np.clip(minRelPos[0], -tipImgPx[0], img.shape[0]-1-tipImgPx[0]),
np.clip(minRelPos[1], -tipImgPx[1], img.shape[1]-1-tipImgPx[1])]
maxRelPos = [np.clip(maxRelPos[0], -tipImgPx[0], img.shape[0]-1-tipImgPx[0]),
np.clip(maxRelPos[1], -tipImgPx[1], img.shape[1]-1-tipImgPx[1])]
# absolute image coordinates of bounding rect
minImgPos = tipImgPx + minRelPos
maxImgPos = tipImgPx + maxRelPos
if np.any(maxImgPos - minImgPos < 1):
raise RuntimeError("No part of tip overlaps with camera frame.")
# subpixel location of tip within image
tipRelPos = tipImgPos - tipImgPx - minRelPos
return minImgPos, maxImgPos, tipRelPos
def takeTipImage(self, padding=50e-6):
"""Acquire an image of the pipette tip plus some padding.
Return a tuple (image, tipPosition).
"""
frame = self.takeFrame()
minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(frame, padding)
# clipped image region
subimg = frame.data()[0, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
return subimg, tipRelPos
def suggestTipLength(self, frame):
# return a suggested tip length to image, given the image resolution
# currently just returns the length of 100 pixels in the frame
return frame.info()['pixelSize'][0] * 100
def takeReferenceFrames(self, zRange=None, zStep=None, imager=None, average=8, tipLength=None):
"""Collect a series of images of the pipette tip at various focal depths.
The collected images are used as reference templates for determining the most likely location
and focal depth of the tip after the calibration is no longer valid.
        The focus is first moved in +z by half of *zRange*, then stepped downward by *zStep* until the
entire *zRange* is covered. Images of the pipette tip are acquired and stored at each step.
This method assumes that the tip is in focus near the center of the camera frame, and that its
position is well-calibrated. Ideally, the illumination is flat and the area surrounding the tip
is free of any artifacts.
Images are filtered using `self.filterImage` before they are stored.
"""
imager = self._getImager(imager)
# Take an initial frame with the tip in focus.
centerFrame = self.takeFrame()
if tipLength is None:
tipLength = self.suggestTipLength(centerFrame)
if zRange is None:
zRange = tipLength*1.5
if zStep is None:
zStep = zRange / 30.
minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(centerFrame, padding=tipLength*0.15, tipLength=tipLength)
center = centerFrame.data()[0, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
center = self.filterImage(center)
# Decide how many frames to collect and at what z depths
nFrames = (int(zRange / zStep) // 2) * 2
pos = self.dev.globalPosition()
zStart = pos[2] + zStep * (nFrames // 2)
frames = []
bg_frames = []
corr = []
print("Collecting %d frames of %0.2fum tip length at %0.2fum resolution." % (nFrames, tipLength*1e6, zStep*1e6))
# Stop camera if it is currently running
restart = False
if imager.isRunning():
restart = True
imager.stop()
try:
with pg.ProgressDialog('Acquiring reference frames...', 0, nFrames*2+1) as dlg:
# collect 2 stacks of images (second stack is for background subtraction)
for j in range(2):
# Set initial focus above start point to reduce hysteresis in focus mechanism
scope = self.dev.scopeDevice()
scope.setFocusDepth(zStart + 10e-6)
# Acquire multiple frames at different depths
for i in range(nFrames):
#pos[2] = zStart - zStep * i
# self.dev._moveToGlobal(pos, 'slow').wait()
scope.setFocusDepth(zStart - zStep * i).wait()
frame = imager.acquireFrames(average)
img = frame.data()[:, minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]].astype(float).mean(axis=0)
img = self.filterImage(img)
if j == 0:
frames.append(img)
corr.append(self._matchTemplateSingle(img, center)[1])
else:
bg_frames.append(img)
dlg += 1
if dlg.wasCanceled():
return
if j == 0:
# move tip out-of-frame to collect background images
self.dev._moveToLocal([-tipLength*3, 0, 0], 'slow').wait()
else:
self.dev._moveToLocal([tipLength*3, 0, 0], 'slow')
finally:
# restart camera if it was running
if restart:
imager.start()
scope.setFocusDepth(pos[2])
# find the index of the frame that most closely matches the initial, tip-focused frame
maxInd = np.argmax(corr)
# stack all frames into a 3D array
frames = np.dstack(frames).transpose((2, 0, 1))
bg_frames = np.dstack(bg_frames).transpose((2, 0, 1))
# subtract background
# frames -= bg_frame.data()
# generate downsampled frame versions
# (for now we generate these on the fly..)
# ds = [frames] + [pg.downsample(pg.downsample(frames, n, axis=1), n, axis=2) for n in [2, 4, 8]]
key = imager.getDeviceStateKey()
self.reference[key] = {
'frames': frames - bg_frames,
'zStep': zStep,
'centerInd': maxInd,
'centerPos': tipRelPos,
'pixelSize': frame.info()['pixelSize'],
'tipLength': tipLength,
# 'downsampledFrames' = ds,
}
# Store with pickle because configfile does not support arrays
pickle.dump(self.reference, open(self.dev.configFileName('ref_frames.pk'), 'wb'))
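    # Hedged usage sketch (hypothetical device name; assumes a configured
    # acq4 Manager with a calibrated pipette and camera):
    #     tracker = PipetteTracker(getManager().getDevice('Pipette1'))
    #     tracker.takeReferenceFrames()        # collect the template stack once
    #     err, corr = tracker.autoCalibrate()  # later: re-locate the tip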
def measureTipPosition(self, padding=50e-6, threshold=0.7, frame=None, pos=None, tipLength=None, show=False):
"""Find the pipette tip location by template matching within a region surrounding the
expected tip position.
Return `((x, y, z), corr)`, where *corr* is the normalized cross-correlation value of
the best template match.
If the strength of the match is less than *threshold*, then raise RuntimeError.
"""
# Grab one frame (if it is not already supplied) and crop it to the region around the pipette tip.
if frame is None:
frame = self.takeFrame()
elif frame == 'next':
frame = self.getNextFrame()
# load up template images
reference = self._getReference()
if tipLength is None:
# select a tip length similar to template images
tipLength = reference['tipLength']
minImgPos, maxImgPos, tipRelPos = self.getTipImageArea(frame, padding, pos=pos, tipLength=tipLength)
img = frame.data()
if img.ndim == 3:
img = img[0]
img = img[minImgPos[0]:maxImgPos[0], minImgPos[1]:maxImgPos[1]]
img = self.filterImage(img)
# resample acquired image to match template pixel size
pxr = frame.info()['pixelSize'][0] / reference['pixelSize'][0]
if pxr != 1.0:
img = scipy.ndimage.zoom(img, pxr)
# run template match against all template frames, find the frame with the strongest match
match = [self.matchTemplate(img, t) for t in reference['frames']]
if show:
pg.plot([m[0][0] for m in match], title='x match vs z')
pg.plot([m[0][1] for m in match], title='y match vs z')
pg.plot([m[1] for m in match], title='match correlation vs z')
maxInd = np.argmax([m[1] for m in match])
if match[maxInd][1] < threshold:
raise RuntimeError("Unable to locate pipette tip (correlation %0.2f < %0.2f)" % (match[maxInd][1], threshold))
# measure z error
zErr = (maxInd - reference['centerInd']) * reference['zStep']
# measure xy position
offset = match[maxInd][0]
tipImgPos = (minImgPos[0] + (offset[0] + reference['centerPos'][0]) / pxr,
minImgPos[1] + (offset[1] + reference['centerPos'][1]) / pxr)
tipPos = frame.mapFromFrameToGlobal(pg.Vector(tipImgPos))
return (tipPos.x(), tipPos.y(), tipPos.z() + zErr), match[maxInd][1]
def measureError(self, padding=50e-6, threshold=0.7, frame=None, pos=None):
"""Return an (x, y, z) tuple indicating the error vector from the calibrated tip position to the
measured (actual) tip position.
"""
if pos is None:
expectedTipPos = self.dev.globalPosition()
else:
expectedTipPos = pos
measuredTipPos, corr = self.measureTipPosition(padding, threshold, frame, pos=pos)
return tuple([measuredTipPos[i] - expectedTipPos[i] for i in (0, 1, 2)])
def _getReference(self):
key = self._getImager().getDeviceStateKey()
try:
return self.reference[key]
except KeyError:
raise Exception("No reference frames found for this pipette / objective combination.")
def autoCalibrate(self, **kwds):
"""Automatically calibrate the pipette tip position using template matching on a single camera frame.
Return the offset in pipette-local coordinates and the normalized cross-correlation value of the template match.
All keyword arguments are passed to `measureTipPosition()`.
"""
# If no image padding is given, then use the template tip length as a first guess
if 'padding' not in kwds:
ref = self._getReference()
kwds['padding'] = ref['tipLength']
if 'frame' not in kwds:
kwds['frame'] = 'next'
try:
tipPos, corr = self.measureTipPosition(**kwds)
except RuntimeError:
kwds['padding'] *= 2
tipPos, corr = self.measureTipPosition(**kwds)
localError = self.dev.mapFromGlobal(tipPos)
tr = self.dev.deviceTransform()
tr.translate(pg.Vector(localError))
self.dev.setDeviceTransform(tr)
return localError, corr
def filterImage(self, img):
"""Return a filtered version of an image to be used in template matching.
Currently, no filtering is applied.
"""
# Sobel should reduce background artifacts, but it also seems to increase the noise in the signal
# itself--two images with slightly different focus can have a very bad match.
# import skimage.feature
# return skimage.filter.sobel(img)
img = scipy.ndimage.morphological_gradient(img, size=(3, 3))
return img
def matchTemplate(self, img, template, dsVals=(4, 2, 1)):
"""Match a template to image data.
Return the (x, y) pixel offset of the template and a value indicating the strength of the match.
For efficiency, the input images are downsampled and matched at low resolution before
iteratively re-matching at higher resolutions. The *dsVals* argument lists the downsampling values
that will be used, in order. Each value in this list must be an integer multiple of
the value that follows it.
"""
# Recursively match at increasing image resolution
imgDs = [pg.downsample(pg.downsample(img, n, axis=0), n, axis=1) for n in dsVals]
tmpDs = [pg.downsample(pg.downsample(template, n, axis=0), n, axis=1) for n in dsVals]
offset = np.array([0, 0])
for i, ds in enumerate(dsVals):
pos, val = self._matchTemplateSingle(imgDs[i], tmpDs[i])
pos = np.array(pos)
if i == len(dsVals) - 1:
offset += pos
# [pg.image(imgDs[j], title=str(j)) for j in range(len(dsVals))]
return offset, val
else:
scale = ds // dsVals[i+1]
assert scale == ds / dsVals[i+1], "dsVals must satisfy constraint: dsVals[i] == dsVals[i+1] * int(x)"
offset *= scale
offset += np.clip(((pos-1) * scale), 0, imgDs[i+1].shape)
end = offset + np.array(tmpDs[i+1].shape) + 3
end = np.clip(end, 0, imgDs[i+1].shape)
imgDs[i+1] = imgDs[i+1][offset[0]:end[0], offset[1]:end[1]]
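    # Minimal sanity-check sketch for the coarse-to-fine matching above
    # (illustrative values; assumes numpy as np and a tracker instance):
    #     img = np.random.rand(256, 256)
    #     tmpl = img[100:140, 80:120]
    #     offset, corr = tracker.matchTemplate(img, tmpl)
    #     # offset should land near (100, 80), with corr close to 1.0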
def _matchTemplateSingle(self, img, template, show=False, unsharp=3):
import skimage.feature
if img.shape[0] < template.shape[0] or img.shape[1] < template.shape[1]:
raise ValueError("Image must be larger than template. %s %s" % (img.shape, template.shape))
cc = skimage.feature.match_template(img, template)
# high-pass filter; we're looking for a fairly sharp peak.
if unsharp is not False:
cc_filt = cc - scipy.ndimage.gaussian_filter(cc, (unsharp, unsharp))
else:
cc_filt = cc
if show:
pg.image(cc)
ind = np.argmax(cc_filt)
pos = np.unravel_index(ind, cc.shape)
val = cc[pos[0], pos[1]]
return pos, val
def mapErrors(self, nSteps=(5, 5, 7), stepSize=(50e-6, 50e-6, 50e-6), padding=60e-6,
threshold=0.4, speed='slow', show=False, intermediateDist=60e-6):
"""Move pipette tip randomly to locations in a grid and measure the position error
at each location.
All tip locations must be within the field of view.
"""
startTime = time.time()
start = np.array(self.dev.globalPosition())
npts = nSteps[0] * nSteps[1] * nSteps[2]
inds = np.mgrid[0:nSteps[0], 0:nSteps[1], 0:nSteps[2]].reshape((3, npts)).transpose()
order = np.arange(npts)
np.random.shuffle(order)
err = np.zeros(nSteps + (3,))
stepSize = np.array(stepSize)
if show:
imv = pg.image()
mark1 = Qt.QGraphicsEllipseItem(Qt.QRectF(-5, -5, 10, 10))
mark1.setBrush(pg.mkBrush(255, 255, 0, 100))
mark1.setZValue(100)
imv.addItem(mark1)
mark2 = Qt.QGraphicsEllipseItem(Qt.QRectF(-5, -5, 10, 10))
mark2.setBrush(pg.mkBrush(255, 0, 0, 100))
mark2.setZValue(100)
imv.addItem(mark2)
        # loop over all points in random order, doing the heavy computation
        # while the pipette is moving
images = []
offsets = []
try:
with pg.ProgressDialog("Acquiring error map...", 0, len(order)) as dlg:
for i in range(len(order)+1):
if i > 0:
lastPos = pos
if i < len(order):
ind = inds[order[i]]
pos = start.copy() + (stepSize * ind)
                        # Jump to the position plus a random offset of length
                        # *intermediateDist* to avoid hysteresis
offset = np.random.normal(size=3)
offset *= intermediateDist / (offset**2).sum()**0.5
offsets.append(offset)
mfut = self.dev._moveToGlobal(pos + offset, speed)
ffut = self.dev.scopeDevice().setFocusDepth(pos[2], speed)
if i > 0:
ind = inds[order[i-1]]
print("Frame: %d %s" % (i-1, lastPos))
err[tuple(ind)] = self.measureError(padding=padding, threshold=threshold, frame=frame, pos=lastPos)
print(" error: %s" % err[tuple(ind)])
dlg += 1
if show:
imv.setImage(frame.data()[0])
p1 = frame.globalTransform().inverted()[0].map(pg.Vector(lastPos))
p2 = frame.globalTransform().inverted()[0].map(pg.Vector(lastPos + err[tuple(ind)]))
mark1.setPos(p1.x(), p1.y())
mark2.setPos(p2.x(), p2.y())
# wait for previous moves to complete
mfut.wait(updates=True)
ffut.wait(updates=True)
# step back to actual target position
self.dev._moveToGlobal(pos, speed).wait(updates=True)
frame = self.takeFrame()
if dlg.wasCanceled():
return None
finally:
self.dev._moveToGlobal(start, 'fast')
self.dev.scopeDevice().setFocusDepth(start[2], 'fast')
self.errorMap = {
'err': err,
'nSteps': nSteps,
'stepSize': stepSize,
'order': order,
'inds': inds,
'offsets': offsets,
'time': time.time() - startTime,
}
filename = self.dev.configFileName('error_map.np')
np.save(open(filename, 'wb'), self.errorMap)
return self.errorMap
def showErrorAnalysis(self):
if not hasattr(self, 'errorMap'):
filename = self.dev.configFileName('error_map.np')
self.errorMap = np.load(open(filename, 'rb'))[np.newaxis][0]
err = self.errorMap
imx = pg.image(err['err'][..., 0].transpose(1, 0, 2), title='X error')
imy = pg.image(err['err'][..., 1], title='Y error')
imz = pg.image(err['err'][..., 2], title='Z error')
# get N,3 array of offset values used to randomize hysteresis
off = np.vstack(err['offsets'])
sh = err['err'].shape
# Get N,3 array of measured position errors
errf = err['err'].reshape(sh[0]*sh[1]*sh[2], 3)[err['order']]
# Display histogram of errors
win = pg.GraphicsWindow(title="%s error" % self.dev.name())
# subtract out slow drift
normErr = errf - scipy.ndimage.gaussian_filter(errf, (20, 0))
# calculate magnitude of error
absErr = (normErr**2).sum(axis=1)**0.5
# errPlot.plot(absErr)
title = "Error Histogram (mean=%s)" % pg.siFormat(absErr.mean(), suffix='m')
errPlot = win.addPlot(row=0, col=0, title=title, labels={'bottom': ('Position error', 'm')})
hist = np.histogram(absErr, bins=50)
errPlot.plot(hist[1], hist[0], stepMode=True)
# display drift and hysteresis plots
driftPlot = win.addPlot(row=0, col=1, rowspan=1, colspan=2, title="Pipette Drift",
labels={'left': ('Position error', 'm'), 'bottom': ('Time', 's')})
driftPlot.plot(np.linspace(0, err['time'], errf.shape[0]), errf[:, 0], pen='r')
driftPlot.plot(np.linspace(0, err['time'], errf.shape[0]), errf[:, 1], pen='g')
driftPlot.plot(np.linspace(0, err['time'], errf.shape[0]), errf[:, 2], pen='b')
xhplot = win.addPlot(row=1, col=0, title='X Hysteresis',
labels={'left': ('Position error', 'm'), 'bottom': ('Last pipette movement', 'm')})
xhplot.plot(-off[:, 0], errf[:, 0], pen=None, symbol='o')
yhplot = win.addPlot(row=1, col=1, title='Y Hysteresis',
labels={'left': ('Position error', 'm'), 'bottom': ('Last pipette movement', 'm')})
yhplot.plot(-off[:, 1], errf[:, 1], pen=None, symbol='o')
zhplot = win.addPlot(row=1, col=2, title='Z Hysteresis',
labels={'left': ('Position error', 'm'), 'bottom': ('Last pipette movement', 'm')})
zhplot.plot(-off[:, 2], errf[:, 2], pen=None, symbol='o')
# Print best fit for manipulator axes
expPos = err['inds'] * err['stepSize']
measPos = expPos + off
guess = np.array([[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0]], dtype='float')
def errFn(v):
return ((measPos - np.dot(expPos, v.reshape(3,4))[:,:3])**2).sum()
fit = scipy.optimize.minimize(errFn, guess)
print("Pipette position transform:", fit)
self.errorMapAnalysis = (imx, imy, imz, win)
class DriftMonitor(Qt.QWidget):
def __init__(self, trackers):
self.trackers = trackers
self.nextFrame = None
Qt.QWidget.__init__(self)
self.timer = Qt.QTimer()
self.timer.timeout.connect(self.update)
self.layout = Qt.QGridLayout()
self.setLayout(self.layout)
self.gv = pg.GraphicsLayoutWidget()
self.layout.addWidget(self.gv, 0, 0)
self.plot = self.gv.addPlot(labels={'left': ('Drift distance', 'm'), 'bottom': ('Time', 's')})
self.plot.addLegend()
self.xplot = self.gv.addPlot(labels={'left': ('X position', 'm')}, row=1, col=0)
self.yplot = self.gv.addPlot(labels={'left': ('Y position', 'm')}, row=2, col=0)
self.zplot = self.gv.addPlot(labels={'left': ('Z position', 'm'), 'bottom': ('Time', 's')}, row=3, col=0)
for plt in [self.xplot, self.yplot, self.zplot]:
plt.setYRange(-10e-6, 10e-6)
self.pens = [(i, len(trackers)) for i in range(len(trackers))]
self.lines = [self.plot.plot(pen=self.pens[i], name=trackers[i].dev.name()) for i in range(len(trackers))]
# self.errors = [[] for i in range(len(trackers))]
# self.cumulative = np.zeros((len(trackers), 3))
self.positions = []
self.times = []
self.timer.start(2000)
trackers[0]._getImager().sigNewFrame.connect(self.newFrame)
self.show()
def newFrame(self, frame):
self.nextFrame = frame
def update(self):
try:
if self.nextFrame is None:
return
frame = self.nextFrame
self.nextFrame = None
self.times.append(time.time())
x = np.array(self.times)
x -= x[0]
pos = []
for i, t in enumerate(self.trackers):
try:
err, corr = t.autoCalibrate(frame=frame, padding=50e-6)
# err = np.array(err)
# self.cumulative[i] += err
# err = (self.cumulative[i]**2).sum()**0.5
pos.append(t.dev.globalPosition())
except RuntimeError:
pos.append([np.nan]*3)
# self.errors[i].append(err)
self.positions.append(pos)
pos = np.array(self.positions)
pos -= pos[0]
err = (pos**2).sum(axis=2)**0.5
for i, t in enumerate(self.trackers):
self.lines[i].setData(x, err[:, i])
for ax, plt in enumerate([self.xplot, self.yplot, self.zplot]):
plt.clear()
for i, t in enumerate(self.trackers):
plt.plot(x, pos[:, i, ax], pen=self.pens[i])
except Exception:
self.timer.stop()
raise
def closeEvent(self, event):
self.timer.stop()
return Qt.QWidget.closeEvent(self, event)
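# Hedged usage sketch (hypothetical device names; assumes a running acq4
# Manager and that each pipette has reference frames for the current imager):
#     man = getManager()
#     trackers = [PipetteTracker(man.getDevice(name))
#                 for name in ('Pipette1', 'Pipette2')]
#     monitor = DriftMonitor(trackers)  # opens a window and starts plotting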
| campagnola/acq4 | acq4/devices/Pipette/tracker.py | Python | mit | 28,251 |
import json
import unittest2
from appengine_fixture_loader.loader import load_fixture
from google.appengine.ext import testbed
from google.appengine.ext import ndb
from helpers.event_simulator import EventSimulator
from helpers.event_team_status_helper import EventTeamStatusHelper
from models.event import Event
from models.event_details import EventDetails
from models.match import Match
class TestSimulated2016nytrEventTeamStatusHelper(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
self.testbed.init_taskqueue_stub(root_path=".")
ndb.get_context().clear_cache() # Prevent data from leaking between tests
def tearDown(self):
self.testbed.deactivate()
def testSimulatedEvent(self):
es = EventSimulator()
event = Event.get_by_id('2016nytr')
es.step()
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>Rank 15/36</b> with a record of <b>0-0-0</b> in quals.')
for _ in xrange(5):
es.step()
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>Rank 6/36</b> with a record of <b>1-0-0</b> in quals.')
for _ in xrange(67):
es.step()
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 16/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # Alliance selections added
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals and will be competing in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals and will be competing in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 16/36</b> with a record of <b>6-6-0</b> in quals and will be competing in the playoffs as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # QF schedule added
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Quarterfinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-0-0</b> in the <b>Quarterfinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-0-0</b> in the <b>Quarterfinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # qf1m1
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-0-0</b> in the <b>Quarterfinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
es.step() # qf2m1
es.step() # qf3m1
es.step() # qf4m1
es.step() # qf1m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Semifinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-1-0</b> in the <b>Quarterfinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>1-0-0</b> in the <b>Quarterfinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # qf2m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>1-1-0</b> in the <b>Quarterfinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
es.step() # qf3m2
es.step() # qf4m2
es.step() # qf2m3
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-0-0</b> in the <b>Semifinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
es.step() # qf4m3
es.step() # sf1m1
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-0-0</b> in the <b>Semifinals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 is <b>0-1-0</b> in the <b>Semifinals</b> as the <b>1st Pick</b> of <b>Alliance 4</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-0-0</b> in the <b>Semifinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # sf2m1
es.step() # sf1m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-1-0</b> in the <b>Semifinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # sf2m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>1-1-0</b> in the <b>Semifinals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>1-1-0</b> in the <b>Semifinals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # sf2m3
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-0-0</b> in the <b>Finals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>0-0-0</b> in the <b>Finals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # f1m1
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-0-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>0-1-0</b> in the <b>Finals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>0-1-0</b> in the <b>Finals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # f1m2
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 is <b>1-1-0</b> in the <b>Finals</b> as the <b>Captain</b> of <b>Alliance 1</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 is <b>1-1-0</b> in the <b>Finals</b> as the <b>2nd Pick</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 is <b>1-1-0</b> in the <b>Finals</b> as the <b>Backup</b> of <b>Alliance 2</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
es.step() # f1m3
event = Event.get_by_id('2016nytr')
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals, competed in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>, and <b>won the event</b> with a playoff record of <b>6-1-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 4/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 16/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>2nd Pick</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 15/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>Backup</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', event)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 21/36</b> with a record of <b>6-6-0</b> in quals.')
class Test2016nytrEventTeamStatusHelper(unittest2.TestCase):
status_359 = {
"alliance": {
"backup": None,
"name": "Alliance 1",
"number": 1,
"pick": 0
},
"playoff": {
"current_level_record": {
"losses": 1,
"ties": 0,
"wins": 2
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 1,
"ties": 0,
"wins": 6
},
"status": "won"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": 0,
"matches_played": 12,
"qual_average": None,
"rank": 1,
"record": {
"losses": 1,
"ties": 0,
"wins": 11
},
"sort_orders": [
39.0,
310.0,
165.0,
448.0,
600.0
],
"team_key": "frc359"
},
"sort_order_info": [
{
"name": "Ranking Score",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Scale/Challenge",
"precision": 0
},
{
"name": "Goals",
"precision": 0
},
{
"name": "Defense",
"precision": 0
}
],
"status": "completed"
}
}
status_5240 = {
"alliance": {
"backup": None,
"name": "Alliance 4",
"number": 4,
"pick": 1
},
"playoff": {
"current_level_record": {
"losses": 2,
"ties": 0,
"wins": 0
},
"level": "sf",
"playoff_average": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 2
},
"status": "eliminated"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": 0,
"matches_played": 12,
"qual_average": None,
"rank": 6,
"record": {
"losses": 3,
"ties": 0,
"wins": 9
},
"sort_orders": [
28.0,
260.0,
150.0,
191.0,
575.0
],
"team_key": "frc5240"
},
"sort_order_info": [
{
"name": "Ranking Score",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Scale/Challenge",
"precision": 0
},
{
"name": "Goals",
"precision": 0
},
{
"name": "Defense",
"precision": 0
}
],
"status": "completed"
}
}
status_229 = {
"alliance": {
"backup": {
"in": "frc1665",
"out": "frc229"
},
"name": "Alliance 2",
"number": 2,
"pick": 2
},
"playoff": {
"current_level_record": {
"losses": 2,
"ties": 0,
"wins": 1
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 5
},
"status": "eliminated"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": 0,
"matches_played": 12,
"qual_average": None,
"rank": 20,
"record": {
"losses": 6,
"ties": 0,
"wins": 6
},
"sort_orders": [
20.0,
156.0,
130.0,
119.0,
525.0
],
"team_key": "frc229"
},
"sort_order_info": [
{
"name": "Ranking Score",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Scale/Challenge",
"precision": 0
},
{
"name": "Goals",
"precision": 0
},
{
"name": "Defense",
"precision": 0
}
],
"status": "completed"
}
}
status_1665 = {
"alliance": {
"backup": {
"in": "frc1665",
"out": "frc229"
},
"name": "Alliance 2",
"number": 2,
"pick": -1
},
"playoff": {
"current_level_record": {
"losses": 2,
"ties": 0,
"wins": 1
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 5
},
"status": "eliminated"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": 0,
"matches_played": 12,
"qual_average": None,
"rank": 18,
"record": {
"losses": 6,
"ties": 0,
"wins": 6
},
"sort_orders": [
20.0,
192.0,
105.0,
146.0,
525.0
],
"team_key": "frc1665"
},
"sort_order_info": [
{
"name": "Ranking Score",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Scale/Challenge",
"precision": 0
},
{
"name": "Goals",
"precision": 0
},
{
"name": "Defense",
"precision": 0
}
],
"status": "completed"
}
}
status_5964 = {
"alliance": None,
"playoff": None,
"qual": {
"num_teams": 36,
"ranking": {
"dq": 0,
"matches_played": 12,
"qual_average": None,
"rank": 23,
"record": {
"losses": 6,
"ties": 0,
"wins": 6
},
"sort_orders": [
19.0,
218.0,
110.0,
159.0,
520.0
],
"team_key": "frc5964"
},
"sort_order_info": [
{
"name": "Ranking Score",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Scale/Challenge",
"precision": 0
},
{
"name": "Goals",
"precision": 0
},
{
"name": "Defense",
"precision": 0
}
],
"status": "completed"
}
}
status_1124 = {
"qual": None,
"playoff": None,
"alliance": None
}
# Because I can't figure out how to get these to generate
def event_key_adder(self, obj):
obj.event = ndb.Key(Event, '2016nytr')
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
load_fixture('test_data/fixtures/2016nytr_event_team_status.json',
kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
post_processor=self.event_key_adder)
self.event = Event.get_by_id('2016nytr')
self.assertIsNotNone(self.event)
def tearDown(self):
self.testbed.deactivate()
def testEventWinner(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', self.event)
self.assertDictEqual(status, self.status_359)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 was <b>Rank 1/36</b> with a record of <b>11-1-0</b> in quals, competed in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>, and <b>won the event</b> with a playoff record of <b>6-1-0</b>.')
def testElimSemisAndFirstPick(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', self.event)
self.assertDictEqual(status, self.status_5240)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 was <b>Rank 6/36</b> with a record of <b>9-3-0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 4</b>, and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
def testBackupOut(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', self.event)
self.assertDictEqual(status, self.status_229)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 was <b>Rank 20/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>2nd Pick</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
def testBackupIn(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', self.event)
self.assertDictEqual(status, self.status_1665)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 was <b>Rank 18/36</b> with a record of <b>6-6-0</b> in quals, competed in the playoffs as the <b>Backup</b> of <b>Alliance 2</b>, and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
def testTeamNotPicked(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', self.event)
self.assertDictEqual(status, self.status_5964)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 was <b>Rank 23/36</b> with a record of <b>6-6-0</b> in quals.')
class Test2016nytrEventTeamStatusHelperNoEventDetails(unittest2.TestCase):
status_359 = {
"alliance": None,
"playoff": {
"current_level_record": {
"losses": 1,
"ties": 0,
"wins": 2
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 1,
"ties": 0,
"wins": 6
},
"status": "won"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": None,
"matches_played": 12,
"qual_average": None,
"rank": None,
"record": {
"losses": 1,
"ties": 0,
"wins": 11
},
"sort_orders": None,
"team_key": "frc359",
},
"sort_order_info": None,
"status": "completed"
}
}
status_5240 = {
"alliance": None,
"playoff": {
"current_level_record": {
"losses": 2,
"ties": 0,
"wins": 0
},
"level": "sf",
"playoff_average": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 2
},
"status": "eliminated"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": None,
"matches_played": 12,
"qual_average": None,
"rank": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 9
},
"sort_orders": None,
"team_key": "frc5240",
},
"sort_order_info": None,
"status": "completed"
}
}
status_229 = {
"alliance": None,
"playoff": {
"current_level_record": {
"losses": 2,
"ties": 0,
"wins": 1
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 5
},
"status": "eliminated"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": None,
"matches_played": 12,
"qual_average": None,
"rank": None,
"record": {
"losses": 6,
"ties": 0,
"wins": 6
},
"sort_orders": None,
"team_key": "frc229",
},
"sort_order_info": None,
"status": "completed"
}
}
status_1665 = {
"alliance": None,
"playoff": {
"current_level_record": {
"losses": 2,
"ties": 0,
"wins": 1
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 3,
"ties": 0,
"wins": 5
},
"status": "eliminated"
},
"qual": {
"num_teams": 36,
"ranking": {
"dq": None,
"matches_played": 12,
"qual_average": None,
"rank": None,
"record": {
"losses": 6,
"ties": 0,
"wins": 6
},
"sort_orders": None,
"team_key": "frc1665",
},
"sort_order_info": None,
"status": "completed"
}
}
status_5964 = {
"alliance": None,
"playoff": None,
"qual": {
"num_teams": 36,
"ranking": {
"dq": None,
"matches_played": 12,
"qual_average": None,
"rank": None,
"record": {
"losses": 6,
"ties": 0,
"wins": 6
},
"sort_orders": None,
"team_key": "frc5964",
},
"sort_order_info": None,
"status": "completed"
}
}
status_1124 = {
"qual": None,
"playoff": None,
"alliance": None
}
# Because I can't figure out how to get these to generate
def event_key_adder(self, obj):
obj.event = ndb.Key(Event, '2016nytr')
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
load_fixture('test_data/fixtures/2016nytr_event_team_status.json',
kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
post_processor=self.event_key_adder)
self.event = Event.get_by_id('2016nytr')
EventDetails.get_by_id('2016nytr').key.delete() # Remove EventDetails
self.assertIsNotNone(self.event)
def tearDown(self):
self.testbed.deactivate()
def testEventWinner(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc359', self.event)
self.assertDictEqual(status, self.status_359)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc359', status),
'Team 359 had a record of <b>11-1-0</b> in quals and <b>won the event</b> with a playoff record of <b>6-1-0</b>.')
def testElimSemisAndFirstPick(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc5240', self.event)
self.assertDictEqual(status, self.status_5240)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5240', status),
'Team 5240 had a record of <b>9-3-0</b> in quals and was <b>eliminated in the Semifinals</b> with a playoff record of <b>2-3-0</b>.')
def testBackupOut(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc229', self.event)
self.assertDictEqual(status, self.status_229)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc229', status),
'Team 229 had a record of <b>6-6-0</b> in quals and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
def testBackupIn(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc1665', self.event)
self.assertDictEqual(status, self.status_1665)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc1665', status),
'Team 1665 had a record of <b>6-6-0</b> in quals and was <b>eliminated in the Finals</b> with a playoff record of <b>5-3-0</b>.')
def testTeamNotPicked(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc5964', self.event)
self.assertDictEqual(status, self.status_5964)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc5964', status),
'Team 5964 had a record of <b>6-6-0</b> in quals.')
class Test2016casjEventTeamStatusHelperNoEventDetails(unittest2.TestCase):
status_254 = {
"alliance": None,
"playoff": {
"current_level_record": {
"losses": 0,
"ties": 0,
"wins": 2
},
"level": "f",
"playoff_average": None,
"record": {
"losses": 0,
"ties": 0,
"wins": 6
},
"status": "won"
},
"qual": {
"num_teams": 64,
"ranking": {
"dq": None,
"matches_played": 8,
"qual_average": None,
"rank": None,
"record": {
"losses": 0,
"ties": 0,
"wins": 8
},
"sort_orders": None,
"team_key": "frc254",
},
"sort_order_info": None,
"status": "completed"
}
}
# Because I can't figure out how to get these to generate
def event_key_adder(self, obj):
obj.event = ndb.Key(Event, '2016casj')
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
load_fixture('test_data/fixtures/2016casj.json',
kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
post_processor=self.event_key_adder)
self.event = Event.get_by_id('2016casj')
self.assertIsNotNone(self.event)
def tearDown(self):
self.testbed.deactivate()
def testEventSurrogate(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc254', self.event)
self.assertDictEqual(status, self.status_254)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc254', status),
'Team 254 had a record of <b>8-0-0</b> in quals and <b>won the event</b> with a playoff record of <b>6-0-0</b>.')
class Test2015casjEventTeamStatusHelper(unittest2.TestCase):
status_254 = {
"alliance": {
"backup": None,
"name": "Alliance 1",
"number": 1,
"pick": 0
},
"playoff": {
"current_level_record": {
"losses": 0,
"ties": 0,
"wins": 2
},
"level": "f",
"playoff_average": 224.14285714285714,
"record": None,
"status": "won"
},
"qual": {
"num_teams": 57,
"ranking": {
"dq": 0,
"matches_played": 10,
"qual_average": 200.4,
"rank": 1,
"record": None,
"sort_orders": [
200.4,
280.0,
200.0,
836.0,
522.0,
166.0
],
"team_key": "frc254"
},
"sort_order_info": [
{
"name": "Qual Avg.",
"precision": 1
},
{
"name": "Coopertition",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Container",
"precision": 0
},
{
"name": "Tote",
"precision": 0
},
{
"name": "Litter",
"precision": 0
}
],
"status": "completed"
}
}
status_846 = {
"alliance": {
"backup": None,
"name": "Alliance 3",
"number": 3,
"pick": 1
},
"playoff": {
"current_level_record": None,
"level": "sf",
"playoff_average": 133.59999999999999,
"record": None,
"status": "eliminated"
},
"qual": {
"num_teams": 57,
"ranking": {
"dq": 0,
"matches_played": 10,
"qual_average": 97.0,
"rank": 8,
"record": None,
"sort_orders": [
97.0,
200.0,
20.0,
372.0,
294.0,
108.0
],
"team_key": "frc846"
},
"sort_order_info": [
{
"name": "Qual Avg.",
"precision": 1
},
{
"name": "Coopertition",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Container",
"precision": 0
},
{
"name": "Tote",
"precision": 0
},
{
"name": "Litter",
"precision": 0
}
],
"status": "completed"
}
}
status_8 = {
"alliance": None,
"playoff": None,
"qual": {
"num_teams": 57,
"ranking": {
"dq": 0,
"matches_played": 10,
"qual_average": 42.6,
"rank": 53,
"record": None,
"sort_orders": [
42.6,
120.0,
0.0,
84.0,
150.0,
72.0
],
"team_key": "frc8"
},
"sort_order_info": [
{
"name": "Qual Avg.",
"precision": 1
},
{
"name": "Coopertition",
"precision": 0
},
{
"name": "Auto",
"precision": 0
},
{
"name": "Container",
"precision": 0
},
{
"name": "Tote",
"precision": 0
},
{
"name": "Litter",
"precision": 0
}
],
"status": "completed"
}
}
status_1124 = {
"qual": None,
"playoff": None,
"alliance": None
}
# Because I can't figure out how to get these to generate
def event_key_adder(self, obj):
obj.event = ndb.Key(Event, '2015casj')
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
load_fixture('test_data/fixtures/2015casj.json',
kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
post_processor=self.event_key_adder)
self.event = Event.get_by_id('2015casj')
self.assertIsNotNone(self.event)
def tearDown(self):
self.testbed.deactivate()
def testEventWinner(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc254', self.event)
self.assertDictEqual(status, self.status_254)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc254', status),
'Team 254 was <b>Rank 1/57</b> with an average score of <b>200.4</b> in quals, competed in the playoffs as the <b>Captain</b> of <b>Alliance 1</b>, and <b>won the event</b> with a playoff average of <b>224.1</b>.')
def testElimSemisAndFirstPick(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc846', self.event)
self.assertDictEqual(status, self.status_846)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc846', status),
'Team 846 was <b>Rank 8/57</b> with an average score of <b>97.0</b> in quals, competed in the playoffs as the <b>1st Pick</b> of <b>Alliance 3</b>, and was <b>eliminated in the Semifinals</b> with a playoff average of <b>133.6</b>.')
def testTeamNotPicked(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc8', self.event)
self.assertDictEqual(status, self.status_8)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc8', status),
'Team 8 was <b>Rank 53/57</b> with an average score of <b>42.6</b> in quals.')
class Test2015casjEventTeamStatusHelperNoEventDetails(unittest2.TestCase):
status_254 = {
"alliance": None,
"playoff": {
"current_level_record": {
"losses": 0,
"ties": 0,
"wins": 2
},
"level": "f",
"playoff_average": 224.14285714285714,
"record": None,
"status": "won"
},
"qual": {
"num_teams": 57,
"ranking": {
"dq": None,
"matches_played": 10,
"qual_average": 200.4,
"rank": None,
"record": None,
"sort_orders": None,
"team_key": "frc254",
},
"sort_order_info": None,
"status": "completed"
}
}
status_846 = {
"alliance": None,
"playoff": {
"current_level_record": None,
"level": "sf",
"playoff_average": 133.59999999999999,
"record": None,
"status": "eliminated"
},
"qual": {
"num_teams": 57,
"ranking": {
"dq": None,
"matches_played": 10,
"qual_average": 97.0,
"rank": None,
"record": None,
"sort_orders": None,
"team_key": "frc846",
},
"sort_order_info": None,
"status": "completed"
}
}
status_8 = {
"alliance": None,
"playoff": None,
"qual": {
"num_teams": 57,
"ranking": {
"dq": None,
"matches_played": 10,
"qual_average": 42.6,
"rank": None,
"record": None,
"sort_orders": None,
"team_key": "frc8",
},
"sort_order_info": None,
"status": "completed"
}
}
status_1124 = {
"qual": None,
"playoff": None,
"alliance": None
}
# Because I can't figure out how to get these to generate
def event_key_adder(self, obj):
obj.event = ndb.Key(Event, '2015casj')
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
ndb.get_context().clear_cache() # Prevent data from leaking between tests
load_fixture('test_data/fixtures/2015casj.json',
kind={'EventDetails': EventDetails, 'Event': Event, 'Match': Match},
post_processor=self.event_key_adder)
self.event = Event.get_by_id('2015casj')
EventDetails.get_by_id('2015casj').key.delete() # Remove EventDetails
self.assertIsNotNone(self.event)
def tearDown(self):
self.testbed.deactivate()
def testEventWinner(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc254', self.event)
self.assertDictEqual(status, self.status_254)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc254', status),
'Team 254 had an average score of <b>200.4</b> in quals and <b>won the event</b> with a playoff average of <b>224.1</b>.')
def testElimSemisAndFirstPick(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc846', self.event)
self.assertDictEqual(status, self.status_846)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc846', status),
'Team 846 had an average score of <b>97.0</b> in quals and was <b>eliminated in the Semifinals</b> with a playoff average of <b>133.6</b>.')
def testTeamNotPicked(self):
status = EventTeamStatusHelper.generate_team_at_event_status('frc8', self.event)
self.assertDictEqual(status, self.status_8)
self.assertEqual(
EventTeamStatusHelper.generate_team_at_event_status_string('frc8', status),
'Team 8 had an average score of <b>42.6</b> in quals.')
| verycumbersome/the-blue-alliance | tests/test_event_team_status_helper.py | Python | mit | 55,040 |
import pyrox.filtering as filtering
class EmptyFilter(filtering.HttpFilter):
    """A pass-through filter that performs no request or response processing."""
    pass
| akatrevorjay/pyrox | pyrox/stock_filters/empty.py | Python | mit | 88 |
import subprocess
import os
# Walk the current directory tree and OCR every .jpg file found.
tree = [entry for entry in os.walk(os.getcwd())]
print(tree)
for root, _dirs, files in tree:
    for fi in files:
        if fi.endswith(".jpg"):
            # Use the full path so images inside subdirectories are found,
            # and write the output next to its source image. Tesseract
            # appends ".txt" to the output base name itself.
            fi_path = os.path.join(root, fi)
            output_base = os.path.join(root, fi[:-len(".jpg")] + "_output")
            print(output_base)
            subprocess.call(["tesseract", fi_path, output_base])
| wonkishtofu/Tesseract-OCR-Tessa | tessa/orbiturary/pictures/get_text.py | Python | mit | 380 |
import math
FREQ = 3000
V = 13.3
REALRPM = 305.6
LIMIT = 2
PWM_IN_MIN = 1100
PWM_IN_MAX = 2000
RPM_MAX = 2000.0 # rpm
RPM_MIN = 300.0 # rpm
# GT: Changed constants?
RPM_SLEW = 10000.0 # rpm/s
DT_LOOP = 0.001 # seconds per slow loop
KP_RPM_UP = 0.3 # mA/rpm
KI_RPM = 0.002 # mA/rpm/s
I_SAT_RPM = 20.0 # mA
KFF_I = 4.000e-5 # mA/rpm^2
# KFF_V = 0.00038 # (0-255)/rpm
KFF_V = 0.0 # (0-255)/rpm
AMAX = 20.0 # max accelerating current [A]
BMAX = 5.0 # max braking current [A]
# RPM controller
KP_EST_RPM = 2
KI_EST_RPM = 0.02
KPQ = 1.000 / 500.0 # [LSB/mA/loop] ~= [1V/A/s at 24VDC]
KPD = 0.3 / 500.0 # [LSB/mA/loop] ~= ??? calculate
CHAR_90_DEG = 64
LOOP_INTERVAL = 4
#iafactor = 0.03
#ibicfactor = 0.02
iafactor = 0.03
ibicfactor = 0.02
SQRT3DIV2 = 0.866025404
ONEDIVSQRT3 = 1.0/math.sqrt(3.0)
TWODIVSQRT3 = 2.0/math.sqrt(3.0)
BEMFK = 0.00537
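# Illustrative sketch (an inference from the unit annotations above, not part
# of the original controller code): KFF_I carries units of mA/rpm^2, so the
# speed feed-forward current for a target speed would be computed as:
def feedforward_current_ma(rpm):
    """Hypothetical helper: feed-forward current in mA at a given speed."""
    return KFF_I * rpm ** 2  # [mA/rpm^2] * [rpm]^2 -> [mA]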
| gtoonstra/foc_esc | escsim/constants.py | Python | mit | 918 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Functional operations.
## Higher Order Operators
TensorFlow provides several higher order operators to simplify the common
map-reduce programming patterns.
@@map_fn
@@foldl
@@foldr
@@scan
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope as vs
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_functional_ops import *
# pylint: enable=wildcard-import
# pylint: disable=unused-import
from tensorflow.python.ops.gen_functional_ops import _symbolic_gradient
# pylint: enable=unused-import
from tensorflow.python.util import nest
# TODO(yuanbyu, mrry): Handle stride to support sliding windows.
def foldl(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
swap_memory=False, name=None):
"""foldl on the list of tensors unpacked from `elems` on dimension 0.
This foldl operator repeatedly applies the callable `fn` to a sequence
of elements from first to last. The elements are made of the tensors
unpacked from `elems` on dimension 0. The callable fn takes two tensors as
arguments. The first argument is the accumulated value computed from the
preceding invocation of fn. If `initializer` is None, `elems` must contain
at least one element, and its first element is used as the initializer.
Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
  of the result tensor is `fn(initializer, values[0]).shape`.
Args:
fn: The callable to be performed.
elems: A tensor to be unpacked on dimension 0.
initializer: (optional) The initial value for the accumulator.
parallel_iterations: (optional) The number of iterations allowed to run
in parallel.
back_prop: (optional) True enables support for back propagation.
swap_memory: (optional) True enables GPU-CPU memory swapping.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor resulting from applying `fn` consecutively to the list of tensors
unpacked from `elems`, from first to last.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
sum = foldl(lambda a, x: a + x, elems)
# sum == 21
```
"""
if not callable(fn):
raise TypeError("fn must be callable.")
with ops.op_scope([elems], name, "foldl"):
# Any get_variable calls in fn will cache the first call locally
# and not issue repeated network I/O requests for each iteration.
varscope = vs.get_variable_scope()
varscope_caching_device_was_none = False
if varscope.caching_device is None:
# TODO(ebrevdo): Change to using colocate_with here and in other methods.
varscope.set_caching_device(lambda op: op.device)
varscope_caching_device_was_none = True
# Convert elems to tensor array.
elems = ops.convert_to_tensor(elems, name="elems")
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False,
infer_shape=True)
elems_ta = elems_ta.unpack(elems)
if initializer is None:
a = elems_ta.read(0)
i = constant_op.constant(1)
else:
a = ops.convert_to_tensor(initializer)
i = constant_op.constant(0)
def compute(i, a):
a = fn(a, elems_ta.read(i))
return [i + 1, a]
_, r_a = control_flow_ops.while_loop(
lambda i, a: i < n, compute, [i, a],
parallel_iterations=parallel_iterations,
back_prop=back_prop,
swap_memory=swap_memory)
if varscope_caching_device_was_none:
varscope.set_caching_device(None)
return r_a
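# Example with an explicit initializer (illustrative sketch in the style of
# the docstring above; not part of the original module):
#
#   elems = [1, 2, 3, 4, 5, 6]
#   total = foldl(lambda a, x: a + x, elems, initializer=100)
#   # total == 121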
def foldr(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
swap_memory=False, name=None):
"""foldr on the list of tensors unpacked from `elems` on dimension 0.
This foldr operator repeatedly applies the callable `fn` to a sequence
of elements from last to first. The elements are made of the tensors
unpacked from `elems`. The callable fn takes two tensors as arguments.
The first argument is the accumulated value computed from the preceding
invocation of fn. If `initializer` is None, `elems` must contain at least
  one element, and its last element is used as the initializer.
Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
of the result tensor is `fn(initializer, values[0]).shape`.
Args:
fn: The callable to be performed.
elems: A tensor that is unpacked into a sequence of tensors to apply `fn`.
initializer: (optional) The initial value for the accumulator.
parallel_iterations: (optional) The number of iterations allowed to run
in parallel.
back_prop: (optional) True enables support for back propagation.
swap_memory: (optional) True enables GPU-CPU memory swapping.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor resulting from applying `fn` consecutively to the list of tensors
unpacked from `elems`, from last to first.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
sum = foldr(lambda a, x: a + x, elems)
# sum == 21
```
"""
if not callable(fn):
raise TypeError("fn must be callable.")
with ops.op_scope([elems], name, "foldr"):
# Any get_variable calls in fn will cache the first call locally
# and not issue repeated network I/O requests for each iteration.
varscope = vs.get_variable_scope()
varscope_caching_device_was_none = False
if varscope.caching_device is None:
# TODO(ebrevdo): Change to using colocate_with here and in other methods.
varscope.set_caching_device(lambda op: op.device)
varscope_caching_device_was_none = True
# Convert elems to tensor array.
elems = ops.convert_to_tensor(elems, name="elems")
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False,
infer_shape=True)
elems_ta = elems_ta.unpack(elems)
if initializer is None:
i = n - 1
a = elems_ta.read(i)
else:
i = n
a = ops.convert_to_tensor(initializer)
def compute(i, a):
i -= 1
a = fn(a, elems_ta.read(i))
return [i, a]
_, r_a = control_flow_ops.while_loop(
lambda i, a: i > 0, compute, [i, a],
parallel_iterations=parallel_iterations,
back_prop=back_prop,
swap_memory=swap_memory)
if varscope_caching_device_was_none:
varscope.set_caching_device(None)
return r_a
def map_fn(fn, elems, dtype=None, parallel_iterations=10, back_prop=True,
swap_memory=False, infer_shape=True, name=None):
"""map on the list of tensors unpacked from `elems` on dimension 0.
The simplest version of `map` repeatedly applies the callable `fn` to a
sequence of elements from first to last. The elements are made of the
tensors unpacked from `elems`. `dtype` is the data type of the return
value of `fn`. Users must provide `dtype` if it is different from
the data type of `elems`.
Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
of the result tensor is `[values.shape[0]] + fn(values[0]).shape`.
This method also allows multi-arity `elems` and output of `fn`. If `elems`
is a (possibly nested) list or tuple of tensors, then each of these tensors
must have a matching first (unpack) dimension. The signature of `fn` may
match the structure of `elems`. That is, if `elems` is
`(t1, [t2, t3, [t4, t5]])`, then an appropriate signature for `fn` is:
`fn = lambda (t1, [t2, t3, [t4, t5]]):`.
Furthermore, `fn` may emit a different structure than its input. For example,
`fn` may look like: `fn = lambda t1: return (t1 + 1, t1 - 1)`. In this case,
the `dtype` parameter is not optional: `dtype` must be a type or (possibly
nested) tuple of types matching the output of `fn`.
Args:
fn: The callable to be performed. It accepts one argument, which will
have the same (possibly nested) structure as `elems`. Its output
must have the same structure as `dtype` if one is provided, otherwise
it must have the same structure as `elems`.
elems: A tensor or (possibly nested) sequence of tensors, each of which
will be unpacked along their first dimension. The nested sequence
of the resulting slices will be applied to `fn`.
dtype: (optional) The output type(s) of `fn`. If `fn` returns a structure
of Tensors differing from the structure of `elems`, then `dtype` is not
optional and must have the same structure as the output of `fn`.
parallel_iterations: (optional) The number of iterations allowed to run
in parallel.
back_prop: (optional) True enables support for back propagation.
swap_memory: (optional) True enables GPU-CPU memory swapping.
infer_shape: (optional) False disables tests for consistent output shapes.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor or (possibly nested) sequence of tensors. Each tensor packs the
results of applying `fn` to tensors unpacked from `elems` along the first
dimension, from first to last.
Raises:
TypeError: if `fn` is not callable or the structure of the output of
`fn` and `dtype` do not match.
ValueError: if the lengths of the output of `fn` and `dtype` do not match.
Examples:
```python
elems = np.array([1, 2, 3, 4, 5, 6])
squares = map_fn(lambda x: x * x, elems)
# squares == [1, 4, 9, 16, 25, 36]
```
```python
elems = (np.array([1, 2, 3]), np.array([-1, 1, -1]))
alternate = map_fn(lambda x: x[0] * x[1], elems, dtype=tf.int64)
# alternate == [-1, 2, -3]
```
```python
elems = np.array([1, 2, 3])
alternates = map_fn(lambda x: (x, -x), elems, dtype=(tf.int64, tf.int64))
# alternates[0] == [1, 2, 3]
# alternates[1] == [-1, -2, -3]
```
"""
if not callable(fn):
raise TypeError("fn must be callable.")
input_is_sequence = nest.is_sequence(elems)
input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]
def input_pack(x):
return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]
if dtype is None:
output_is_sequence = input_is_sequence
output_flatten = input_flatten
output_pack = input_pack
else:
output_is_sequence = nest.is_sequence(dtype)
output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]
def output_pack(x):
return (nest.pack_sequence_as(dtype, x)
if output_is_sequence else x[0])
elems_flat = input_flatten(elems)
with ops.op_scope(elems_flat, name, "map"):
# Any get_variable calls in fn will cache the first call locally
# and not issue repeated network I/O requests for each iteration.
varscope = vs.get_variable_scope()
varscope_caching_device_was_none = False
if varscope.caching_device is None:
# TODO(ebrevdo): Change to using colocate_with here and in other methods.
varscope.set_caching_device(lambda op: op.device)
varscope_caching_device_was_none = True
elems_flat = [
ops.convert_to_tensor(elem, name="elem") for elem in elems_flat]
dtype = dtype or input_pack([elem.dtype for elem in elems_flat])
dtype_flat = output_flatten(dtype)
# Convert elems to tensor array.
n = array_ops.shape(elems_flat[0])[0]
# TensorArrays are always flat
elems_ta = [
tensor_array_ops.TensorArray(dtype=elem.dtype, size=n,
dynamic_size=False,
infer_shape=True)
for elem in elems_flat]
# Unpack elements
elems_ta = [
elem_ta.unpack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]
i = constant_op.constant(0)
accs_ta = [
tensor_array_ops.TensorArray(dtype=dt, size=n,
dynamic_size=False,
infer_shape=infer_shape)
for dt in dtype_flat]
def compute(i, tas):
"""The loop body of map_fn.
Args:
i: the loop counter
tas: the flat TensorArray accumulator list
Returns:
(i + 1, tas): the updated counter + updated TensorArrays
Raises:
TypeError: if dtype and packed_fn_values structure do not match
ValueType: if dtype and packed_fn_values lengths do not match
"""
packed_values = input_pack([elem_ta.read(i) for elem_ta in elems_ta])
packed_fn_values = fn(packed_values)
nest.assert_same_structure(dtype or elems, packed_fn_values)
flat_fn_values = output_flatten(packed_fn_values)
tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_fn_values)]
return (i + 1, tas)
_, r_a = control_flow_ops.while_loop(
lambda i, _: i < n, compute, (i, accs_ta),
parallel_iterations=parallel_iterations,
back_prop=back_prop,
swap_memory=swap_memory)
results_flat = [r.pack() for r in r_a]
n_static = elems_flat[0].get_shape().with_rank_at_least(1)[0]
for elem in elems_flat[1:]:
n_static.merge_with(elem.get_shape().with_rank_at_least(1)[0])
for r in results_flat:
r.set_shape(tensor_shape.TensorShape(n_static).concatenate(
r.get_shape()[1:]))
if varscope_caching_device_was_none:
varscope.set_caching_device(None)
return output_pack(results_flat)
def scan(fn, elems, initializer=None, parallel_iterations=10, back_prop=True,
swap_memory=False, infer_shape=True, name=None):
"""scan on the list of tensors unpacked from `elems` on dimension 0.
The simplest version of `scan` repeatedly applies the callable `fn` to a
sequence of elements from first to last. The elements are made of the tensors
unpacked from `elems` on dimension 0. The callable fn takes two tensors as
arguments. The first argument is the accumulated value computed from the
preceding invocation of fn. If `initializer` is None, `elems` must contain
at least one element, and its first element is used as the initializer.
Suppose that `elems` is unpacked into `values`, a list of tensors. The shape
of the result tensor is `[len(values)] + fn(initializer, values[0]).shape`.
This method also allows multi-arity `elems` and accumulator. If `elems`
is a (possibly nested) list or tuple of tensors, then each of these tensors
must have a matching first (unpack) dimension. The second argument of
`fn` must match the structure of `elems`.
If no `initializer` is provided, the output structure and dtypes of `fn`
are assumed to be the same as its input; and in this case, the first
argument of `fn` must match the structure of `elems`.
If an `initializer` is provided, then the output of `fn` must have the same
structure as `initializer`; and the first argument of `fn` must match
this structure.
For example, if `elems` is `(t1, [t2, t3])` and `initializer` is
`[i1, i2]` then an appropriate signature for `fn` in `python2` is:
  `fn = lambda (acc_p1, acc_p2), (t1, [t2, t3]):` and `fn` must return a list,
`[acc_n1, acc_n2]`. An alternative correct signature for `fn`, and the
one that works in `python3`, is:
`fn = lambda a, t:`, where `a` and `t` correspond to the input tuples.
Args:
fn: The callable to be performed. It accepts two arguments. The first
will have the same (possibly nested) structure as `elems`. The second
will have the same structure as `initializer` if one is provided,
otherwise it will have the same structure as `elems`. Its output
must have the same structure as `initializer` if one is provided,
otherwise it must have the same structure as `elems`.
elems: A tensor or (possibly nested) sequence of tensors, each of which
will be unpacked along their first dimension. The nested sequence
of the resulting slices will be the first argument to `fn`.
initializer: (optional) A tensor or (possibly nested) sequence of tensors,
initial value for the accumulator, and the expected output type of `fn`.
parallel_iterations: (optional) The number of iterations allowed to run
in parallel.
back_prop: (optional) True enables support for back propagation.
swap_memory: (optional) True enables GPU-CPU memory swapping.
infer_shape: (optional) False disables tests for consistent output shapes.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor or (possibly nested) sequence of tensors. Each tensor packs the
results of applying `fn` to tensors unpacked from `elems` along the first
dimension, and the previous accumulator value(s), from first to last.
Raises:
TypeError: if `fn` is not callable or the structure of the output of
`fn` and `initializer` do not match.
ValueError: if the lengths of the output of `fn` and `initializer`
do not match.
Examples:
```python
elems = np.array([1, 2, 3, 4, 5, 6])
sum = scan(lambda a, x: a + x, elems)
# sum == [1, 3, 6, 10, 15, 21]
```
```python
elems = np.array([1, 2, 3, 4, 5, 6])
initializer = np.array(0)
sum_one = scan(
lambda a, x: x[0] - x[1] + a, (elems + 1, elems), initializer)
# sum_one == [1, 2, 3, 4, 5, 6]
```
```python
elems = np.array([1, 0, 0, 0, 0, 0])
initializer = (np.array(0), np.array(1))
fibonaccis = scan(lambda a, _: (a[1], a[0] + a[1]), elems, initializer)
# fibonaccis == ([1, 1, 2, 3, 5, 8], [1, 2, 3, 5, 8, 13])
```
"""
if not callable(fn):
raise TypeError("fn must be callable.")
input_is_sequence = nest.is_sequence(elems)
input_flatten = lambda x: nest.flatten(x) if input_is_sequence else [x]
def input_pack(x):
return nest.pack_sequence_as(elems, x) if input_is_sequence else x[0]
if initializer is None:
output_is_sequence = input_is_sequence
output_flatten = input_flatten
output_pack = input_pack
else:
output_is_sequence = nest.is_sequence(initializer)
output_flatten = lambda x: nest.flatten(x) if output_is_sequence else [x]
def output_pack(x):
return (nest.pack_sequence_as(initializer, x)
if output_is_sequence else x[0])
elems_flat = input_flatten(elems)
with ops.op_scope(elems_flat, name, "scan"):
# Any get_variable calls in fn will cache the first call locally
# and not issue repeated network I/O requests for each iteration.
varscope = vs.get_variable_scope()
varscope_caching_device_was_none = False
if varscope.caching_device is None:
# TODO(ebrevdo): Change to using colocate_with here and in other methods.
varscope.set_caching_device(lambda op: op.device)
varscope_caching_device_was_none = True
# Convert elems to tensor array.
elems_flat = [
ops.convert_to_tensor(elem, name="elem") for elem in elems_flat]
n = array_ops.shape(elems_flat[0])[0]
# TensorArrays are always flat
elems_ta = [
tensor_array_ops.TensorArray(dtype=elem.dtype, size=n,
dynamic_size=False,
infer_shape=True)
for elem in elems_flat]
# Unpack elements
elems_ta = [
elem_ta.unpack(elem) for elem_ta, elem in zip(elems_ta, elems_flat)]
if initializer is None:
a_flat = [elem.read(0) for elem in elems_ta]
i = constant_op.constant(1)
else:
initializer_flat = output_flatten(initializer)
a_flat = [ops.convert_to_tensor(init) for init in initializer_flat]
i = constant_op.constant(0)
# Create a tensor array to store the intermediate values.
accs_ta = [
tensor_array_ops.TensorArray(dtype=init.dtype, size=n,
dynamic_size=False,
infer_shape=infer_shape)
for init in a_flat]
if initializer is None:
accs_ta = [acc_ta.write(0, a) for (acc_ta, a) in zip(accs_ta, a_flat)]
def compute(i, a_flat, tas):
"""The loop body of scan.
Args:
i: the loop counter.
a_flat: the accumulator value(s), flattened.
tas: the output accumulator TensorArray(s), flattened.
Returns:
[i + 1, a_flat, tas]: the updated counter + new accumulator values +
updated TensorArrays
Raises:
TypeError: if initializer and fn() output structure do not match
ValueType: if initializer and fn() output lengths do not match
"""
packed_elems = input_pack([elem_ta.read(i) for elem_ta in elems_ta])
packed_a = output_pack(a_flat)
a_out = fn(packed_a, packed_elems)
nest.assert_same_structure(
elems if initializer is None else initializer, a_out)
flat_a_out = output_flatten(a_out)
tas = [ta.write(i, value) for (ta, value) in zip(tas, flat_a_out)]
return (i + 1, flat_a_out, tas)
_, _, r_a = control_flow_ops.while_loop(
lambda i, _1, _2: i < n, compute, (i, a_flat, accs_ta),
parallel_iterations=parallel_iterations,
back_prop=back_prop, swap_memory=swap_memory)
results_flat = [r.pack() for r in r_a]
n_static = elems_flat[0].get_shape().with_rank_at_least(1)[0]
for elem in elems_flat[1:]:
n_static.merge_with(elem.get_shape().with_rank_at_least(1)[0])
for r in results_flat:
r.set_shape(tensor_shape.TensorShape(n_static).concatenate(
r.get_shape()[1:]))
if varscope_caching_device_was_none:
varscope.set_caching_device(None)
return output_pack(results_flat)
@ops.RegisterShape("SymbolicGradient")
def _symbolic_gradient_shape(op):
# Say, (u, v) = f(x, y, z), _symbolic_gradient(f) is a function of
# (x, y, z, du, dv) -> (dx, dy, dz). Therefore, shapes of its
# outputs (dx, dy, dz) are the same as (x, y, z).
return [op.inputs[i].get_shape() for i in range(len(op.outputs))]
| Lab603/PicEncyclopedias | jni-build/jni/include/tensorflow/python/ops/functional_ops.py | Python | mit | 23,180 |
from PyQt4 import QtCore, QtGui
import acq4.Manager
import acq4.pyqtgraph as pg
import acq4.pyqtgraph.opengl as gl
import numpy as np
import acq4.util.functions as fn
import re
man = acq4.Manager.getManager()
## update DB field to reflect dir meta info
#for i in db.select('Cell', ['rowid']):
#d = db.getDir('Cell', i[0])
#typ = d.info().get('type', '')
#db.update('Cell', {'type': typ}, rowid=i[0])
#print d, typ
global eventView, siteView, cells
eventView = 'events_view'
siteView = 'sites_view'
firstRun = False
if 'events' not in locals():
global events
events = {}
firstRun = True
win = QtGui.QMainWindow()
#cw = QtGui.QWidget()
layout = pg.LayoutWidget()
#layout = QtGui.QGridLayout()
#layout.setContentsMargins(0,0,0,0)
#layout.setSpacing(0)
#cw.setLayout(layout)
win.setCentralWidget(layout)
cellCombo = QtGui.QComboBox()
cellCombo.setSizeAdjustPolicy(cellCombo.AdjustToContents)
layout.addWidget(cellCombo)
reloadBtn = QtGui.QPushButton('reload')
layout.addWidget(reloadBtn)
separateCheck = QtGui.QCheckBox("color pre/post")
layout.addWidget(separateCheck)
colorCheck = QtGui.QCheckBox("color y position")
layout.addWidget(colorCheck)
errLimitSpin = pg.SpinBox(value=0.7, step=0.1)
layout.addWidget(errLimitSpin)
lengthRatioLimitSpin = pg.SpinBox(value=1.5, step=0.1)
layout.addWidget(lengthRatioLimitSpin)
postRgnStartSpin = pg.SpinBox(value=0.500, step=0.01, siPrefix=True, suffix='s')
layout.addWidget(postRgnStartSpin)
postRgnStopSpin = pg.SpinBox(value=0.700, step=0.01, siPrefix=True, suffix='s')
layout.addWidget(postRgnStopSpin)
spl1 = QtGui.QSplitter()
spl1.setOrientation(QtCore.Qt.Vertical)
layout.addWidget(spl1, row=1, col=0, rowspan=1, colspan=8)
pw1 = pg.PlotWidget()
spl1.addWidget(pw1)
pw1.setLabel('left', 'Amplitude', 'A')
pw1.setLabel('bottom', 'Decay Tau', 's')
spl2 = QtGui.QSplitter()
spl2.setOrientation(QtCore.Qt.Horizontal)
spl1.addWidget(spl2)
pw2 = pg.PlotWidget(labels={'bottom': ('time', 's')})
spl2.addWidget(pw2)
tab = QtGui.QTabWidget()
spl2.addWidget(tab)
## For viewing cell morphology
gv = pg.GraphicsView()
gv.setBackgroundBrush(pg.mkBrush('w'))
image = pg.ImageItem()
gv.addItem(image)
gv.enableMouse()
gv.setAspectLocked(True)
tab.addTab(gv, 'Morphology')
## 3D atlas
import acq4.analysis.atlas.CochlearNucleus as CN
atlas = CN.CNAtlasDisplayWidget()
atlas.showLabel('DCN')
atlas.showLabel('AVCN')
atlas.showLabel('PVCN')
tab.addTab(atlas, 'Atlas')
atlasPoints = gl.GLScatterPlotItem()
atlas.addItem(atlasPoints)
win.show()
win.resize(1000,800)
sp1 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(200,200,255,70), identical=True, size=8)
sp2 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(255,200,200,70), identical=True, size=8)
sp3 = pw1.scatterPlot([], pen=pg.mkPen(None), brush=(100,255,100,70), identical=True, size=8)
sp4 = pw1.scatterPlot([], pen=pg.mkPen(None), size=8)
print "Reading cell list..."
#import os, pickle
#md = os.path.abspath(os.path.split(__file__)[0])
#cacheFile = os.path.join(md, 'eventCache.p')
#if os.path.isfile(cacheFile):
#print "Read from cache..."
#ev = pickle.load(open(cacheFile, 'r'))
#else:
#pickle.dump(ev, open(cacheFile, 'w'))
## create views that link cell information to events/sites
db = man.getModule('Data Manager').currentDatabase()
if not db.hasTable(siteView):
print "Creating DB views."
db.createView(siteView, ['photostim_sites', 'DirTable_Protocol', 'DirTable_Cell']) ## seems to be unused.
if not db.hasTable(eventView):
db.createView(eventView, ['photostim_events', 'DirTable_Protocol', 'DirTable_Cell'])
cells = db.select(siteView, ['CellDir'], distinct=True)
cells = [c['CellDir'] for c in cells]
cells.sort(lambda a,b: cmp(a.name(), b.name()))
cellCombo.addItem('')
for c in cells:
cellCombo.addItem(c.name(relativeTo=man.baseDir))
#cellSpin.setMaximum(len(cells)-1)
print "Done."
def loadCell(cell, reloadData=False):
    """Load and cache all photostim events for `cell` from the current
    database, annotating each event with its holding level and stimulation
    site position. Results are memoized in the global `events` dict."""
    global events
if reloadData:
events.pop(cell, None)
if cell in events:
return
db = man.getModule('Data Manager').currentDatabase()
mod = man.dataModel
allEvents = []
hvals = {}
nEv = 0
positionCache = {}
tcache = {}
print "Loading all events for cell", cell
tot = db.select(eventView, 'count()', where={'CellDir': cell})[0]['count()']
print tot, "total events.."
with pg.ProgressDialog('Loading event data...', maximum=tot, wait=0) as dlg:
for ev in db.iterSelect(eventView, ['ProtocolSequenceDir', 'SourceFile', 'fitAmplitude', 'fitTime', 'fitDecayTau', 'fitRiseTau', 'fitTimeToPeak', 'fitLengthOverDecay', 'fitFractionalError', 'userTransform', 'CellType', 'CellDir', 'ProtocolDir'], where={'CellDir': cell}, toArray=True, chunkSize=200):
extra = np.empty(ev.shape, dtype=[('right', float), ('anterior', float), ('dorsal', float), ('holding', float)])
## insert holding levels
for i in range(len(ev)):
sd = ev[i]['ProtocolSequenceDir']
if sd not in hvals:
cf = ev[i]['SourceFile']
hvals[sd] = mod.getClampHoldingLevel(cf)
#print hvals[sd], cf
extra[i]['holding'] = hvals[sd]
## insert positions
for i in range(len(ev)):
protoDir = ev[i]['SourceFile'].parent()
key = protoDir
#key = (ev[i]['ProtocolSequenceDir'], ev[i]['SourceFile'])
if key not in positionCache:
#try:
#dh = ev[i]['ProtocolDir']
#p1 = pg.Point(dh.info()['Scanner']['position'])
#if key[0] not in tcache:
#tr = pg.SRTTransform()
#tr.restoreState(dh.parent().info()['userTransform'])
#tcache[key[0]] = tr
#trans = tcache[key[0]]
#p2 = trans.map(p1)
#pcache[key] = (p2.x(),p2.y())
#except:
#print key
#raise
rec = db.select('CochlearNucleus_Protocol', where={'ProtocolDir': protoDir})
if len(rec) == 0:
pos = (None, None, None)
elif len(rec) == 1:
pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
elif len(rec) == 2:
raise Exception("Multiple position records for %s!" % str(protoDir))
positionCache[key] = pos
extra[i]['right'] = positionCache[key][0]
extra[i]['anterior'] = positionCache[key][1]
extra[i]['dorsal'] = positionCache[key][2]
ev = fn.concatenateColumns([ev, extra])
allEvents.append(ev)
nEv += len(ev)
dlg.setValue(nEv)
if dlg.wasCanceled():
raise Exception('Canceled by user.')
ev = np.concatenate(allEvents)
numExSites = 0
numInSites = 0
for site in db.select(siteView, 'ProtocolSequenceDir', where={'CellDir': cell}):
h = hvals.get(site['ProtocolSequenceDir'],None)
if h is None:
continue
if h > -0.02:
numInSites += 1
elif h < -0.04:
numExSites += 1
events[cell] = (ev, numExSites, numInSites)
def init():
if not firstRun:
return
cellCombo.currentIndexChanged.connect(showCell)
separateCheck.toggled.connect(showCell)
colorCheck.toggled.connect(showCell)
errLimitSpin.valueChanged.connect(showCell)
lengthRatioLimitSpin.valueChanged.connect(showCell)
reloadBtn.clicked.connect(reloadCell)
for s in [sp1, sp2, sp3, sp4]:
s.sigPointsClicked.connect(plotClicked)
def plotClicked(plt, pts):
pt = pts[0]
#(id, fn, time) = pt.data
#[['SourceFile', 'ProtocolSequenceDir', 'fitTime']]
#fh = db.getDir('ProtocolSequence', id)[fn]
fh = pt.data()['SourceFile']
id = pt.data()['ProtocolSequenceDir']
time = pt.data()['fitTime']
data = fh.read()['Channel':'primary']
data = fn.besselFilter(data, 8e3)
p = pw2.plot(data, clear=True)
pos = time / data.xvals('Time')[-1]
arrow = pg.CurveArrow(p, pos=pos)
xr = pw2.viewRect().left(), pw2.viewRect().right()
if time < xr[0] or time > xr[1]:
w = xr[1]-xr[0]
pw2.setXRange(time-w/5., time+4*w/5., padding=0)
    # Overlay the PSP fit: the fit window spans fitLengthOverDecay decay
    # time constants, sampled at 50 kHz.
    fitLen = pt.data()['fitDecayTau']*pt.data()['fitLengthOverDecay']
    x = np.linspace(time, time+fitLen, fitLen * 50e3)
    v = [pt.data()['fitAmplitude'], pt.data()['fitTime'], pt.data()['fitRiseTau'], pt.data()['fitDecayTau']]
    # Offset the fit curve by the trace value just before event onset.
    y = fn.pspFunc(v, x, risePower=2.0) + data[np.argwhere(data.xvals('Time')>time)[0]-1]
    pw2.plot(x, y, pen='b')
#plot.addItem(arrow)
def select(ev, ex=True):
    """Select excitatory (ex=True) or inhibitory (ex=False) events by
    holding potential and amplitude sign, then discard events with
    implausible decay times or poor fits (limits come from the GUI)."""
    #if source is not None:
        #ev = ev[ev['CellDir']==source]
    if ex:
        ev = ev[ev['holding'] < -0.04]  # excitatory events
        ev = ev[(ev['fitAmplitude'] < 0) * (ev['fitAmplitude'] > -2e-10)]
    else:
        ev = ev[(ev['holding'] >= -0.02) * (ev['holding'] <= 0.01)]  ## inhibitory events
        ev = ev[(ev['fitAmplitude'] > 0) * (ev['fitAmplitude'] < 2e-10)]
    ev = ev[(0 < ev['fitDecayTau']) * (ev['fitDecayTau'] < 0.2)]  # select decay region
    ev = ev[ev['fitFractionalError'] < errLimitSpin.value()]
    ev = ev[ev['fitLengthOverDecay'] > lengthRatioLimitSpin.value()]
    return ev
def reloadCell():
showCell(reloadData=True)
def showCell(**kwds):
pw2.clear()
reloadData = kwds.get('reloadData', False)
#global lock
#if lock:
#return
#lock = True
QtGui.QApplication.processEvents() ## prevents double-spin
#lock = False
cell = cells[cellCombo.currentIndex()-1]
dh = cell #db.getDir('Cell', cell)
loadCell(dh, reloadData=reloadData)
try:
image.setImage(dh['morphology.png'].read())
gv.setRange(image.sceneBoundingRect())
    except:
        image.setImage(np.zeros((2,2)))
ev, numExSites, numInSites = events[cell]
ev2 = select(ev, ex=True)
ev3 = select(ev, ex=False)
if colorCheck.isChecked():
sp1.hide()
sp2.hide()
sp3.hide()
sp4.show()
start = postRgnStart()
stop = postRgnStop()
ev2post = ev2[(ev2['fitTime']>start) * (ev2['fitTime']<stop)]
ev3post = ev3[(ev3['fitTime']>start) * (ev3['fitTime']<stop)]
ev4 = np.concatenate([ev2post, ev3post])
yMax = ev4['dorsal'].max()
yMin = ev4['dorsal'].min()
brushes = []
for i in range(len(ev4)):
hue = 0.6*((ev4[i]['dorsal']-yMin) / (yMax-yMin))
brushes.append(pg.hsvColor(hue, 1.0, 1.0, 0.3))
#pts.append({
#'pos': (ev4[i]['fitDecayTau'], ev4[i]['fitAmplitude']),
#'brush': pg.hsvColor(hue, 1, 1, 0.3),
#'data': ev4[i]
#})
sp4.setData(x=ev4['fitDecayTau'], y=ev4['fitAmplitude'], symbolBrush=brushes, data=ev4)
else:
sp1.show()
sp2.show()
#sp3.show()
sp4.hide()
## excitatory
if separateCheck.isChecked():
pre = ev2[ev2['fitTime']< preRgnStop()]
post = ev2[(ev2['fitTime'] > postRgnStart()) * (ev2['fitTime'] < postRgnStop())]
else:
pre = ev2
sp1.setData(x=pre['fitDecayTau'], y=pre['fitAmplitude'], data=pre);
#print "Cell ", cell
#print " excitatory:", np.median(ev2['fitDecayTau']), np.median(ev2['fitAmplitude'])
## inhibitory
if separateCheck.isChecked():
pre = ev3[ev3['fitTime']< preRgnStop()]
post2 = ev3[(ev3['fitTime'] > postRgnStart()) * (ev3['fitTime'] < postRgnStop())]
post = np.concatenate([post, post2])
else:
pre = ev3
sp2.setData(x=pre['fitDecayTau'], y=pre['fitAmplitude'], data=pre);
#print " inhibitory:", np.median(ev2['fitDecayTau']), np.median(ev2['fitAmplitude'])
if separateCheck.isChecked():
sp3.setData(x=post['fitDecayTau'], y=post['fitAmplitude'], data=post)
sp3.show()
else:
sp3.hide()
try:
typ = ev2[0]['CellType']
except:
typ = ev3[0]['CellType']
sr = spontRate(ev2, numExSites)
sri = spontRate(ev3, numInSites)
title = "%s -- %s --- <span style='color: #99F;'>ex:</span> %s %s %s %0.1fHz --- <span style='color: #F99;'>in:</span> %s %s %s %0.1fHz" % (
dh.name(relativeTo=dh.parent().parent().parent()),
typ,
pg.siFormat(np.median(ev2['fitTimeToPeak']), error=np.std(ev2['fitTimeToPeak']), space=False, suffix='s'),
pg.siFormat(np.median(ev2['fitDecayTau']), error=np.std(ev2['fitDecayTau']), space=False, suffix='s'),
pg.siFormat(np.median(ev2['fitAmplitude']), error=np.std(ev2['fitAmplitude']), space=False, suffix='A'),
sr,
pg.siFormat(np.median(ev3['fitTimeToPeak']), error=np.std(ev3['fitTimeToPeak']), space=False, suffix='s'),
pg.siFormat(np.median(ev3['fitDecayTau']), error=np.std(ev3['fitDecayTau']), space=False, suffix='s'),
pg.siFormat(np.median(ev3['fitAmplitude']), error=np.std(ev3['fitAmplitude']), space=False, suffix='A'),
sri)
print re.sub(r'<[^>]+>', '', title)
pw1.setTitle(title)
### show cell in atlas
#rec = db.select('CochlearNucleus_Cell', where={'CellDir': cell})
#pts = []
#if len(rec) > 0:
#pos = (rec[0]['right'], rec[0]['anterior'], rec[0]['dorsal'])
#pts = [{'pos': pos, 'size': 100e-6, 'color': (0.7, 0.7, 1.0, 1.0)}]
### show event positions
evSpots = {}
for rec in ev:
p = (rec['right'], rec['anterior'], rec['dorsal'])
evSpots[p] = None
pos = np.array(evSpots.keys())
atlasPoints.setData(pos=pos, )
def spontRate(ev, n):
## This is broken. It does not take into account recordings that had no events.
ev = ev[ev['fitTime'] < preRgnStop()]
#count = {}
#dirs = set()
#for i in range(len(ev)):
#key = (ev[i]['ProtocolSequenceDir'], ev[i]['SourceFile'])
#dirs.add(set)
#if key not in count:
#count[key] = 0
#count[key] += 1
#sr = np.mean([v/(preRgnStop()) for v in count.itervalues()])
if n == 0:
return 0
return len(ev) / (preRgnStop() * n)
def preRgnStop():
return postRgnStartSpin.value() - 0.002
def postRgnStart():
return postRgnStartSpin.value() + 0.002
def postRgnStop():
return postRgnStopSpin.value()
init() | hiuwo/acq4 | acq4/analysis/scripts/eventExplorer.py | Python | mit | 15,462 |
from settings.common import *
DATABASES = {
'default': {
"ENGINE": "django.db.backends.mysql",
"NAME": "mhfowler",
"USER": "root",
"PASSWORD": "",
"HOST": "localhost",
"PORT": ""
}
} | mhfowler/mhfowler | settings/local.py | Python | mit | 239 |
#!/usr/bin/env python3
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Lint format strings: This program checks that the number of arguments passed
# to a variadic format string function matches the number of format specifiers
# in the format string.
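#
# Illustrative example of the kind of mismatch reported (hypothetical call,
# not taken from the codebase): the format string below contains two
# specifiers but only one argument is supplied.
#
#     LogPrintf("peer=%s height=%d\n", peer_name);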
import argparse
import re
import sys
FALSE_POSITIVES = [
("src/batchedlogger.h", "strprintf(fmt, args...)"),
("src/dbwrapper.cpp", "vsnprintf(p, limit - p, format, backup_ap)"),
("src/index/base.cpp", "FatalError(const char* fmt, const Args&... args)"),
("src/netbase.cpp", "LogConnectFailure(bool manual_connection, const char* fmt, const Args&... args)"),
("src/qt/networkstyle.cpp", "strprintf(appName, gArgs.GetDevNetName())"),
("src/qt/networkstyle.cpp", "strprintf(titleAddText, gArgs.GetDevNetName())"),
("src/rpc/rpcevo.cpp", "strprintf(it->second, nParamNum)"),
("src/stacktraces.cpp", "strprintf(fmtStr, i, si.pc, lstr, fstr)"),
("src/statsd_client.cpp", "snprintf(d->errmsg, sizeof(d->errmsg), \"could not create socket, err=%m\")"),
("src/statsd_client.cpp", "snprintf(d->errmsg, sizeof(d->errmsg), \"sendto server fail, host=%s:%d, err=%m\", d->host.c_str(), d->port)"),
("src/util.cpp", "strprintf(_(COPYRIGHT_HOLDERS), _(COPYRIGHT_HOLDERS_SUBSTITUTION))"),
("src/util.cpp", "strprintf(COPYRIGHT_HOLDERS, COPYRIGHT_HOLDERS_SUBSTITUTION)"),
("src/wallet/wallet.h", "WalletLogPrintf(std::string fmt, Params... parameters)"),
("src/wallet/wallet.h", "LogPrintf((\"%s \" + fmt).c_str(), GetDisplayName(), parameters...)"),
]
def parse_function_calls(function_name, source_code):
"""Return an array with all calls to function function_name in string source_code.
Preprocessor directives and C++ style comments ("//") in source_code are removed.
>>> len(parse_function_calls("foo", "foo();bar();foo();bar();"))
2
>>> parse_function_calls("foo", "foo(1);bar(1);foo(2);bar(2);")[0].startswith("foo(1);")
True
>>> parse_function_calls("foo", "foo(1);bar(1);foo(2);bar(2);")[1].startswith("foo(2);")
True
>>> len(parse_function_calls("foo", "foo();bar();// foo();bar();"))
1
>>> len(parse_function_calls("foo", "#define FOO foo();"))
0
"""
assert(type(function_name) is str and type(source_code) is str and function_name)
lines = [re.sub("// .*", " ", line).strip()
for line in source_code.split("\n")
if not line.strip().startswith("#")]
return re.findall(r"[^a-zA-Z_](?=({}\(.*).*)".format(function_name), " " + " ".join(lines))
def normalize(s):
"""Return a normalized version of string s with newlines, tabs and C style comments ("/* ... */")
replaced with spaces. Multiple spaces are replaced with a single space.
>>> normalize(" /* nothing */ foo\tfoo /* bar */ foo ")
'foo foo foo'
"""
assert(type(s) is str)
s = s.replace("\n", " ")
s = s.replace("\t", " ")
s = re.sub("/\*.*?\*/", " ", s)
s = re.sub(" {2,}", " ", s)
return s.strip()
ESCAPE_MAP = {
r"\n": "[escaped-newline]",
r"\t": "[escaped-tab]",
r'\"': "[escaped-quote]",
}
def escape(s):
"""Return the escaped version of string s with "\\\"", "\\n" and "\\t" escaped as
"[escaped-backslash]", "[escaped-newline]" and "[escaped-tab]".
>>> unescape(escape("foo")) == "foo"
True
>>> escape(r'foo \\t foo \\n foo \\\\ foo \\ foo \\"bar\\"')
'foo [escaped-tab] foo [escaped-newline] foo \\\\\\\\ foo \\\\ foo [escaped-quote]bar[escaped-quote]'
"""
assert(type(s) is str)
for raw_value, escaped_value in ESCAPE_MAP.items():
s = s.replace(raw_value, escaped_value)
return s
def unescape(s):
"""Return the unescaped version of escaped string s.
Reverses the replacements made in function escape(s).
>>> unescape(escape("bar"))
'bar'
>>> unescape("foo [escaped-tab] foo [escaped-newline] foo \\\\\\\\ foo \\\\ foo [escaped-quote]bar[escaped-quote]")
'foo \\\\t foo \\\\n foo \\\\\\\\ foo \\\\ foo \\\\"bar\\\\"'
"""
assert(type(s) is str)
for raw_value, escaped_value in ESCAPE_MAP.items():
s = s.replace(escaped_value, raw_value)
return s
def parse_function_call_and_arguments(function_name, function_call):
"""Split string function_call into an array of strings consisting of:
* the string function_call followed by "("
* the function call argument #1
* ...
* the function call argument #n
* a trailing ");"
The strings returned are in escaped form. See escape(...).
>>> parse_function_call_and_arguments("foo", 'foo("%s", "foo");')
['foo(', '"%s",', ' "foo"', ')']
>>> parse_function_call_and_arguments("foo", 'foo("%s", "foo");')
['foo(', '"%s",', ' "foo"', ')']
>>> parse_function_call_and_arguments("foo", 'foo("%s %s", "foo", "bar");')
['foo(', '"%s %s",', ' "foo",', ' "bar"', ')']
>>> parse_function_call_and_arguments("fooprintf", 'fooprintf("%050d", i);')
['fooprintf(', '"%050d",', ' i', ')']
>>> parse_function_call_and_arguments("foo", 'foo(bar(foobar(barfoo("foo"))), foobar); barfoo')
['foo(', 'bar(foobar(barfoo("foo"))),', ' foobar', ')']
>>> parse_function_call_and_arguments("foo", "foo()")
['foo(', '', ')']
>>> parse_function_call_and_arguments("foo", "foo(123)")
['foo(', '123', ')']
>>> parse_function_call_and_arguments("foo", 'foo("foo")')
['foo(', '"foo"', ')']
"""
assert(type(function_name) is str and type(function_call) is str and function_name)
remaining = normalize(escape(function_call))
expected_function_call = "{}(".format(function_name)
assert(remaining.startswith(expected_function_call))
parts = [expected_function_call]
remaining = remaining[len(expected_function_call):]
open_parentheses = 1
in_string = False
parts.append("")
for char in remaining:
parts.append(parts.pop() + char)
if char == "\"":
in_string = not in_string
continue
if in_string:
continue
if char == "(":
open_parentheses += 1
continue
if char == ")":
open_parentheses -= 1
if open_parentheses > 1:
continue
if open_parentheses == 0:
parts.append(parts.pop()[:-1])
parts.append(char)
break
if char == ",":
parts.append("")
return parts
def parse_string_content(argument):
"""Return the text within quotes in string argument.
>>> parse_string_content('1 "foo %d bar" 2')
'foo %d bar'
>>> parse_string_content('1 foobar 2')
''
>>> parse_string_content('1 "bar" 2')
'bar'
>>> parse_string_content('1 "foo" 2 "bar" 3')
'foobar'
>>> parse_string_content('1 "foo" 2 " " "bar" 3')
'foo bar'
>>> parse_string_content('""')
''
>>> parse_string_content('')
''
>>> parse_string_content('1 2 3')
''
"""
assert(type(argument) is str)
string_content = ""
in_string = False
for char in normalize(escape(argument)):
if char == "\"":
in_string = not in_string
elif in_string:
string_content += char
return string_content
def count_format_specifiers(format_string):
"""Return the number of format specifiers in string format_string.
>>> count_format_specifiers("foo bar foo")
0
>>> count_format_specifiers("foo %d bar foo")
1
>>> count_format_specifiers("foo %d bar %i foo")
2
>>> count_format_specifiers("foo %d bar %i foo %% foo")
2
>>> count_format_specifiers("foo %d bar %i foo %% foo %d foo")
3
>>> count_format_specifiers("foo %d bar %i foo %% foo %*d foo")
4
"""
assert(type(format_string) is str)
n = 0
in_specifier = False
for i, char in enumerate(format_string):
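        # A literal "%%" escapes the percent sign: skip both of its characters
        # so it is not counted as a conversion specifier.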
if format_string[i - 1:i + 1] == "%%" or format_string[i:i + 2] == "%%":
pass
elif char == "%":
in_specifier = True
n += 1
elif char in "aAcdeEfFgGinopsuxX":
in_specifier = False
elif in_specifier and char == "*":
n += 1
return n
def main():
parser = argparse.ArgumentParser(description="This program checks that the number of arguments passed "
"to a variadic format string function matches the number of format "
"specifiers in the format string.")
parser.add_argument("--skip-arguments", type=int, help="number of arguments before the format string "
"argument (e.g. 1 in the case of fprintf)", default=0)
parser.add_argument("function_name", help="function name (e.g. fprintf)", default=None)
parser.add_argument("file", nargs="*", help="C++ source code file (e.g. foo.cpp)")
args = parser.parse_args()
exit_code = 0
for filename in args.file:
with open(filename, "r", encoding="utf-8") as f:
for function_call_str in parse_function_calls(args.function_name, f.read()):
parts = parse_function_call_and_arguments(args.function_name, function_call_str)
relevant_function_call_str = unescape("".join(parts))[:512]
if (f.name, relevant_function_call_str) in FALSE_POSITIVES:
continue
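                # parts == [call prefix, argument 1, ..., argument n, ")"], so a
                # parseable call needs at least the prefix, a format string
                # (after any skipped arguments) and the closing ")".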
if len(parts) < 3 + args.skip_arguments:
exit_code = 1
print("{}: Could not parse function call string \"{}(...)\": {}".format(f.name, args.function_name, relevant_function_call_str))
continue
argument_count = len(parts) - 3 - args.skip_arguments
format_str = parse_string_content(parts[1 + args.skip_arguments])
format_specifier_count = count_format_specifiers(format_str)
if format_specifier_count != argument_count:
exit_code = 1
print("{}: Expected {} argument(s) after format string but found {} argument(s): {}".format(f.name, format_specifier_count, argument_count, relevant_function_call_str))
continue
sys.exit(exit_code)
if __name__ == "__main__":
main()
| dashpay/dash | test/lint/lint-format-strings.py | Python | mit | 10365 |
from abc import ABCMeta, abstractmethod
class ConfigParser:
    """Configuration file parser ABC
    Config parser interface: all parsers for configuration
    files need to comply with this interface
    so lmdo can understand them
    """
    __metaclass__ = ABCMeta
@abstractmethod
def get(self, *args, **kwargs):
"""Get value from config file"""
pass
@abstractmethod
def validate(self, *args, **kwargs):
"""Validate config file"""
pass
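# Illustrative sketch (not part of lmdo): a concrete parser only has to
# implement the two abstract methods above, e.g. backed by a plain dict:
#
#     class DictConfigParser(ConfigParser):
#         def __init__(self, data):
#             self._data = data
#         def get(self, key, default=None):
#             return self._data.get(key, default)
#         def validate(self):
#             return isinstance(self._data, dict)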
| liangrog/lmdo | lmdo/config_parser.py | Python | mit | 506 |
import sys, os
if os.path.isfile("/usr/lib/enigma2/python/enigma.zip"):
sys.path.append("/usr/lib/enigma2/python/enigma.zip")
from Tools.Profile import profile, profile_final
profile("PYTHON_START")
import Tools.RedirectOutput
import enigma
import eConsoleImpl
import eBaseImpl
from boxbranding import getBoxType
enigma.eTimer = eBaseImpl.eTimer
enigma.eSocketNotifier = eBaseImpl.eSocketNotifier
enigma.eConsoleAppContainer = eConsoleImpl.eConsoleAppContainer
from traceback import print_exc
profile("SimpleSummary")
from Screens import InfoBar
from Screens.SimpleSummary import SimpleSummary
from sys import stdout, exc_info
profile("Bouquets")
from Components.config import config, configfile, ConfigText, ConfigYesNo, ConfigInteger, NoSave
config.misc.load_unlinked_userbouquets = ConfigYesNo(default=True)
def setLoadUnlinkedUserbouquets(configElement):
enigma.eDVBDB.getInstance().setLoadUnlinkedUserbouquets(configElement.value)
config.misc.load_unlinked_userbouquets.addNotifier(setLoadUnlinkedUserbouquets)
enigma.eDVBDB.getInstance().reloadBouquets()
profile("ParentalControl")
import Components.ParentalControl
Components.ParentalControl.InitParentalControl()
profile("LOAD:Navigation")
from Navigation import Navigation
profile("LOAD:skin")
from skin import readSkin
profile("LOAD:Tools")
from Tools.Directories import InitFallbackFiles, resolveFilename, SCOPE_PLUGINS, SCOPE_CURRENT_SKIN
InitFallbackFiles()
profile("config.misc")
config.misc.radiopic = ConfigText(default = resolveFilename(SCOPE_CURRENT_SKIN, "radio.mvi"))
config.misc.blackradiopic = ConfigText(default = resolveFilename(SCOPE_CURRENT_SKIN, "black.mvi"))
config.misc.useTransponderTime = ConfigYesNo(default=True)
config.misc.startCounter = ConfigInteger(default=0) # number of e2 starts...
config.misc.standbyCounter = NoSave(ConfigInteger(default=0)) # number of standby
config.misc.DeepStandby = NoSave(ConfigYesNo(default=False)) # detect deepstandby
config.misc.RestartUI = ConfigYesNo(default=False) # detect user interface restart
config.misc.epgcache_filename = ConfigText(default = "/hdd/epg.dat")
def setEPGCachePath(configElement):
enigma.eEPGCache.getInstance().setCacheFile(configElement.value)
#demo code for use of standby enter leave callbacks
#def leaveStandby():
# print "!!!!!!!!!!!!!!!!!leave standby"
#def standbyCountChanged(configElement):
# print "!!!!!!!!!!!!!!!!!enter standby num", configElement.value
# from Screens.Standby import inStandby
# inStandby.onClose.append(leaveStandby)
#config.misc.standbyCounter.addNotifier(standbyCountChanged, initial_call = False)
####################################################
def useTransponderTimeChanged(configElement):
enigma.eDVBLocalTimeHandler.getInstance().setUseDVBTime(configElement.value)
config.misc.useTransponderTime.addNotifier(useTransponderTimeChanged)
profile("Twisted")
try:
import twisted.python.runtime
import e2reactor
e2reactor.install()
from twisted.internet import reactor
def runReactor():
reactor.run(installSignalHandlers=False)
except ImportError:
print "twisted not available"
def runReactor():
enigma.runMainloop()
profile("LOAD:Plugin")
# initialize autorun plugins and plugin menu entries
from Components.PluginComponent import plugins
profile("LOAD:Wizard")
from Screens.Wizard import wizardManager
from Screens.StartWizard import *
import Screens.Rc
from Tools.BoundFunction import boundFunction
from Plugins.Plugin import PluginDescriptor
profile("misc")
had = dict()
def dump(dir, p = ""):
if isinstance(dir, dict):
for (entry, val) in dir.items():
dump(val, p + "(dict)/" + entry)
if hasattr(dir, "__dict__"):
for name, value in dir.__dict__.items():
if not had.has_key(str(value)):
had[str(value)] = 1
dump(value, p + "/" + str(name))
else:
print p + "/" + str(name) + ":" + str(dir.__class__) + "(cycle)"
else:
print p + ":" + str(dir)
# + ":" + str(dir.__class__)
# display
profile("LOAD:ScreenGlobals")
from Screens.Globals import Globals
from Screens.SessionGlobals import SessionGlobals
from Screens.Screen import Screen
profile("Screen")
Screen.global_screen = Globals()
# Session.open:
# * push current active dialog ('current_dialog') onto stack
# * call execEnd for this dialog
# * clear in_exec flag
# * hide screen
# * instantiate new dialog into 'current_dialog'
# * create screens, components
# * read, apply skin
# * create GUI for screen
# * call execBegin for new dialog
# * set in_exec
# * show gui screen
# * call components' / screen's onExecBegin
# ... screen is active, until it calls 'close'...
# Session.close:
# * assert in_exec
# * save return value
# * start deferred close handler ('onClose')
# * execEnd
# * clear in_exec
# * hide screen
# .. a moment later:
# Session.doClose:
# * destroy screen
class Session:
def __init__(self, desktop = None, summary_desktop = None, navigation = None):
self.desktop = desktop
self.summary_desktop = summary_desktop
self.nav = navigation
self.delay_timer = enigma.eTimer()
self.delay_timer.callback.append(self.processDelay)
self.current_dialog = None
self.dialog_stack = [ ]
self.summary_stack = [ ]
self.summary = None
self.in_exec = False
self.screen = SessionGlobals(self)
for p in plugins.getPlugins(PluginDescriptor.WHERE_SESSIONSTART):
try:
p(reason=0, session=self)
except:
print "Plugin raised exception at WHERE_SESSIONSTART"
import traceback
traceback.print_exc()
def processDelay(self):
callback = self.current_dialog.callback
retval = self.current_dialog.returnValue
if self.current_dialog.isTmp:
self.current_dialog.doClose()
# dump(self.current_dialog)
del self.current_dialog
else:
del self.current_dialog.callback
self.popCurrent()
if callback is not None:
callback(*retval)
def execBegin(self, first=True, do_show = True):
assert not self.in_exec
self.in_exec = True
c = self.current_dialog
# when this is an execbegin after a execend of a "higher" dialog,
# popSummary already did the right thing.
if first:
self.instantiateSummaryDialog(c)
c.saveKeyboardMode()
c.execBegin()
# when execBegin opened a new dialog, don't bother showing the old one.
if c == self.current_dialog and do_show:
c.show()
def execEnd(self, last=True):
assert self.in_exec
self.in_exec = False
self.current_dialog.execEnd()
self.current_dialog.restoreKeyboardMode()
self.current_dialog.hide()
if last:
self.current_dialog.removeSummary(self.summary)
self.popSummary()
def create(self, screen, arguments, **kwargs):
# creates an instance of 'screen' (which is a class)
try:
return screen(self, *arguments, **kwargs)
except:
errstr = "Screen %s(%s, %s): %s" % (str(screen), str(arguments), str(kwargs), exc_info()[0])
print errstr
print_exc(file=stdout)
enigma.quitMainloop(5)
def instantiateDialog(self, screen, *arguments, **kwargs):
return self.doInstantiateDialog(screen, arguments, kwargs, self.desktop)
def deleteDialog(self, screen):
screen.hide()
screen.doClose()
def instantiateSummaryDialog(self, screen, **kwargs):
self.pushSummary()
summary = screen.createSummary() or SimpleSummary
arguments = (screen,)
self.summary = self.doInstantiateDialog(summary, arguments, kwargs, self.summary_desktop)
self.summary.show()
screen.addSummary(self.summary)
def doInstantiateDialog(self, screen, arguments, kwargs, desktop):
# create dialog
try:
dlg = self.create(screen, arguments, **kwargs)
except:
print 'EXCEPTION IN DIALOG INIT CODE, ABORTING:'
print '-'*60
print_exc(file=stdout)
enigma.quitMainloop(5)
print '-'*60
if dlg is None:
return
# read skin data
readSkin(dlg, None, dlg.skinName, desktop)
# create GUI view of this dialog
assert desktop is not None
dlg.setDesktop(desktop)
dlg.applySkin()
return dlg
def pushCurrent(self):
if self.current_dialog is not None:
self.dialog_stack.append((self.current_dialog, self.current_dialog.shown))
self.execEnd(last=False)
def popCurrent(self):
if self.dialog_stack:
(self.current_dialog, do_show) = self.dialog_stack.pop()
self.execBegin(first=False, do_show=do_show)
else:
self.current_dialog = None
def execDialog(self, dialog):
self.pushCurrent()
self.current_dialog = dialog
self.current_dialog.isTmp = False
self.current_dialog.callback = None # would cause re-entrancy problems.
self.execBegin()
def openWithCallback(self, callback, screen, *arguments, **kwargs):
dlg = self.open(screen, *arguments, **kwargs)
dlg.callback = callback
return dlg
def open(self, screen, *arguments, **kwargs):
if self.dialog_stack and not self.in_exec:
raise RuntimeError("modal open are allowed only from a screen which is modal!")
# ...unless it's the very first screen.
self.pushCurrent()
dlg = self.current_dialog = self.instantiateDialog(screen, *arguments, **kwargs)
dlg.isTmp = True
dlg.callback = None
self.execBegin()
return dlg
def close(self, screen, *retval):
if not self.in_exec:
print "close after exec!"
return
# be sure that the close is for the right dialog!
# if it's not, you probably closed after another dialog
# was opened. this can happen if you open a dialog
# onExecBegin, and forget to do this only once.
# after close of the top dialog, the underlying will
# gain focus again (for a short time), thus triggering
# the onExec, which opens the dialog again, closing the loop.
assert screen == self.current_dialog
self.current_dialog.returnValue = retval
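		# single-shot zero-delay timer: processDelay runs on the next mainloop
		# iteration, after close() has returned, so the dialog is torn down safely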
self.delay_timer.start(0, 1)
self.execEnd()
def pushSummary(self):
if self.summary is not None:
self.summary.hide()
self.summary_stack.append(self.summary)
self.summary = None
def popSummary(self):
if self.summary is not None:
self.summary.doClose()
self.summary = self.summary_stack.pop()
if self.summary is not None:
self.summary.show()
profile("Standby,PowerKey")
import Screens.Standby
from Screens.Menu import MainMenu, mdom
from GlobalActions import globalActionMap
class PowerKey:
""" PowerKey stuff - handles the powerkey press and powerkey release actions"""
def __init__(self, session):
self.session = session
globalActionMap.actions["power_down"]=self.powerdown
globalActionMap.actions["power_up"]=self.powerup
globalActionMap.actions["power_long"]=self.powerlong
globalActionMap.actions["deepstandby"]=self.shutdown # frontpanel long power button press
globalActionMap.actions["discrete_off"]=self.standby
self.standbyblocked = 1
def MenuClosed(self, *val):
self.session.infobar = None
def shutdown(self):
print "PowerOff - Now!"
if not Screens.Standby.inTryQuitMainloop and self.session.current_dialog and self.session.current_dialog.ALLOW_SUSPEND:
self.session.open(Screens.Standby.TryQuitMainloop, 1)
def powerlong(self):
if Screens.Standby.inTryQuitMainloop or (self.session.current_dialog and not self.session.current_dialog.ALLOW_SUSPEND):
return
self.doAction(action = config.usage.on_long_powerpress.value)
def doAction(self, action):
self.standbyblocked = 1
if action == "shutdown":
self.shutdown()
elif action == "show_menu":
print "Show shutdown Menu"
root = mdom.getroot()
for x in root.findall("menu"):
y = x.find("id")
if y is not None:
id = y.get("val")
if id and id == "shutdown":
self.session.infobar = self
menu_screen = self.session.openWithCallback(self.MenuClosed, MainMenu, x)
menu_screen.setTitle(_("Standby / restart"))
return
elif action == "standby":
self.standby()
def powerdown(self):
self.standbyblocked = 0
def powerup(self):
if self.standbyblocked == 0:
self.doAction(action = config.usage.on_short_powerpress.value)
def standby(self):
if not Screens.Standby.inStandby and self.session.current_dialog and self.session.current_dialog.ALLOW_SUSPEND and self.session.in_exec:
self.session.open(Screens.Standby.Standby)
profile("Scart")
from Screens.Scart import Scart
class AutoScartControl:
def __init__(self, session):
self.force = False
self.current_vcr_sb = enigma.eAVSwitch.getInstance().getVCRSlowBlanking()
if self.current_vcr_sb and config.av.vcrswitch.value:
self.scartDialog = session.instantiateDialog(Scart, True)
else:
self.scartDialog = session.instantiateDialog(Scart, False)
config.av.vcrswitch.addNotifier(self.recheckVCRSb)
enigma.eAVSwitch.getInstance().vcr_sb_notifier.get().append(self.VCRSbChanged)
def recheckVCRSb(self, configElement):
self.VCRSbChanged(self.current_vcr_sb)
def VCRSbChanged(self, value):
#print "vcr sb changed to", value
self.current_vcr_sb = value
if config.av.vcrswitch.value or value > 2:
if value:
self.scartDialog.showMessageBox()
else:
self.scartDialog.switchToTV()
profile("Load:CI")
from enigma import eDVBCIInterfaces
from Screens.Ci import CiHandler
profile("Load:VolumeControl")
from Components.VolumeControl import VolumeControl
def runScreenTest():
config.misc.startCounter.value += 1
profile("readPluginList")
plugins.readPluginList(resolveFilename(SCOPE_PLUGINS))
profile("Init:Session")
nav = Navigation()
session = Session(desktop = enigma.getDesktop(0), summary_desktop = enigma.getDesktop(1), navigation = nav)
CiHandler.setSession(session)
screensToRun = [ p.__call__ for p in plugins.getPlugins(PluginDescriptor.WHERE_WIZARD) ]
profile("wizards")
screensToRun += wizardManager.getWizards()
screensToRun.append((100, InfoBar.InfoBar))
screensToRun.sort()
enigma.ePythonConfigQuery.setQueryFunc(configfile.getResolvedKey)
# eDVBCIInterfaces.getInstance().setDescrambleRules(0 # Slot Number
# ,( ["1:0:1:24:4:85:C00000:0:0:0:"], #service_list
# ["PREMIERE"], #provider_list,
# [] #caid_list
# ));
def runNextScreen(session, screensToRun, *result):
if result:
enigma.quitMainloop(*result)
return
screen = screensToRun[0][1]
args = screensToRun[0][2:]
		if len(screensToRun) > 1:
session.openWithCallback(boundFunction(runNextScreen, session, screensToRun[1:]), screen, *args)
else:
session.open(screen, *args)
config.misc.epgcache_filename.addNotifier(setEPGCachePath)
runNextScreen(session, screensToRun)
profile("Init:VolumeControl")
vol = VolumeControl(session)
profile("Init:PowerKey")
power = PowerKey(session)
# we need session.scart to access it from within menu.xml
session.scart = AutoScartControl(session)
profile("Init:Trashcan")
import Tools.Trashcan
Tools.Trashcan.init(session)
profile("RunReactor")
profile_final()
if getBoxType() == 'odinm7':
f = open("/dev/dbox/oled0", "w")
f.write('-E2-')
f.close()
runReactor()
config.misc.startCounter.save()
profile("wakeup")
from time import time, strftime, localtime
from Tools.StbHardware import setFPWakeuptime, getFPWakeuptime, setRTCtime
#get currentTime
nowTime = time()
wakeupList = [
x for x in ((session.nav.RecordTimer.getNextRecordingTime(), 0),
(session.nav.RecordTimer.getNextZapTime(isWakeup=True), 1),
(plugins.getNextWakeupTime(), 2))
if x[0] != -1
]
wakeupList.sort()
if wakeupList:
startTime = wakeupList[0]
if (startTime[0] - nowTime) < 270: # no time to switch box back on
wptime = nowTime + 30 # so switch back on in 30 seconds
else:
wptime = startTime[0] - 240
if not config.misc.useTransponderTime.value:
print "dvb time sync disabled... so set RTC now to current linux time!", strftime("%Y/%m/%d %H:%M", localtime(nowTime))
setRTCtime(nowTime)
print "set wakeup time to", strftime("%Y/%m/%d %H:%M", localtime(wptime))
setFPWakeuptime(wptime)
profile("stopService")
session.nav.stopService()
profile("nav shutdown")
session.nav.shutdown()
profile("configfile.save")
configfile.save()
from Screens import InfoBarGenerics
InfoBarGenerics.saveResumePoints()
return 0
profile("Init:skin")
import skin
skin.loadSkinData(enigma.getDesktop(0))
profile("InputDevice")
import Components.InputDevice
Components.InputDevice.InitInputDevices()
import Components.InputHotplug
profile("SetupDevices")
import Components.SetupDevices
Components.SetupDevices.InitSetupDevices()
profile("AVSwitch")
import Components.AVSwitch
Components.AVSwitch.InitAVSwitch()
profile("RecordingConfig")
import Components.RecordingConfig
Components.RecordingConfig.InitRecordingConfig()
profile("UsageConfig")
import Components.UsageConfig
Components.UsageConfig.InitUsageConfig()
profile("keymapparser")
import keymapparser
keymapparser.readKeymap(config.usage.keymap.value)
profile("Network")
import Components.Network
Components.Network.InitNetwork()
profile("LCD")
import Components.Lcd
Components.Lcd.InitLcd()
profile("RFMod")
import Components.RFmod
Components.RFmod.InitRFmod()
profile("Init:CI")
import Screens.Ci
Screens.Ci.InitCiConfig()
profile("RcModel")
import Components.RcModel
#from enigma import dump_malloc_stats
#t = eTimer()
#t.callback.append(dump_malloc_stats)
#t.start(1000)
# first, setup a screen
try:
runScreenTest()
plugins.shutdown()
Components.ParentalControl.parentalControl.save()
except:
print 'EXCEPTION IN PYTHON STARTUP CODE:'
print '-'*60
print_exc(file=stdout)
enigma.quitMainloop(5)
print '-'*60
| popazerty/test | mytest.py | Python | gpl-2.0 | 17270 |
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/svn/docs/wafbook/single.html#_obtaining_the_waf_file
import sys
if sys.hexversion < 0x020400f0: from sets import Set as set
import os,imp,sys,shlex,shutil
from waflib import Build,Utils,Configure,Task,Options,Logs,TaskGen,Errors,ConfigSet,Runner
from waflib.TaskGen import before_method,after_method,feature
from waflib.Configure import conf
WAF_CONFIG_H='config.h'
DEFKEYS='define_key'
INCKEYS='include_key'
cfg_ver={'atleast-version':'>=','exact-version':'==','max-version':'<=',}
SNIP_FUNCTION='''
int main() {
void *p;
p=(void*)(%s);
return 0;
}
'''
SNIP_TYPE='''
int main() {
if ((%(type_name)s *) 0) return 0;
if (sizeof (%(type_name)s)) return 0;
}
'''
SNIP_CLASS='''
int main() {
if (
}
'''
SNIP_EMPTY_PROGRAM='''
int main() {
return 0;
}
'''
SNIP_FIELD='''
int main() {
char *off;
off = (char*) &((%(type_name)s*)0)->%(field_name)s;
return (size_t) off < sizeof(%(type_name)s);
}
'''
MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'msys','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__POWERPC__':'powerpc','__QNX__':'qnx'}
MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc',}
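# Illustrative example (flag values assumed): conf.parse_flags('-I/opt/foo/include -DBAR=1 -L/opt/foo/lib -lfoo', 'FOO')
# appends '/opt/foo/include' to INCLUDES_FOO, 'BAR=1' to DEFINES_FOO,
# '/opt/foo/lib' to LIBPATH_FOO and 'foo' to LIB_FOO.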
def parse_flags(self,line,uselib,env=None):
assert(isinstance(line,str))
env=env or self.env
app=env.append_value
appu=env.append_unique
lex=shlex.shlex(line,posix=False)
lex.whitespace_split=True
lex.commenters=''
lst=list(lex)
while lst:
x=lst.pop(0)
st=x[:2]
ot=x[2:]
if st=='-I'or st=='/I':
if not ot:ot=lst.pop(0)
appu('INCLUDES_'+uselib,[ot])
elif st=='-include':
tmp=[x,lst.pop(0)]
app('CFLAGS',tmp)
app('CXXFLAGS',tmp)
elif st=='-D'or(self.env.CXX_NAME=='msvc'and st=='/D'):
if not ot:ot=lst.pop(0)
app('DEFINES_'+uselib,[ot])
elif st=='-l':
if not ot:ot=lst.pop(0)
appu('LIB_'+uselib,[ot])
elif st=='-L':
if not ot:ot=lst.pop(0)
appu('LIBPATH_'+uselib,[ot])
elif x=='-pthread'or x.startswith('+')or x.startswith('-std'):
app('CFLAGS_'+uselib,[x])
app('CXXFLAGS_'+uselib,[x])
app('LINKFLAGS_'+uselib,[x])
elif x=='-framework':
appu('FRAMEWORK_'+uselib,[lst.pop(0)])
elif x.startswith('-F'):
appu('FRAMEWORKPATH_'+uselib,[x[2:]])
elif x.startswith('-Wl'):
app('LINKFLAGS_'+uselib,[x])
elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'):
app('CFLAGS_'+uselib,[x])
app('CXXFLAGS_'+uselib,[x])
elif x.startswith('-arch')or x.startswith('-isysroot'):
tmp=[x,lst.pop(0)]
app('CFLAGS_'+uselib,tmp)
app('CXXFLAGS_'+uselib,tmp)
app('LINKFLAGS_'+uselib,tmp)
elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib'):
appu('LINKFLAGS_'+uselib,[x])
def ret_msg(self,f,kw):
if isinstance(f,str):
return f
return f(kw)
def validate_cfg(self,kw):
if not'path'in kw:
if not self.env.PKGCONFIG:
self.find_program('pkg-config',var='PKGCONFIG')
kw['path']=self.env.PKGCONFIG
if'atleast_pkgconfig_version'in kw:
if not'msg'in kw:
kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version']
return
if not'okmsg'in kw:
kw['okmsg']='yes'
if not'errmsg'in kw:
kw['errmsg']='not found'
if'modversion'in kw:
if not'msg'in kw:
kw['msg']='Checking for %r version'%kw['modversion']
return
for x in cfg_ver.keys():
y=x.replace('-','_')
if y in kw:
if not'package'in kw:
raise ValueError('%s requires a package'%x)
if not'msg'in kw:
kw['msg']='Checking for %r %s %s'%(kw['package'],cfg_ver[x],kw[y])
return
if not'msg'in kw:
kw['msg']='Checking for %r'%(kw['package']or kw['path'])
def exec_cfg(self,kw):
if'atleast_pkgconfig_version'in kw:
cmd=[kw['path'],'--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']]
self.cmd_and_log(cmd)
if not'okmsg'in kw:
kw['okmsg']='yes'
return
for x in cfg_ver:
y=x.replace('-','_')
if y in kw:
self.cmd_and_log([kw['path'],'--%s=%s'%(x,kw[y]),kw['package']])
if not'okmsg'in kw:
kw['okmsg']='yes'
self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
break
if'modversion'in kw:
version=self.cmd_and_log([kw['path'],'--modversion',kw['modversion']]).strip()
self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version)
return version
lst=[kw['path']]
defi=kw.get('define_variable',None)
if not defi:
defi=self.env.PKG_CONFIG_DEFINES or{}
for key,val in defi.items():
lst.append('--define-variable=%s=%s'%(key,val))
if kw['package']:
lst.extend(Utils.to_list(kw['package']))
if'variables'in kw:
env=kw.get('env',self.env)
uselib=kw.get('uselib_store',kw['package'].upper())
vars=Utils.to_list(kw['variables'])
for v in vars:
val=self.cmd_and_log(lst+['--variable='+v]).strip()
var='%s_%s'%(uselib,v)
env[var]=val
if not'okmsg'in kw:
kw['okmsg']='yes'
return
if'args'in kw:
lst+=Utils.to_list(kw['args'])
ret=self.cmd_and_log(lst)
if not'okmsg'in kw:
kw['okmsg']='yes'
self.define(self.have_define(kw.get('uselib_store',kw['package'])),1,0)
self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env))
return ret
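# Typical usage (illustrative, package name assumed):
#   conf.check_cfg(package='pango', args='--cflags --libs', uselib_store='PANGO')
# runs pkg-config, defines HAVE_PANGO and stores the parsed flags under *_PANGO.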
def check_cfg(self,*k,**kw):
if k:
lst=k[0].split()
kw['package']=lst[0]
kw['args']=' '.join(lst[1:])
self.validate_cfg(kw)
if'msg'in kw:
self.start_msg(kw['msg'])
ret=None
try:
ret=self.exec_cfg(kw)
except self.errors.WafError ,e:
if'errmsg'in kw:
self.end_msg(kw['errmsg'],'YELLOW')
if Logs.verbose>1:
raise
else:
self.fatal('The configuration failed')
else:
kw['success']=ret
if'okmsg'in kw:
self.end_msg(self.ret_msg(kw['okmsg'],kw))
return ret
def validate_c(self,kw):
if not'env'in kw:
kw['env']=self.env.derive()
env=kw['env']
if not'compiler'in kw and not'features'in kw:
kw['compiler']='c'
if env['CXX_NAME']and Task.classes.get('cxx',None):
kw['compiler']='cxx'
if not self.env['CXX']:
self.fatal('a c++ compiler is required')
else:
if not self.env['CC']:
self.fatal('a c compiler is required')
if not'compile_mode'in kw:
kw['compile_mode']='c'
if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx':
kw['compile_mode']='cxx'
if not'type'in kw:
kw['type']='cprogram'
if not'features'in kw:
kw['features']=[kw['compile_mode'],kw['type']]
else:
kw['features']=Utils.to_list(kw['features'])
if not'compile_filename'in kw:
kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'')
def to_header(dct):
if'header_name'in dct:
dct=Utils.to_list(dct['header_name'])
return''.join(['#include <%s>\n'%x for x in dct])
return''
if'framework_name'in kw:
fwkname=kw['framework_name']
if not'uselib_store'in kw:
kw['uselib_store']=fwkname.upper()
if not kw.get('no_header',False):
if not'header_name'in kw:
kw['header_name']=[]
fwk='%s/%s.h'%(fwkname,fwkname)
if kw.get('remove_dot_h',None):
fwk=fwk[:-2]
kw['header_name']=Utils.to_list(kw['header_name'])+[fwk]
kw['msg']='Checking for framework %s'%fwkname
kw['framework']=fwkname
if'function_name'in kw:
fu=kw['function_name']
if not'msg'in kw:
kw['msg']='Checking for function %s'%fu
kw['code']=to_header(kw)+SNIP_FUNCTION%fu
if not'uselib_store'in kw:
kw['uselib_store']=fu.upper()
if not'define_name'in kw:
kw['define_name']=self.have_define(fu)
elif'type_name'in kw:
tu=kw['type_name']
if not'header_name'in kw:
kw['header_name']='stdint.h'
if'field_name'in kw:
field=kw['field_name']
kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field}
if not'msg'in kw:
kw['msg']='Checking for field %s in %s'%(field,tu)
if not'define_name'in kw:
kw['define_name']=self.have_define((tu+'_'+field).upper())
else:
kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu}
if not'msg'in kw:
kw['msg']='Checking for type %s'%tu
if not'define_name'in kw:
kw['define_name']=self.have_define(tu.upper())
elif'header_name'in kw:
if not'msg'in kw:
kw['msg']='Checking for header %s'%kw['header_name']
l=Utils.to_list(kw['header_name'])
assert len(l)>0,'list of headers in header_name is empty'
kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM
if not'uselib_store'in kw:
kw['uselib_store']=l[0].upper()
if not'define_name'in kw:
kw['define_name']=self.have_define(l[0])
if'lib'in kw:
if not'msg'in kw:
kw['msg']='Checking for library %s'%kw['lib']
if not'uselib_store'in kw:
kw['uselib_store']=kw['lib'].upper()
if'stlib'in kw:
if not'msg'in kw:
kw['msg']='Checking for static library %s'%kw['stlib']
if not'uselib_store'in kw:
kw['uselib_store']=kw['stlib'].upper()
if'fragment'in kw:
kw['code']=kw['fragment']
if not'msg'in kw:
kw['msg']='Checking for code snippet'
if not'errmsg'in kw:
kw['errmsg']='no'
for(flagsname,flagstype)in[('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')]:
if flagsname in kw:
if not'msg'in kw:
kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname])
if not'errmsg'in kw:
kw['errmsg']='no'
if not'execute'in kw:
kw['execute']=False
if kw['execute']:
kw['features'].append('test_exec')
if not'errmsg'in kw:
kw['errmsg']='not found'
if not'okmsg'in kw:
kw['okmsg']='yes'
if not'code'in kw:
kw['code']=SNIP_EMPTY_PROGRAM
if self.env[INCKEYS]:
kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code']
if not kw.get('success'):kw['success']=None
if'define_name'in kw:
self.undefine(kw['define_name'])
assert'msg'in kw,'invalid parameters, read http://freehackers.org/~tnagy/wafbook/single.html#config_helpers_c'
def post_check(self,*k,**kw):
is_success=0
if kw['execute']:
if kw['success']is not None:
if kw.get('define_ret',False):
is_success=kw['success']
else:
is_success=(kw['success']==0)
else:
is_success=(kw['success']==0)
if'define_name'in kw:
if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw:
nm=kw['define_name']
if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str):
self.define(kw['define_name'],is_success,quote=kw.get('quote',1))
else:
self.define_cond(kw['define_name'],is_success)
else:
self.define_cond(kw['define_name'],is_success)
if'header_name'in kw:
if kw.get('auto_add_header_name',False):
self.env.append_value(INCKEYS,Utils.to_list(kw['header_name']))
if is_success and'uselib_store'in kw:
from waflib.Tools import ccroot
_vars=set([])
for x in kw['features']:
if x in ccroot.USELIB_VARS:
_vars|=ccroot.USELIB_VARS[x]
for k in _vars:
lk=k.lower()
if k=='INCLUDES':lk='includes'
if k=='DEFINES':lk='defines'
if lk in kw:
val=kw[lk]
if isinstance(val,str):
val=val.rstrip(os.path.sep)
self.env.append_unique(k+'_'+kw['uselib_store'],val)
return is_success
def check(self,*k,**kw):
self.validate_c(kw)
self.start_msg(kw['msg'])
ret=None
try:
ret=self.run_c_code(*k,**kw)
except self.errors.ConfigurationError ,e:
self.end_msg(kw['errmsg'],'YELLOW')
if Logs.verbose>1:
raise
else:
self.fatal('The configuration failed')
else:
kw['success']=ret
self.end_msg(self.ret_msg(kw['okmsg'],kw))
ret=self.post_check(*k,**kw)
if not ret:
self.fatal('The configuration failed %r'%ret)
return ret
class test_exec(Task.Task):
color='PINK'
def run(self):
if getattr(self.generator,'rpath',None):
if getattr(self.generator,'define_ret',False):
self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
else:
self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()])
else:
env=self.env.env or{}
env.update(dict(os.environ))
for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'):
env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'')
if getattr(self.generator,'define_ret',False):
self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env)
else:
self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env)
def test_exec_fun(self):
self.create_task('test_exec',self.link_task.outputs[0])
CACHE_RESULTS=1
COMPILE_ERRORS=2
def run_c_code(self,*k,**kw):
lst=[str(v)for(p,v)in kw.items()if p!='env']
h=Utils.h_list(lst)
dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h)
try:
os.makedirs(dir)
except:
pass
try:
os.stat(dir)
except:
self.fatal('cannot use the configuration test folder %r'%dir)
cachemode=getattr(Options.options,'confcache',None)
if cachemode==CACHE_RESULTS:
try:
proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_c_code'))
ret=proj['cache_run_c_code']
except:
pass
else:
if isinstance(ret,str)and ret.startswith('Test does not build'):
self.fatal(ret)
return ret
bdir=os.path.join(dir,'testbuild')
if not os.path.exists(bdir):
os.makedirs(bdir)
self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir)
bld.init_dirs()
bld.progress_bar=0
bld.targets='*'
if kw['compile_filename']:
node=bld.srcnode.make_node(kw['compile_filename'])
node.write(kw['code'])
bld.logger=self.logger
bld.all_envs.update(self.all_envs)
bld.env=kw['env']
o=bld(features=kw['features'],source=kw['compile_filename'],target='testprog')
for k,v in kw.items():
setattr(o,k,v)
self.to_log("==>\n%s\n<=="%kw['code'])
bld.targets='*'
ret=-1
try:
try:
bld.compile()
except Errors.WafError:
ret='Test does not build: %s'%Utils.ex_stack()
self.fatal(ret)
else:
ret=getattr(bld,'retval',0)
finally:
proj=ConfigSet.ConfigSet()
proj['cache_run_c_code']=ret
proj.store(os.path.join(dir,'cache_run_c_code'))
return ret
def check_cxx(self,*k,**kw):
kw['compiler']='cxx'
return self.check(*k,**kw)
def check_cc(self,*k,**kw):
kw['compiler']='c'
return self.check(*k,**kw)
def define(self,key,val,quote=True):
assert key and isinstance(key,str)
if isinstance(val,int)or isinstance(val,float):
s='%s=%s'
else:
s=quote and'%s="%s"'or'%s=%s'
app=s%(key,str(val))
ban=key+'='
lst=self.env['DEFINES']
for x in lst:
if x.startswith(ban):
lst[lst.index(x)]=app
break
else:
self.env.append_value('DEFINES',app)
self.env.append_unique(DEFKEYS,key)
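# e.g. conf.define('VERSION', '1.0') appends 'VERSION="1.0"' to env.DEFINES,
# while conf.define('NDEBUG', 1) appends 'NDEBUG=1' (numbers are never quoted).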
def undefine(self,key):
assert key and isinstance(key,str)
ban=key+'='
lst=[x for x in self.env['DEFINES']if not x.startswith(ban)]
self.env['DEFINES']=lst
self.env.append_unique(DEFKEYS,key)
def define_cond(self,key,val):
assert key and isinstance(key,str)
if val:
self.define(key,1)
else:
self.undefine(key)
def is_defined(self,key):
assert key and isinstance(key,str)
ban=key+'='
for x in self.env['DEFINES']:
if x.startswith(ban):
return True
return False
def get_define(self,key):
assert key and isinstance(key,str)
ban=key+'='
for x in self.env['DEFINES']:
if x.startswith(ban):
return x[len(ban):]
return None
def have_define(self,key):
return self.__dict__.get('HAVE_PAT','HAVE_%s')%Utils.quote_define_name(key)
def write_config_header(self,configfile='',guard='',top=False,env=None,defines=True,headers=False,remove=True):
if not configfile:configfile=WAF_CONFIG_H
waf_guard=guard or'_%s_WAF'%Utils.quote_define_name(configfile)
node=top and self.bldnode or self.path.get_bld()
node=node.make_node(configfile)
node.parent.mkdir()
lst=['/* WARNING! All changes made to this file will be lost! */\n']
lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard))
lst.append(self.get_config_header(defines,headers))
lst.append('\n#endif /* %s */\n'%waf_guard)
node.write('\n'.join(lst))
env=env or self.env
env.append_unique(Build.CFG_FILES,[node.abspath()])
if remove:
for key in self.env[DEFKEYS]:
self.undefine(key)
self.env[DEFKEYS]=[]
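# Illustrative: conf.write_config_header('config.h') writes every key collected
# via define()/undefine() as '#define KEY value' or '/* #undef KEY */' lines,
# wrapped in a _CONFIG_H_WAF include guard, then clears the collected keys.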
def get_config_header(self,defines=True,headers=False):
lst=[]
if headers:
for x in self.env[INCKEYS]:
lst.append('#include <%s>'%x)
if defines:
for x in self.env[DEFKEYS]:
if self.is_defined(x):
val=self.get_define(x)
lst.append('#define %s %s'%(x,val))
else:
lst.append('/* #undef %s */'%x)
return"\n".join(lst)
def cc_add_flags(conf):
conf.add_os_flags('CPPFLAGS','CFLAGS')
conf.add_os_flags('CFLAGS')
def cxx_add_flags(conf):
conf.add_os_flags('CPPFLAGS','CXXFLAGS')
conf.add_os_flags('CXXFLAGS')
def link_add_flags(conf):
conf.add_os_flags('LINKFLAGS')
conf.add_os_flags('LDFLAGS','LINKFLAGS')
def cc_load_tools(conf):
if not conf.env.DEST_OS:
conf.env.DEST_OS=Utils.unversioned_sys_platform()
conf.load('c')
def cxx_load_tools(conf):
if not conf.env.DEST_OS:
conf.env.DEST_OS=Utils.unversioned_sys_platform()
conf.load('cxx')
def get_cc_version(conf,cc,gcc=False,icc=False):
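	# '-dM -E' on empty stdin makes gcc-compatible compilers dump their
	# predefined macros; these are parsed below to derive DEST_OS/DEST_CPU/version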
cmd=cc+['-dM','-E','-']
try:
p=Utils.subprocess.Popen(cmd,stdin=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE,stderr=Utils.subprocess.PIPE)
p.stdin.write('\n')
out=p.communicate()[0]
except:
conf.fatal('could not determine the compiler version %r'%cmd)
if not isinstance(out,str):
out=out.decode(sys.stdout.encoding)
if gcc:
if out.find('__INTEL_COMPILER')>=0:
conf.fatal('The intel compiler pretends to be gcc')
if out.find('__GNUC__')<0:
conf.fatal('Could not determine the compiler type')
if icc and out.find('__INTEL_COMPILER')<0:
conf.fatal('Not icc/icpc')
k={}
if icc or gcc:
out=out.split('\n')
import shlex
for line in out:
lst=shlex.split(line)
if len(lst)>2:
key=lst[1]
val=lst[2]
k[key]=val
def isD(var):
return var in k
def isT(var):
return var in k and k[var]!='0'
if not conf.env.DEST_OS:
conf.env.DEST_OS=''
for i in MACRO_TO_DESTOS:
if isD(i):
conf.env.DEST_OS=MACRO_TO_DESTOS[i]
break
else:
if isD('__APPLE__')and isD('__MACH__'):
conf.env.DEST_OS='darwin'
elif isD('__unix__'):
conf.env.DEST_OS='generic'
if isD('__ELF__'):
conf.env.DEST_BINFMT='elf'
elif isD('__WINNT__')or isD('__CYGWIN__'):
conf.env.DEST_BINFMT='pe'
conf.env.LIBDIR=conf.env['PREFIX']+'/bin'
elif isD('__APPLE__'):
conf.env.DEST_BINFMT='mac-o'
if not conf.env.DEST_BINFMT:
conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS)
for i in MACRO_TO_DEST_CPU:
if isD(i):
conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i]
break
Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')]))
if icc:
ver=k['__INTEL_COMPILER']
conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1])
else:
conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__'])
return k
def add_as_needed(self):
if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME):
self.env.append_unique('LINKFLAGS','--as-needed')
class cfgtask(Task.TaskBase):
def display(self):
return''
def runnable_status(self):
return Task.RUN_ME
def run(self):
conf=self.conf
bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath())
bld.env=conf.env
bld.init_dirs()
bld.in_msg=1
bld.logger=self.logger
try:
bld.check(**self.args)
except:
return 1
def multicheck(self,*k,**kw):
self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)))
class par(object):
def __init__(self):
self.keep=False
self.cache_global=Options.cache_global
self.nocache=Options.options.nocache
self.returned_tasks=[]
def total(self):
return len(tasks)
def to_log(self,*k,**kw):
return
bld=par()
tasks=[]
for dct in k:
x=cfgtask(bld=bld)
tasks.append(x)
x.args=dct
x.bld=bld
x.conf=self
x.args=dct
x.logger=Logs.make_mem_logger(str(id(x)),self.logger)
def it():
yield tasks
while 1:
yield[]
p=Runner.Parallel(bld,Options.options.jobs)
p.biter=it()
p.start()
for x in tasks:
x.logger.memhandler.flush()
for x in tasks:
if x.hasrun!=Task.SUCCESS:
self.end_msg(kw.get('errmsg','no'),color='YELLOW')
self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, see the config.log for more information')
self.end_msg('ok')
conf(parse_flags)
conf(ret_msg)
conf(validate_cfg)
conf(exec_cfg)
conf(check_cfg)
conf(validate_c)
conf(post_check)
conf(check)
feature('test_exec')(test_exec_fun)
after_method('apply_link')(test_exec_fun)
conf(run_c_code)
conf(check_cxx)
conf(check_cc)
conf(define)
conf(undefine)
conf(define_cond)
conf(is_defined)
conf(get_define)
conf(have_define)
conf(write_config_header)
conf(get_config_header)
conf(cc_add_flags)
conf(cxx_add_flags)
conf(link_add_flags)
conf(cc_load_tools)
conf(cxx_load_tools)
conf(get_cc_version)
conf(add_as_needed)
conf(multicheck)
| diedthreetimes/VCrash | .waf-1.6.7-0a94702c61504c487a251b8d0a04ca9a/waflib/Tools/c_config.py | Python | gpl-2.0 | 20820 |
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Holoscopio Tecnologia
# Author: Marcelo Jorge Vieira <[email protected]>
# Author: Thadeu Lima de Souza Cascardo <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gobject
import pygst
pygst.require("0.10")
import gst
from core import Input, INPUT_TYPE_VIDEO
CAPABILITIES = INPUT_TYPE_VIDEO
class VideoTestInput(Input):
def __init__(self):
Input.__init__(self, CAPABILITIES)
self.video_src = gst.element_factory_make("videotestsrc", "video_src")
self.video_src.set_property("is-live", True)
self.add(self.video_src)
self.capsfilter = gst.element_factory_make("capsfilter", "capsfilter")
self.add(self.capsfilter)
gst.element_link_many(self.video_src, self.capsfilter)
self.video_pad.set_target(self.capsfilter.src_pads().next())
def config(self, dict):
self.video_src.set_property("pattern", int(dict["pattern"]))
caps = gst.caps_from_string(
"video/x-raw-yuv, width=%d, height=%d;"
"video/x-raw-rgb, width=%d, height=%d" % (
int(dict["width"]), int(dict["height"]),
int(dict["width"]), int(dict["height"])
)
)
self.capsfilter.set_property("caps", caps)
| landell/landell | sltv/input/videotestinput.py | Python | gpl-2.0 | 1,971 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Rasterize.py
-------------------
begin : 2016-10-05
copyright : (C) 2016 by OPENGIS.ch
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from qgis.PyQt.QtGui import QImage, QPainter
from qgis.PyQt.QtCore import QSize
from qgis.core import (
QgsMapSettings,
QgsMapRendererCustomPainterJob,
QgsRectangle,
QgsProject,
QgsProcessingException,
QgsProcessingParameterExtent,
QgsProcessingParameterString,
QgsProcessingParameterNumber,
QgsProcessingParameterMapLayer,
QgsProcessingParameterRasterDestination,
QgsRasterFileWriter
)
import qgis
import osgeo.gdal
import os
import tempfile
import math
__author__ = 'Matthias Kuhn'
__date__ = '2016-10-05'
__copyright__ = '(C) 2016 by OPENGIS.ch'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
class RasterizeAlgorithm(QgisAlgorithm):
"""Processing algorithm renders map canvas to a raster file.
It's possible to choose the following parameters:
- Map theme to render
- Layer to render
- The minimum extent to render
- The tile size
- Map unit per pixel
- The output (can be saved to a file or to a temporary file and
automatically opened as layer in qgis)
"""
# Constants used to refer to parameters and outputs. They will be
# used when calling the algorithm from another algorithm, or when
# calling from the QGIS console.
OUTPUT = 'OUTPUT'
MAP_THEME = 'MAP_THEME'
LAYER = 'LAYER'
EXTENT = 'EXTENT'
TILE_SIZE = 'TILE_SIZE'
MAP_UNITS_PER_PIXEL = 'MAP_UNITS_PER_PIXEL'
def __init__(self):
super().__init__()
def initAlgorithm(self, config=None):
"""Here we define the inputs and output of the algorithm, along
with some other properties.
"""
# The parameters
self.addParameter(
QgsProcessingParameterExtent(self.EXTENT, description=self.tr(
'Minimum extent to render')))
self.addParameter(
QgsProcessingParameterNumber(
self.TILE_SIZE,
self.tr('Tile size'),
defaultValue=1024, minValue=64))
self.addParameter(QgsProcessingParameterNumber(
self.MAP_UNITS_PER_PIXEL,
self.tr(
'Map units per '
'pixel'),
defaultValue=100,
minValue=0,
type=QgsProcessingParameterNumber.Double
))
map_theme_param = QgsProcessingParameterString(
self.MAP_THEME,
description=self.tr(
'Map theme to render'),
defaultValue=None, optional=True)
map_theme_param.setMetadata(
{'widget_wrapper': {
'class':
'processing.gui.wrappers_map_theme.MapThemeWrapper'}})
self.addParameter(map_theme_param)
self.addParameter(
QgsProcessingParameterMapLayer(
self.LAYER,
description=self.tr(
'Single layer to render'),
optional=True))
# We add a raster layer as output
self.addParameter(QgsProcessingParameterRasterDestination(
self.OUTPUT,
self.tr(
'Output layer')))
def name(self):
# Unique (non-user visible) name of algorithm
return 'rasterize'
def displayName(self):
# The name that the user will see in the toolbox
return self.tr('Convert map to raster')
def group(self):
return self.tr('Raster tools')
def tags(self):
return self.tr('layer,raster,convert,file,map themes,tiles,render').split(',')
# def processAlgorithm(self, progress):
def processAlgorithm(self, parameters, context, feedback):
"""Here is where the processing itself takes place."""
# The first thing to do is retrieve the values of the parameters
# entered by the user
map_theme = self.parameterAsString(
parameters,
self.MAP_THEME,
context)
layer = self.parameterAsLayer(
parameters,
self.LAYER,
context)
extent = self.parameterAsExtent(
parameters,
self.EXTENT,
context)
tile_size = self.parameterAsInt(
parameters,
self.TILE_SIZE,
context)
mupp = self.parameterAsDouble(
parameters,
self.MAP_UNITS_PER_PIXEL,
context)
output_layer = self.parameterAsOutputLayer(
parameters,
self.OUTPUT,
context)
tile_set = TileSet(map_theme, layer, extent, tile_size, mupp,
output_layer,
qgis.utils.iface.mapCanvas().mapSettings())
tile_set.render(feedback)
return {self.OUTPUT: output_layer}
class TileSet():
"""
A set of tiles
"""
def __init__(self, map_theme, layer, extent, tile_size, mupp, output,
map_settings):
"""
:param map_theme:
:param extent:
:param layer:
:param tile_size:
:param mupp:
:param output:
:param map_settings: Map canvas map settings used for some fallback
values and CRS
"""
self.extent = extent
self.mupp = mupp
self.tile_size = tile_size
driver = self.getDriverForFile(output)
if not driver:
raise QgsProcessingException(
u'Could not load GDAL driver for file {}'.format(output))
crs = map_settings.destinationCrs()
self.x_tile_count = math.ceil(extent.width() / mupp / tile_size)
self.y_tile_count = math.ceil(extent.height() / mupp / tile_size)
xsize = self.x_tile_count * tile_size
ysize = self.y_tile_count * tile_size
self.dataset = driver.Create(output, xsize, ysize, 3) # 3 bands
self.dataset.SetProjection(str(crs.toWkt()))
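        # GDAL geotransform: (top-left x, pixel width, row rotation,
        # top-left y, column rotation, pixel height; negative = north-up image)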
self.dataset.SetGeoTransform(
[extent.xMinimum(), mupp, 0, extent.yMaximum(), 0, -mupp])
self.image = QImage(QSize(tile_size, tile_size), QImage.Format_ARGB32)
self.settings = QgsMapSettings()
self.settings.setOutputDpi(self.image.logicalDpiX())
self.settings.setOutputImageFormat(QImage.Format_ARGB32)
self.settings.setDestinationCrs(crs)
self.settings.setOutputSize(self.image.size())
self.settings.setFlag(QgsMapSettings.Antialiasing, True)
self.settings.setFlag(QgsMapSettings.RenderMapTile, True)
if QgsProject.instance().mapThemeCollection().hasMapTheme(map_theme):
self.settings.setLayers(
QgsProject.instance().mapThemeCollection(
).mapThemeVisibleLayers(
map_theme))
self.settings.setLayerStyleOverrides(
QgsProject.instance().mapThemeCollection(
).mapThemeStyleOverrides(
map_theme))
elif layer:
self.settings.setLayers([layer])
else:
self.settings.setLayers(map_settings.layers())
def render(self, feedback):
for x in range(self.x_tile_count):
for y in range(self.y_tile_count):
if feedback.isCanceled():
return
cur_tile = x * self.y_tile_count + y
num_tiles = self.x_tile_count * self.y_tile_count
self.renderTile(x, y, feedback)
feedback.setProgress(int((cur_tile / num_tiles) * 100))
def renderTile(self, x, y, feedback):
"""
Render one tile
:param x: The x index of the current tile
:param y: The y index of the current tile
"""
painter = QPainter(self.image)
self.settings.setExtent(QgsRectangle(
self.extent.xMinimum() + x * self.mupp * self.tile_size,
self.extent.yMaximum() - (y + 1) * self.mupp * self.tile_size,
self.extent.xMinimum() + (x + 1) * self.mupp * self.tile_size,
self.extent.yMaximum() - y * self.mupp * self.tile_size))
job = QgsMapRendererCustomPainterJob(self.settings, painter)
job.renderSynchronously()
painter.end()
        # delete=False: the file must not be auto-deleted here, or Windows
        # would remove it before GDAL can open it...
tmpfile = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
try:
self.image.save(tmpfile.name)
src_ds = osgeo.gdal.Open(tmpfile.name)
self.dataset.WriteRaster(x * self.tile_size, y * self.tile_size,
self.tile_size, self.tile_size,
src_ds.ReadRaster(0, 0, self.tile_size,
self.tile_size))
except Exception as e:
feedback.reportError(str(e))
finally:
del src_ds
tmpfile.close()
os.unlink(tmpfile.name)
def getDriverForFile(self, filename):
"""
Get the GDAL driver for a filename, based on its extension. (.gpkg,
.mbtiles...)
"""
_, extension = os.path.splitext(filename)
# If no extension is set, use .tif as default
if extension == '':
extension = '.tif'
driver_name = QgsRasterFileWriter.driverForExtension(extension[1:])
return osgeo.gdal.GetDriverByName(driver_name)
| GeoCat/QGIS | python/plugins/processing/algs/qgis/Rasterize.py | Python | gpl-2.0 | 10479 |
""" Page functions for Host Aggregates pages
"""
import attr
from navmazing import NavigateToAttribute
from widgetastic_patternfly import BootstrapNav
from widgetastic_patternfly import BreadCrumb
from widgetastic_patternfly import Button
from widgetastic_patternfly import Dropdown
from widgetastic_patternfly import View
from cfme.base.ui import BaseLoggedInPage
from cfme.common import Taggable
from cfme.common import TaggableCollection
from cfme.exceptions import ItemNotFound
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.providers import get_crud_by_name
from widgetastic_manageiq import Accordion
from widgetastic_manageiq import BaseEntitiesView
from widgetastic_manageiq import ItemsToolBarViewSelector
from widgetastic_manageiq import ManageIQTree
from widgetastic_manageiq import PaginationPane
from widgetastic_manageiq import Search
from widgetastic_manageiq import SummaryTable
from widgetastic_manageiq import Text
class HostAggregatesView(BaseLoggedInPage):
@property
def in_host_aggregates(self):
return (
self.logged_in_as_current_user and
self.navigation.currently_selected == ['Compute', 'Clouds', 'Host Aggregates']
)
class HostAggregatesToolBar(View):
policy = Dropdown('Policy')
download = Dropdown('Download')
configuration = Dropdown('Configuration')
view_selector = View.nested(ItemsToolBarViewSelector)
class HostAggregatesEntities(BaseEntitiesView):
pass
class HostAggregatesDetailsToolBar(View):
policy = Dropdown('Policy')
download = Button(title='Print or export summary')
configuration = Dropdown('Configuration')
class HostAggregatesDetailsAccordion(View):
@View.nested
class properties(Accordion): # noqa
tree = ManageIQTree()
@View.nested
class relationships(Accordion): # noqa
tree = ManageIQTree()
class HostAggregatesDetailsEntities(View):
breadcrumb = BreadCrumb()
title = Text('//div[@id="main-content"]//h1')
properties = SummaryTable(title='Properties')
relationships = SummaryTable(title='Relationships')
smart_management = SummaryTable(title='Smart Management')
class HostAggregatesAllView(HostAggregatesView):
toolbar = HostAggregatesToolBar()
paginator = PaginationPane()
search = View.nested(Search)
including_entities = View.include(HostAggregatesEntities, use_parent=True)
@View.nested
class my_filters(Accordion): # noqa
ACCORDION_NAME = "My Filters"
navigation = BootstrapNav('.//div/ul')
tree = ManageIQTree()
@property
def is_displayed(self):
return (
self.in_host_aggregates and
self.entities.title.text == 'Host Aggregates')
class HostAggregatesDetailsView(HostAggregatesView):
@property
def is_displayed(self):
obj = self.context['object']
return (
self.in_host_aggregates and
self.entities.title.text == obj.expected_details_title and
self.entities.breadcrumb.active_location == obj.expected_details_breadcrumb and
self.entities.relationships.get_text_of('Cloud Provider') == obj.provider.name
)
toolbar = HostAggregatesDetailsToolBar()
sidebar = HostAggregatesDetailsAccordion()
entities = HostAggregatesDetailsEntities()
@attr.s
class HostAggregates(BaseEntity, Taggable):
"""
Host Aggregates class to support navigation
"""
_param_name = "HostAggregate"
name = attr.ib()
provider = attr.ib()
ram = attr.ib(default=None)
vcpus = attr.ib(default=None)
disk = attr.ib(default=None)
swap = attr.ib(default=None)
rxtx = attr.ib(default=None)
is_public = attr.ib(default=True)
tenant = attr.ib(default=None)
def refresh(self):
"""Refresh provider relationships and browser"""
self.provider.refresh_provider_relationships()
self.browser.refresh()
@property
def instance_count(self):
""" number of instances using host aggregates.
Returns:
:py:class:`int` instance count.
"""
view = navigate_to(self, 'Details')
return int(view.entities.relationships.get_text_of('Instances'))
@attr.s
class HostAggregatesCollection(BaseCollection, TaggableCollection):
ENTITY = HostAggregates
def all(self):
provider = self.filters.get('provider') # None if no filter, need for entity instantiation
view = navigate_to(self, 'All')
result = []
        for aggregate in view.entities.get_all(surf_pages=True):
            if provider is not None:
                if aggregate.data['cloud_provider'] != provider.name:
                    continue
                entity = self.instantiate(aggregate.data['name'], provider)
            else:
                entity = self.instantiate(aggregate.data['name'],
                                          get_crud_by_name(aggregate.data['cloud_provider']))
            result.append(entity)
return result
@navigator.register(HostAggregatesCollection, 'All')
class HostAggregatesAll(CFMENavigateStep):
VIEW = HostAggregatesAllView
prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
def step(self, *args, **kwargs):
self.prerequisite_view.navigation.select('Compute', 'Clouds', 'Host Aggregates')
@navigator.register(HostAggregates, 'Details')
class HostAggregatesDetails(CFMENavigateStep):
VIEW = HostAggregatesDetailsView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self, *args, **kwargs):
self.prerequisite_view.toolbar.view_selector.select('List View')
try:
row = self.prerequisite_view.entities.get_entity(name=self.obj.name, surf_pages=True)
except ItemNotFound:
raise ItemNotFound('Could not locate host aggregate "{}" on provider {}'
.format(self.obj.name, self.obj.provider.name))
row.click()
| nachandr/cfme_tests | cfme/cloud/host_aggregates.py | Python | gpl-2.0 | 6,202 |
#! /usr/bin/env python
# @ORIGINAL_AUTHOR: Robert Muth
#
# python.org has useful info about the Python programming language
#
# The Python library is described here: http://docs.python.org/lib/lib.html
# And the index for the library here: http://docs.python.org/lib/genindex.html
import sys
import os
import getopt
import re
import string
import copy
#######################################################################
# Version
#######################################################################
def Version():
(l,v,x) = string.split('$Revision: 1.5 $')
return v
#######################################################################
# Usage
#######################################################################
def Usage():
print "Usage: flowgraph.py [OPTION]+ assembler-listing edge-profile"
print
print "flowgraph converts a disassembled routine into a flowgraph which can be rendered using vcg"
print
print "assembler-listing is a textual disassembler listing generated with"
print "objdump-routine.csh or directly with objdump"
print
print "edge-profile is a profile generated with the edgcnt Pin tool"
return -1
#######################################################################
# Messages
#######################################################################
def Info(str):
print >> sys.stderr,"I:",str
return
def Warning(str):
print >> sys.stderr,"W:", str
return
def Error(str):
print >> sys.stderr, "E:",str
sys.exit(-1)
#######################################################################
#
#######################################################################
# 402d05: 41 56 push %r14
PatternNoFallthrough = re.compile(r'call|ret|jmp')
PatternCall = re.compile(r'call')
class INS:
def __init__(self, addr, opcode ):
self._addr = addr
self._opcode = opcode
self._next = None
self._leader = 0
self._bbl = None
return
def get_opcode(self):
return self._opcode
def set_next(self,next):
self._next = next
return
def get_next(self):
return self._next
def get_addr(self):
return self._addr
def get_leader(self):
return self._leader
def set_leader(self,leader):
self._leader = leader
def get_bbl(self):
return self._bbl
def set_bbl(self,bbl):
self._bbl = bbl
def has_no_fallthru(self):
return PatternNoFallthrough.search(self._opcode)
def is_call(self):
return PatternCall.search(self._opcode)
#######################################################################
##
#######################################################################
ALL_INS = {}
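# A listing line such as "402d05: 41 56  push %r14": group(1) captures the hex
# address, the raw hex byte pairs are skipped, and group(2) is the opcode text.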
PatternAssembler = re.compile(r'^\s*([0-9a-fA-F]+):\s*(?:[0-9a-fA-F][0-9a-fA-F] )+\s*(.+)$')
def ProcessAssemblerListing(lines):
last_ins = None
for l in lines:
        match = PatternAssembler.match(l)
if not match:
# print "bad line ",l
continue
addr = long(match.group(1),16)
ins = INS( addr, match.group(2) )
ALL_INS[addr] = ins
if last_ins:
last_ins.set_next(ins)
last_ins = ins
return
#######################################################################
# 0x0000000000400366 0x0000000000402300 2182
PatternEdge2 = re.compile(r'^\s*0x([0-9a-fA-F]+)\s+0x([0-9a-fA-F]+)\s+([0-9]+)\s*$')
PatternEdge3 = re.compile(r'^\s*0x([0-9a-fA-F]+)\s+0x([0-9a-fA-F]+)\s+([a-zA-Z])\s+([0-9]+)\s*$')
def ProcessEdgProfile(lines):
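    # The first line must be an "EDGCOUNT <version>" header; v2 rows are
    # "src dst count", v3 rows add an edge-type letter before the count.
    # Branch targets and the instructions following branch sources become
    # basic-block leaders; only edges touching known instructions are kept.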
version = string.split(lines[0])
if version[0] != "EDGCOUNT":
Error("files is not an edge profile")
if version[1] == "2.0":
v = 2
elif version[1] == "3.0":
v = 3
else:
Error("unsupported edge profile version")
edg_list = []
for l in lines[1:]:
if v == 2:
match = PatternEdge2.match(l)
elif v==3:
match = PatternEdge3.match(l)
if not match: continue
if v == 2:
src = long(match.group(1),16)
dst = long(match.group(2),16)
count = long(match.group(3))
type = "u"
elif v == 3:
src = long(match.group(1),16)
dst = long(match.group(2),16)
type = match.group(3)
count = long(match.group(4))
if ALL_INS.has_key(src):
next = ALL_INS[src].get_next()
if next: next.set_leader(1)
if ALL_INS.has_key(dst):
ins = ALL_INS[dst]
ins.set_leader(1)
if ALL_INS.has_key(src) or ALL_INS.has_key(dst):
edg_list.append( (src,dst,count,type) )
return edg_list
#######################################################################
#
#######################################################################
class EDG:
def __init__(self,src,dst,count, type):
self._src = src
self._dst = dst
self._count = count
self._type = type
return
    def is_fallthru(self):
        return self._type == "F"
def StringVCG(self, threshold = 100000000000L):
s = ""
if self._count > threshold:
s += "\t" + "nearedge:\n"
else:
s += "\t" + "edge:\n"
s += "\t{\n"
s += "\t\t" + "sourcename: \"" + hex(self._src._start) + "\"\n"
s += "\t\t" + "targetname: \"" + hex(self._dst._start) + "\"\n"
if self._type == "F" or self._type == "L":
s += "\t\t" + "thickness: 4\n"
else:
s += "\t\t" + "thickness: 2\n"
s += "\t\t" + "label: \"%s(%d)\"\n" % (self._type,self._count)
# s += "\t\t" + "priority: %d\n" % self._count
s += "\t}\n"
return s
#######################################################################
class BBL:
def __init__(self,start):
self._start = start
self._ins = []
self._in = []
self._out = []
self._count = 0
self._in_count = 0
self._out_count = 0
self._next = None
return
def add_ins(self,ins):
self._ins.append(ins)
self._end = ins.get_addr()
return
def set_count(self,count):
assert( self._count == 0 )
self._count = count
return
def add_out_edg(self, edg ):
self._out.append(edg)
return
def add_in_edg(self, edg ):
self._in.append(edg)
return
def add_in_count(self, count ):
self._in_count += count
return
def add_out_count(self, count ):
self._out_count += count
return
def count_in(self):
count = self._in_count
for e in self._in: count += e._count
return count
def count_out(self):
count = self._out_count
for e in self._out: count += e._count
return count
def set_next(self,next):
self._next = next
return
def get_next(self):
return self._next
def get_start(self):
return self._start
def is_call(self):
return self._ins[-1].is_call()
def has_no_fallthru(self):
return self._ins[-1].has_no_fallthru()
def String(self):
s = "BBL at %x count %d (i: %d o: %d)\n" % (self._start, self._count, self._in_count, self._out_count)
s += "i: "
for edg in self._in:
s += "%x (%d) " % (edg._src.get_start(),edg._count)
s += "\n"
s += "o: "
for edg in self._out:
s += "%x (%d) " % (edg._dst.get_start(),edg._count)
s += "\n"
for ins in self._ins:
s += "%x %s\n" % (ins.get_addr(),ins.get_opcode())
return s
def StringVCG(self,threshold=1000):
s = "\t" + "node:\n"
s += "\t" + "{\n"
if self._count > threshold:
s += "\t\t" + "color: red\n"
s += "\t\t" + "title: \"" + hex(self._start) + "\"\n"
s += "\t\t" + "label: \"" + hex(self._start) + " (" + str(self._count) + ")\\n"
for ins in self._ins: s += "%x: %s\\n" % (ins.get_addr(),ins.get_opcode())
s += "\"\n"
s += "\t" + "}\n"
return s
#######################################################################
#
#######################################################################
ALL_BBL = {}
ALL_EDG = []
#######################################################################
#
#######################################################################
def CreateCFG(edg_list):
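    # Three passes: carve the instruction stream into basic blocks at the
    # leaders, wire up the profiled edges (optionally dropping interprocedural
    # ones), then propagate counts and synthesize fallthrough/link edges.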
no_interproc_edges = 1
ins_list = ALL_INS.items()
ins_list.sort() # by addr
bbl_list = []
Info("BBL create")
last = None
for (a,ins) in ins_list:
if ins.get_leader():
start = ins.get_addr()
bbl = BBL(start)
bbl_list.append(bbl)
ALL_BBL[start] = bbl
if last: last.set_next( bbl )
last = bbl
last.add_ins( ins )
ins.set_bbl( last )
if ins.has_no_fallthru():
next = ins.get_next()
if next: next.set_leader(1)
Info( "Created %d bbls" % len(bbl_list))
# for bbl in bbl_list: print bbl.String()
Info( "EDG create")
for (src,dst,count,type) in edg_list:
if ALL_INS.has_key(src):
bbl_src = ALL_INS[src].get_bbl()
else:
assert( ALL_BBL.has_key(dst) )
if no_interproc_edges:
ALL_BBL[dst].add_in_count(count)
continue
bbl_src = BBL(src)
ALL_BBL[src] = bbl_src
if ALL_BBL.has_key(dst):
bbl_dst = ALL_BBL[dst]
else:
if no_interproc_edges:
bbl_src.add_out_count(count)
continue
bbl_dst = BBL(dst)
ALL_BBL[dst] = bbl_dst
edg = EDG( bbl_src, bbl_dst, count, type)
ALL_EDG.append( edg )
bbl_src.add_out_edg( edg )
bbl_dst.add_in_edg( edg )
Info("propagate counts and add fallthrus")
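    # Flow conservation: a block runs as often as control enters it; any
    # entering flow not consumed by explicit out-edges leaves via fallthrough,
    # and calls get a zero-count "L" link edge to their return point.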
for bbl in bbl_list:
count = bbl.count_in()
bbl.set_count(count)
count -= bbl.count_out()
if count < 0:
Warning("negative fallthru count")
count = 0
next = bbl.get_next()
if count > 0:
if bbl.has_no_fallthru():
Info("losing flow %d\n" % count)
elif next:
edg = EDG(bbl,next,count,"F")
ALL_EDG.append( edg )
bbl.add_out_edg( edg )
next.add_in_edg( edg )
if bbl.is_call() and next:
edg = EDG(bbl,next, 0,"L")
ALL_EDG.append( edg )
bbl.add_out_edg( edg )
next.add_in_edg( edg )
# for bbl in bbl_list: print bbl.String()
return bbl_list
def DumpVCG():
start = 0
end = 0
print "// ###################################################################################"
print "// VCG Flowgraph for %x - %x" % (start,end)
print "// ###################################################################################"
print "graph:"
    print "{"
    print "title: \"Control Flow Graph for rtn %x - %x \"" % (start,end)
    print "label: \"Control Flow Graph for rtn %x - %x \"" % (start,end)
print "display_edge_labels: yes"
print "layout_downfactor: 100"
print "layout_nearfactor: 10"
print "layout_upfactor: 1"
# print "dirty_edge_labels: yes"
print "layout_algorithm: mindepth"
    print "manhattan_edges: yes"
print "edge.arrowsize: 15"
print "late_edge_labels: yes"
for e in ALL_EDG:
print e.StringVCG()
bbl_list = ALL_BBL.items()
bbl_list.sort()
for (x,b) in bbl_list:
print b.StringVCG()
    print "}"
print "// eof"
return
#######################################################################
# Main
#######################################################################
def Main(argv):
if len(argv) != 2:
Usage()
return -1
Info( "Reading listing")
filename = argv[0]
try:
input = open(filename, "r")
lines = input.readlines()
input.close()
    except IOError:
        Error("cannot read data " + filename)
ProcessAssemblerListing(lines)
Info( "Reading edges")
filename = argv[1]
try:
input = open(filename, "r")
lines = input.readlines()
input.close()
    except IOError:
        Error("cannot read data " + filename)
edg_list = ProcessEdgProfile(lines)
Info("Read %d edges" % len(edg_list))
bbl_list = CreateCFG( edg_list)
Info("Dump VCG to stdout")
DumpVCG()
return 0
#######################################################################
#
#######################################################################
if __name__ == "__main__":
sys.exit( Main( sys.argv[1:]) )
#######################################################################
# eof
#######################################################################
| cyjseagull/SHMA | zsim-nvmain/pin_kit/source/tools/SimpleExamples/flowgraph.py | Python | gpl-2.0 | 13,531 |
# -*- coding: UTF-8 -*-
__revision__ = '$Id$'
# Written by Christian Sagmueller <[email protected]>
# based on PluginMovieIMDB.py, Copyright (c) 2005 Vasco Nunes
# You may use and distribute this software under the terms of the
# GNU General Public License, version 2 or later
import gutils
import movie,string,re
plugin_name = "OFDb"
plugin_description = "Online-Filmdatenbank"
plugin_url = "www.ofdb.de"
plugin_language = _("German")
plugin_author = "Christian Sagmueller, Jessica Katharina Parth"
plugin_author_email = "[email protected]"
plugin_version = "0.11"
class Plugin(movie.Movie):
def __init__(self, id):
self.encode = 'utf-8'
self.movie_id = id
self.url = "http://www.ofdb.de/%s" % str(self.movie_id)
def initialize(self):
# OFDb didn't provide the runtime, studio and classification but it provide a link to the german imdb entry
# lets use the imdb page, why not
imdb_nr = gutils.trim(self.page, 'http://german.imdb.com/Title?', '"')
if imdb_nr != '':
self.imdb_page = self.open_page(url='http://www.imdb.de/Title?' + imdb_nr)
else:
imdb_nr = gutils.trim(self.page, 'http://www.imdb.com/Title?', '"')
if imdb_nr != '':
self.imdb_page = self.open_page(url='http://www.imdb.de/Title?' + imdb_nr)
else:
self.imdb_page = ''
def get_image(self):
self.image_url = "http://img.ofdb.de/film/" + gutils.trim(self.page, 'img src="http://img.ofdb.de/film/', '"' )
def get_o_title(self):
        self.o_title = gutils.clean(gutils.trim(self.page, 'Originaltitel:', '</tr>'))
        if self.o_title != '':
            self.o_title = string.replace(self.o_title, '&nbsp;', '')
def get_title(self):
self.title = gutils.trim(self.page,'size="3"><b>','<')
def get_director(self):
self.director = gutils.trim(self.page,"Regie:","</a><br>")
def get_plot(self):
self.plot = ''
storyid = gutils.regextrim(self.page, '<a href="plot/', '(">|[&])')
        if storyid is not None:
story_page = self.open_page(url="http://www.ofdb.de/plot/%s" % (storyid.encode('utf8')))
if story_page:
self.plot = gutils.trim(story_page, "</b><br><br>","</")
def get_year(self):
self.year = gutils.trim(self.page,"Erscheinungsjahr:","</a>")
self.year = gutils.strip_tags(self.year)
def get_runtime(self):
# from imdb
self.runtime = gutils.after(gutils.regextrim(self.imdb_page, 'itemprop="duration"', ' (min|Min)'), '>')
def get_genre(self):
self.genre = gutils.trim(self.page,"Genre(s):","</table>")
self.genre = string.replace(self.genre, "<br>", ", ")
self.genre = gutils.strip_tags(self.genre)
self.genre = string.replace(self.genre, "/", ", ")
self.genre = gutils.clean(self.genre)
self.genre = self.genre[0:-1]
def get_cast(self):
self.cast = ''
movie_id_elements = string.split(self.movie_id, ',')
movie_id_elements[0] = string.replace(movie_id_elements[0], "film/", "")
cast_page = self.open_page(url="http://www.ofdb.de/view.php?page=film_detail&fid=%s" % str(movie_id_elements[0]) )
self.cast = gutils.trim(cast_page, 'Darsteller</i>', '</table>')
self.cast = re.sub('(\n|\t| )', '', self.cast)
self.cast = string.replace(self.cast, '\t', '')
self.cast = string.replace(self.cast, 'class="Daten">', '>\n')
self.cast = string.strip(gutils.strip_tags(self.cast))
self.cast = string.replace(self.cast, '... ', _(' as '))
self.cast = gutils.clean(self.cast)
def get_classification(self):
# from imdb
self.classification = gutils.regextrim(gutils.regextrim(self.imdb_page, '(Altersfreigabe|Certification):', '</div>'), '(Deutschland|Germany):', '(&|[|])')
def get_studio(self):
# from imdb
self.studio = gutils.regextrim(self.imdb_page, 'Production Co:', '(<span|</span>)')
def get_o_site(self):
self.o_site = gutils.trim(gutils.regextrim(self.imdb_page, 'Official Sites:', '(<span|</span>)'), 'href="', '"')
def get_site(self):
self.site = self.url
def get_trailer(self):
self.trailer = ""
def get_country(self):
self.country = gutils.trim(self.page,"Herstellungsland:","</a>")
def get_rating(self):
self.rating = gutils.trim(self.page,"<br>Note: "," ")
if self.rating == '':
self.rating = "0"
self.rating = str(round(float(self.rating)))
class SearchPlugin(movie.SearchMovie):
def __init__(self):
self.original_url_search = "http://www.ofdb.de/view.php?page=suchergebnis&Kat=OTitel&SText="
self.translated_url_search = "http://www.ofdb.de/view.php?page=suchergebnis&Kat=DTitel&SText="
self.encode = 'utf-8'
self.remove_accents = False
def search(self,parent_window):
if not self.open_search(parent_window):
return None
self.page = gutils.trim(self.page,"</b><br><br>", "<br><br><br>");
self.page = string.replace( self.page, "'", '"' )
self.page = string.replace( self.page, '<font size="1">', '' )
self.page = string.replace( self.page, '</font>', '' )
return self.page
def get_searches(self):
elements = string.split(self.page,"<br>")
        if elements[0] != '':
for element in elements:
elementid = gutils.trim(element,'<a href="','"')
                if elementid is not None and elementid != '':
self.ids.append(elementid)
elementname = gutils.clean(element)
p1 = string.find(elementname, '>')
if p1 == -1:
self.titles.append(elementname)
else:
self.titles.append(elementname[p1+1:])
#
# Plugin Test
#
class SearchPluginTest(SearchPlugin):
#
# Configuration for automated tests:
# dict { movie_id -> [ expected result count for original url, expected result count for translated url ] }
#
test_configuration = {
'Rocky Balboa' : [ 1, 1 ],
'Arahan' : [ 3, 2 ],
'glückliches' : [ 4, 2 ]
}
class PluginTest:
#
# Configuration for automated tests:
# dict { movie_id -> dict { arribute -> value } }
#
# value: * True/False if attribute only should be tested for any value
# * or the expected value
#
test_configuration = {
'film/103013,Rocky%20Balboa' : {
'title' : 'Rocky Balboa',
'o_title' : 'Rocky Balboa',
'director' : 'Sylvester Stallone',
'plot' : True,
'cast' : 'Sylvester Stallone' + _(' as ') + 'Rocky Balboa\n\
Burt Young' + _(' as ') + 'Paulie\n\
Antonio Tarver' + _(' as ') + 'Mason \'The Line\' Dixon\n\
Geraldine Hughes' + _(' as ') + 'Marie\n\
Milo Ventimiglia' + _(' as ') + 'Robert Jr.\n\
Tony Burton' + _(' as ') + 'Duke\n\
A.J. Benza' + _(' as ') + 'L.C.\n\
James Francis Kelly III' + _(' as ') + 'Steps\n\
Lou DiBella' + _(' as ') + 'Himself\n\
Mike Tyson' + _(' as ') + 'Himself\n\
Henry G. Sanders' + _(' as ') + 'Martin\n\
Pedro Lovell' + _(' as ') + 'Spider Rico\n\
Ana Gerena' + _(' as ') + 'Isabel\n\
Angela Boyd' + _(' as ') + 'Angie\n\
Louis Giansante\n\
Maureen Schilling\n\
Lahmard J. Tate\n\
Woody Paige\n\
Skip Bayless\n\
Jay Crawford\n\
Brian Kenny\n\
Dana Jacobson\n\
Charles Johnson\n\
James Binns\n\
Johnnie Hobbs Jr.\n\
Barney Fitzpatrick\n\
Jim Lampley\n\
Larry Merchant\n\
Max Kellerman\n\
LeRoy Neiman\n\
Bert Randolph Sugar\n\
Bernard Fernández\n\
Gunnar Peterson\n\
Yahya\n\
Marc Ratner\n\
Anthony Lato Jr.\n\
Jack Lazzarado\n\
Michael Buffer' + _(' as ') + 'Ring Announcer\n\
Joe Cortez' + _(' as ') + 'Referee\n\
Carter Mitchell\n\
Vinod Kumar\n\
Fran Pultro\n\
Frank Stallone als Frank Stallone Jr.' + _(' as ') + 'Dinner Patron \n\
Jody Giambelluca\n\
Tobias Segal' + _(' as ') + 'Robert\'s Friend\n\
Tim Carr' + _(' as ') + 'Robert\'s Friend \n\
Matt Frack\n\
Paul Dion Monte' + _(' as ') + 'Robert\'s Friend\n\
Kevin King Templeton\n\
Robert Michael Kelly\n\
Rick Buchborn\n\
Nick Baker\n\
Don Sherman' + _(' as ') + 'Andy\n\
Gary Compton\n\
Vale Anoai\n\
Sikander Malik\n\
Michael Ahl\n\
Andrew Aninsman\n\
Ben Bachelder\n\
Lacy Bevis\n\
Tim Brooks\n\
D.T. Carney\n\
Ricky Cavazos' + _(' as ') + 'Boxing Spectator (uncredited)\n\
Rennie Cowan\n\
Deon Derrico\n\
Jacob \'Stitch\' Duran\n\
Simon P. Edwards\n\
Ruben Fischman' + _(' as ') + 'High-Roller in Las Vegas (uncredited)\n\
David Gere\n\
Noah Jacobs\n\
Mark J. Kilbane\n\
Zach Klinefelter\n\
David Kneeream\n\
Dan Montero\n\
Keith Moyer' + _(' as ') + 'Bar Patron (uncredited)\n\
Carol Anne Mueller\n\
Jacqueline Olivia\n\
Brian H. Scott\n\
Keyon Smith\n\
Frank Traynor\n\
Ryan Tygh\n\
Kimberly Villanova',
'country' : 'USA',
'genre' : 'Action, Drama, Sportfilm',
'classification' : False,
'studio' : 'Metro-Goldwyn-Mayer (MGM), Columbia Pictures, Revolution Studios',
'o_site' : False,
'site' : 'http://www.ofdb.de/film/103013,Rocky%20Balboa',
'trailer' : False,
'year' : 2006,
'notes' : False,
'runtime' : 102,
'image' : True,
'rating' : 8
},
'film/22489,Ein-Gl%C3%BCckliches-Jahr' : {
'title' : 'Glückliches Jahr, Ein',
'o_title' : 'Bonne année, La',
'director' : 'Claude Lelouch',
'plot' : False,
'cast' : 'Lino Ventura' + _(' as ') + 'Simon\n\
Françoise Fabian' + _(' as ') + 'Françoise\n\
Charles Gérard' + _(' as ') + 'Charlot\n\
André Falcon' + _(' as ') + 'Le bijoutier\n\
Mireille Mathieu\n\
Lilo\n\
Claude Mann\n\
Frédéric de Pasquale\n\
Gérard Sire\n\
Silvano Tranquilli' + _(' as ') + 'L\'amant italien\n\
André Barello\n\
Michel Bertay\n\
Norman de la Chesnaye\n\
Pierre Edeline\n\
Pierre Pontiche\n\
Michou\n\
Bettina Rheims\n\
Joseph Rythmann\n\
Georges Staquet\n\
Jacques Villedieu\n\
Harry Walter\n\
Elie Chouraqui',
'country' : 'Frankreich',
'genre' : 'Komödie, Krimi',
'classification' : False,
'studio' : 'Les Films 13, Rizzoli Film',
'o_site' : False,
'site' : 'http://www.ofdb.de/film/22489,Ein-Gl%C3%BCckliches-Jahr',
'trailer' : False,
'year' : 1973,
'notes' : False,
'runtime' : 115,
'image' : True,
'rating' : 6
},
'film/54088,Arahan' : {
'title' : 'Arahan',
'o_title' : 'Arahan jangpung daejakjeon',
'director' : 'Ryoo Seung-wan',
'plot' : True,
'cast' : 'Ryoo Seung-beom\n\
Yoon Soy' + _(' as ') + 'Wi-jin\n\
Ahn Seong-gi' + _(' as ') + 'Ja-woon\n\
Jeong Doo-hong' + _(' as ') + 'Heuk-Woon\n\
Yoon Joo-sang' + _(' as ') + 'Moo-woon \n\
Kim Ji-yeong\n\
Baek Chan-gi\n\
Kim Jae-man\n\
Lee Dae-yeon\n\
Kim Dong-ju\n\
Kim Su-hyeon\n\
Geum Dong-hyeon\n\
Lee Jae-goo\n\
Ahn Kil-kang\n\
Bong Tae-gyu' + _(' as ') + 'Cameo\n\
Im Ha-ryong' + _(' as ') + 'Cameo\n\
Yoon Do-hyeon\n\
Lee Choon-yeon' + _(' as ') + 'Cameo\n\
Kim Yeong-in\n\
Park Yoon-bae\n\
Lee Won\n\
Kim Kyeong-ae\n\
Yoo Soon-cheol\n\
Hwang Hyo-eun\n\
Lee Jae-ho\n\
Yang Ik-joon\n\
Kwon Beom-taek\n\
Min Hye-ryeong\n\
Oh Soon-tae\n\
Lee Oi-soo',
'country' : 'Südkorea',
'genre' : 'Action, Fantasy, Komödie',
'classification' : False,
'studio' : 'Fun and Happiness, Good Movie Company',
'o_site' : 'http://www.arahan.co.kr/',
'site' : 'http://www.ofdb.de/film/54088,Arahan',
'trailer' : False,
'year' : 2004,
'notes' : False,
'runtime' : 114,
'image' : True,
'rating' : 7
}
}
| santiavenda2/griffith | lib/plugins/movie/PluginMovieOFDb.py | Python | gpl-2.0 | 12,757 |
# -*- coding: utf-8 -*-
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Provides widget classes and functions.
.. warning:: Only PyQt4/PySide QtGui classes compatible with PyQt5.QtWidgets
are exposed here. Therefore, you need to treat/use this package as if it
were the ``PyQt5.QtWidgets`` module.
"""
from . import PYQT5, PYSIDE2, PYQT4, PYSIDE, PythonQtError
from ._patch.qcombobox import patch_qcombobox
from ._patch.qheaderview import introduce_renamed_methods_qheaderview
if PYQT5:
from PyQt5.QtWidgets import *
elif PYSIDE2:
from PySide2.QtWidgets import *
elif PYQT4:
from PyQt4.QtGui import *
QStyleOptionViewItem = QStyleOptionViewItemV4
del QStyleOptionViewItemV4
QStyleOptionFrame = QStyleOptionFrameV3
del QStyleOptionFrameV3
# These objects belong to QtGui
try:
# Older versions of PyQt4 do not provide these
del (QGlyphRun,
QMatrix2x2, QMatrix2x3, QMatrix2x4, QMatrix3x2, QMatrix3x3,
QMatrix3x4, QMatrix4x2, QMatrix4x3, QMatrix4x4,
QQuaternion, QRadialGradient, QRawFont, QRegExpValidator,
QStaticText, QTouchEvent, QVector2D, QVector3D, QVector4D,
qFuzzyCompare)
except NameError:
pass
del (QAbstractTextDocumentLayout, QActionEvent, QBitmap, QBrush, QClipboard,
QCloseEvent, QColor, QConicalGradient, QContextMenuEvent, QCursor,
QDesktopServices, QDoubleValidator, QDrag, QDragEnterEvent,
QDragLeaveEvent, QDragMoveEvent, QDropEvent, QFileOpenEvent,
QFocusEvent, QFont, QFontDatabase, QFontInfo, QFontMetrics,
QFontMetricsF, QGradient, QHelpEvent, QHideEvent,
QHoverEvent, QIcon, QIconDragEvent, QIconEngine, QImage,
QImageIOHandler, QImageReader, QImageWriter, QInputEvent,
QInputMethodEvent, QKeyEvent, QKeySequence, QLinearGradient,
QMouseEvent, QMoveEvent, QMovie, QPaintDevice, QPaintEngine,
QPaintEngineState, QPaintEvent, QPainter, QPainterPath,
QPainterPathStroker, QPalette, QPen, QPicture, QPictureIO, QPixmap,
QPixmapCache, QPolygon, QPolygonF,
QRegion, QResizeEvent, QSessionManager, QShortcutEvent, QShowEvent,
QStandardItem, QStandardItemModel, QStatusTipEvent,
QSyntaxHighlighter, QTabletEvent, QTextBlock, QTextBlockFormat,
QTextBlockGroup, QTextBlockUserData, QTextCharFormat, QTextCursor,
QTextDocument, QTextDocumentFragment, QTextDocumentWriter,
QTextFormat, QTextFragment, QTextFrame, QTextFrameFormat,
QTextImageFormat, QTextInlineObject, QTextItem, QTextLayout,
QTextLength, QTextLine, QTextList, QTextListFormat, QTextObject,
QTextObjectInterface, QTextOption, QTextTable, QTextTableCell,
QTextTableCellFormat, QTextTableFormat, QTransform,
QValidator, QWhatsThisClickedEvent,
QWheelEvent, QWindowStateChangeEvent, qAlpha, qBlue,
qGray, qGreen, qIsGray, qRed, qRgb, qRgba, QIntValidator,
QStringListModel)
# These objects belong to QtPrintSupport
del (QAbstractPrintDialog, QPageSetupDialog, QPrintDialog, QPrintEngine,
QPrintPreviewDialog, QPrintPreviewWidget, QPrinter, QPrinterInfo)
# These objects belong to QtCore
del (QItemSelection, QItemSelectionModel, QItemSelectionRange,
QSortFilterProxyModel)
# Patch QComboBox to allow Python objects to be passed to userData
patch_qcombobox(QComboBox)
# QHeaderView: renamed methods
introduce_renamed_methods_qheaderview(QHeaderView)
elif PYSIDE:
from PySide.QtGui import *
QStyleOptionViewItem = QStyleOptionViewItemV4
del QStyleOptionViewItemV4
# These objects belong to QtGui
del (QAbstractTextDocumentLayout, QActionEvent, QBitmap, QBrush, QClipboard,
QCloseEvent, QColor, QConicalGradient, QContextMenuEvent, QCursor,
QDesktopServices, QDoubleValidator, QDrag, QDragEnterEvent,
QDragLeaveEvent, QDragMoveEvent, QDropEvent, QFileOpenEvent,
QFocusEvent, QFont, QFontDatabase, QFontInfo, QFontMetrics,
QFontMetricsF, QGradient, QHelpEvent, QHideEvent,
QHoverEvent, QIcon, QIconDragEvent, QIconEngine, QImage,
QImageIOHandler, QImageReader, QImageWriter, QInputEvent,
QInputMethodEvent, QKeyEvent, QKeySequence, QLinearGradient,
QMatrix2x2, QMatrix2x3, QMatrix2x4, QMatrix3x2, QMatrix3x3,
QMatrix3x4, QMatrix4x2, QMatrix4x3, QMatrix4x4, QMouseEvent,
QMoveEvent, QMovie, QPaintDevice, QPaintEngine, QPaintEngineState,
QPaintEvent, QPainter, QPainterPath, QPainterPathStroker, QPalette,
QPen, QPicture, QPictureIO, QPixmap, QPixmapCache, QPolygon,
QPolygonF, QQuaternion, QRadialGradient, QRegExpValidator,
QRegion, QResizeEvent, QSessionManager, QShortcutEvent, QShowEvent,
QStandardItem, QStandardItemModel, QStatusTipEvent,
QSyntaxHighlighter, QTabletEvent, QTextBlock, QTextBlockFormat,
QTextBlockGroup, QTextBlockUserData, QTextCharFormat, QTextCursor,
QTextDocument, QTextDocumentFragment,
QTextFormat, QTextFragment, QTextFrame, QTextFrameFormat,
QTextImageFormat, QTextInlineObject, QTextItem, QTextLayout,
QTextLength, QTextLine, QTextList, QTextListFormat, QTextObject,
QTextObjectInterface, QTextOption, QTextTable, QTextTableCell,
QTextTableCellFormat, QTextTableFormat, QTouchEvent, QTransform,
QValidator, QVector2D, QVector3D, QVector4D, QWhatsThisClickedEvent,
QWheelEvent, QWindowStateChangeEvent, qAlpha, qBlue, qGray, qGreen,
qIsGray, qRed, qRgb, qRgba, QIntValidator, QStringListModel)
# These objects belong to QtPrintSupport
del (QAbstractPrintDialog, QPageSetupDialog, QPrintDialog, QPrintEngine,
QPrintPreviewDialog, QPrintPreviewWidget, QPrinter, QPrinterInfo)
# These objects belong to QtCore
del (QItemSelection, QItemSelectionModel, QItemSelectionRange,
QSortFilterProxyModel)
# Patch QComboBox to allow Python objects to be passed to userData
patch_qcombobox(QComboBox)
# QHeaderView: renamed methods
introduce_renamed_methods_qheaderview(QHeaderView)
else:
raise PythonQtError('No Qt bindings could be found')
| davvid/git-cola | qtpy/QtWidgets.py | Python | gpl-2.0 | 6,411 |
#
# Alexander Todorov <[email protected]>
#
# Copyright 2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import unittest
from pykickstart.base import DeprecatedCommand
from tests.baseclass import CommandTest
class F20_TestCase(CommandTest):
command = "install"
def runTest(self):
# pass
        self.assert_parse("install", "install\n")
self.assert_parse("install --root-device=/dev/sda", "install\n")
# upgrade is always false
cmd = self.handler().commands[self.command]
cmd.parse([])
self.assertFalse(cmd.upgrade)
# fail
self.assert_parse_error("install --bad-flag")
# --root-device requires argument
self.assert_parse_error("install --root-device")
self.assert_parse_error("install --root-device=\"\"")
class F29_TestCase(F20_TestCase):
def runTest(self):
# make sure we've been deprecated
parser = self.getParser("install")
self.assertTrue(issubclass(parser.__class__, DeprecatedCommand))
# make sure we are still able to parse it
self.assert_parse("install")
class RHEL8_TestCase(F29_TestCase):
pass
if __name__ == "__main__":
unittest.main()
| atodorov/pykickstart | tests/commands/install.py | Python | gpl-2.0 | 2,107 |
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <[email protected]>
## This program is published under a GPLv2 license
"""
VoIP (Voice over IP) related functions
"""
import os
###################
## Testing stuff ##
###################
from fcntl import fcntl
from scapy.sendrecv import sniff
from scapy.layers.inet import IP,UDP
from scapy.layers.rtp import RTP
from scapy.utils import get_temp_file
def merge(x,y,sample_size=2):
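    # Interleave two mono sample streams into one stereo stream, padding the
    # shorter with zero bytes; e.g. merge("AABB", "CCDD") yields "AACCBBDD".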
if len(x) > len(y):
y += "\x00"*(len(x)-len(y))
elif len(x) < len(y):
x += "\x00"*(len(y)-len(x))
m = ""
ss=sample_size
for i in xrange(len(x)/ss):
m += x[ss*i:ss*(i+1)]+y[ss*i:ss*(i+1)]
return m
# return "".join(map(str.__add__, x, y))
def voip_play(s1,list=None,**kargs):
FIFO=get_temp_file()
FIFO1=FIFO % 1
FIFO2=FIFO % 2
os.mkfifo(FIFO1)
os.mkfifo(FIFO2)
try:
os.system("soxmix -t .ul %s -t .ul %s -t ossdsp /dev/dsp &" % (FIFO1,FIFO2))
c1=open(FIFO1,"w", 4096)
c2=open(FIFO2,"w", 4096)
fcntl.fcntl(c1.fileno(),fcntl.F_SETFL, os.O_NONBLOCK)
fcntl.fcntl(c2.fileno(),fcntl.F_SETFL, os.O_NONBLOCK)
# dsp,rd = os.popen2("sox -t .ul -c 2 - -t ossdsp /dev/dsp")
def play(pkt, last=None):
if last is None:
last = []
if not pkt:
return
if not pkt.haslayer(UDP):
return
ip=pkt.getlayer(IP)
if s1 in [ip.src, ip.dst]:
if not last:
last.append(pkt)
return
load=last.pop()
# x1 = load.load[12:]
c1.write(load.load[12:])
if load.getlayer(IP).src == ip.src:
# x2 = ""
c2.write("\x00"*len(load.load[12:]))
last.append(pkt)
else:
                    # x2 = pkt.load[12:]
c2.write(pkt.load[12:])
# dsp.write(merge(x1,x2))
if list is None:
sniff(store=0, prn=play, **kargs)
else:
for p in list:
play(p)
finally:
os.unlink(FIFO1)
os.unlink(FIFO2)
def voip_play1(s1,list=None,**kargs):
dsp,rd = os.popen2("sox -t .ul - -t ossdsp /dev/dsp")
def play(pkt):
if not pkt:
return
if not pkt.haslayer(UDP):
return
ip=pkt.getlayer(IP)
if s1 in [ip.src, ip.dst]:
from scapy.config import conf
dsp.write(pkt.getlayer(conf.raw_layer).load[12:])
try:
if list is None:
sniff(store=0, prn=play, **kargs)
else:
for p in list:
play(p)
finally:
dsp.close()
rd.close()
def voip_play2(s1,**kargs):
dsp,rd = os.popen2("sox -t .ul -c 2 - -t ossdsp /dev/dsp")
def play(pkt, last=None):
if last is None:
last = []
if not pkt:
return
if not pkt.haslayer(UDP):
return
ip=pkt.getlayer(IP)
if s1 in [ip.src, ip.dst]:
if not last:
last.append(pkt)
return
load=last.pop()
x1 = load.load[12:]
# c1.write(load.load[12:])
if load.getlayer(IP).src == ip.src:
x2 = ""
# c2.write("\x00"*len(load.load[12:]))
last.append(pkt)
else:
                x2 = pkt.load[12:]
# c2.write(pkt.load[12:])
dsp.write(merge(x1,x2))
sniff(store=0, prn=play, **kargs)
def voip_play3(lst=None,**kargs):
dsp,rd = os.popen2("sox -t .ul - -t ossdsp /dev/dsp")
try:
def play(pkt, dsp=dsp):
from scapy.config import conf
if pkt and pkt.haslayer(UDP) and pkt.haslayer(conf.raw_layer):
dsp.write(pkt.getlayer(RTP).load)
if lst is None:
sniff(store=0, prn=play, **kargs)
else:
for p in lst:
play(p)
finally:
try:
dsp.close()
rd.close()
except:
pass
| kinap/scapy | scapy/modules/voip.py | Python | gpl-2.0 | 4,288 |
#
# Copyright (C) 2006 Red Hat, Inc.
# Copyright (C) 2006 Daniel P. Berrange <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
class vmmSecret(object):
def __init__(self, name, secret=None, attributes=None):
self.name = name
self.secret = secret
        if attributes is None:
attributes = {}
self.attributes = attributes
def set_secret(self, data):
self.secret = data
def get_secret(self):
return self.secret
def get_name(self):
return self.name
def get_attributes(self):
return self.attributes
def has_attribute(self, key):
return key in self.attributes
def add_attribute(self, key, value):
        if not isinstance(value, str):
            value = str(value)
self.attributes[key] = value
def list_attributes(self):
return self.attributes.keys()
def get_attribute(self, key):
return self.attributes[key]
| dumbbell/virt-manager | src/virtManager/secret.py | Python | gpl-2.0 | 1,629 |
import math
def PrimeFactors(num):
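    # Trial division: divide out each factor up to sqrt(num); at most one
    # prime factor can exceed the square root, and it is whatever remains.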
primeFactors = []
for i in range(2, int(math.sqrt(num)) + 1):
while num % i == 0:
primeFactors.append(i)
num //= i
    if num > 1:
        primeFactors.append(num)
return primeFactors
def main():
factors = PrimeFactors(36)
print(factors)
if __name__ == '__main__':
main()
| gauravsitlani/programming | prime_factors/prime_factors.py | Python | gpl-3.0 | 315 |
"""
Test of MDI sub-window docking behaviour in MantidPlot
"""
import mantidplottests
from mantidplottests import *
from mantidplot import *
from PyQt4 import QtGui, QtCore
class MantidPlotMdiSubWindowTest(unittest.TestCase):
def test_table(self):
self.doTest( newTable() )
def test_graph(self):
self.doTest( newGraph() )
def doTest(self, w):
if w.isFloating():
w.dock()
self.assertFalse( w.isFloating() )
self.assertTrue( w.isDocked() )
size = w.size()
w.undock()
self.assertTrue( w.isFloating() )
self.assertFalse( w.isDocked() )
w.dock()
self.assertFalse( w.isFloating() )
self.assertTrue( w.isDocked() )
# TODO: sizes are not equal. Should we fix it?
# self.assertEqual( size, w.size() )
w.close()
# Run the unit tests
mantidplottests.runTests(MantidPlotMdiSubWindowTest)
| dymkowsk/mantid | MantidPlot/test/MantidPlotMdiSubWindowTest.py | Python | gpl-3.0 | 922 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import uuid
import base64
import os
from django.contrib.gis.db import models
from treemap.models import User
class APIAccessCredential(models.Model):
access_key = models.CharField(max_length=100, null=False, blank=False)
secret_key = models.CharField(max_length=256, null=False, blank=False)
# If a user is specified then this credential
# is always authorized as the given user
#
# If user is None this credential can access
# any user's data if that user's username
# and password are also provided
user = models.ForeignKey(User, null=True)
enabled = models.BooleanField(default=True)
@classmethod
def create(clz, user=None):
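        # 64 random bytes for the secret and a UUID4 for the access key, both
        # url-safe base64 encoded (padding stripped from the access key).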
secret_key = base64.urlsafe_b64encode(os.urandom(64))
access_key = base64.urlsafe_b64encode(uuid.uuid4().bytes)\
.replace('=', '')
return APIAccessCredential.objects.create(
user=user, access_key=access_key, secret_key=secret_key)
| ctaylo37/OTM2 | opentreemap/api/models.py | Python | gpl-3.0 | 1,113 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-08-14 18:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sessao', '0009_auto_20170619_1441'),
]
operations = [
migrations.AddField(
model_name='tiporesultadovotacao',
name='natureza',
field=models.CharField(choices=[('A', 'Aprovado'), ('R', 'Rejeitado')], max_length=100, null=True, verbose_name='Natureza do Tipo'),
),
migrations.AlterField(
model_name='tiporesultadovotacao',
name='nome',
field=models.CharField(max_length=100, verbose_name='Nome do Tipo'),
),
]
| cmjatai/cmj | sapl/sessao/migrations/0010_auto_20170814_1804.py | Python | gpl-3.0 | 753 |
## Note: you must install Levenshtein module
# pip install python-Levenshtein
# for this Microsoft Visual C++ 9.0 is required. Get it from http://aka.ms/vcpython27
# more info - see: http://stackoverflow.com/questions/18134437/where-can-the-documentation-for-python-levenshtein-be-found-online
from pymongo import MongoClient
import pymongo
import pprint
import bson
import pandas
import Levenshtein
EXPECTED_STREET_PATTERN = \
u"^.*(?<![Ss]tra\u00dfe)(?<![Ww]eg)(?<![Aa]llee)(?<![Rr]ing)(?<![Bb]erg)" + \
u"(?<![Pp]ark)(?<![Hh]\u00f6he)(?<![Pp]latz)(?<![Bb]r\u00fccke)(?<![Gg]rund)$"
def audit_streets(collection):
return list(collection.distinct("name", {
"type": "way",
"name": {"$regex": EXPECTED_STREET_PATTERN}
}))
def audit_buildings(db):
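    # db.eval (a legacy server-side JavaScript API, removed in recent MongoDB
    # releases) gathers, for each building node with a street address and a
    # position, the distinct street names within 50 m; the loop below then
    # ranks those neighbours by Levenshtein distance to the building's street.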
result = db.eval('''
db.osmnodes.ensureIndex({pos:"2dsphere"});
result = [];
db.osmnodes.find(
{"building": {"$exists": true}, "address.street": {"$exists": true}, "pos": {"$exists": true}},
{"address.street": "", "pos": ""}
).forEach(function(val, idx) {
val.nearby = db.osmnodes.distinct("address.street",
{"_id": {"$ne": val._id}, "pos": {"$near": {"$geometry": {"type": "Point", "coordinates": val.pos}, "$maxDistance": 50, "$minDistance": 0}}}
);
result.push(val);
})
return result;
''')
df_list = []
for row in result:
street_name = row["address"]["street"]
nb_best_dist = None
nb_best = ""
nb_worst_dist = None
nb_worst = ""
for nearby_street in row["nearby"]:
d = Levenshtein.distance(nearby_street, street_name)
            if nb_best_dist is None or d < nb_best_dist:
nb_best_dist = d
nb_best = nearby_street
            if nb_worst_dist is None or d > nb_worst_dist:
nb_worst_dist = d
nb_worst = nearby_street
df_list += [{
"_id": row["_id"],
"street_name": street_name,
"num_nearby": len(row["nearby"]),
"nb_best": nb_best,
"nb_worst": nb_worst,
"nb_best_dist": nb_best_dist,
"nb_worst_dist": nb_worst_dist
}]
return pandas.DataFrame(df_list, columns=["_id", "street_name", "num_nearby", "nb_best", "nb_best_dist", "nb_worst", "nb_worst_dist"])
def audit_phone_numbers(collection):
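    # Coalesce the three possible phone fields into one projected "phone"
    # value via nested $ifNull, matching documents that carry any of them.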
return list(collection.aggregate([
{"$match": {"$or": [
{"phone": {"$exists": True}},
{"mobile_phone": {"$exists": True}},
{"address.phone": {"$exists": True}}
]}},
{"$project": {
"_id": 1,
"phone": {"$ifNull": ["$phone", {"$ifNull": ["$mobile_phone", "$address.phone"]}]}
}}
]))
def audit_quality_map(mongoServer, mongoPort, csvFilePattern, csvEncoding):
client = MongoClient(mongoServer + ":" + mongoPort)
db = client.udacity
c = client.udacity.osmnodes
print
print "Auditing way descriptions..."
print "These are the 'unusual' street names"
r = audit_streets(c)
pprint.pprint(r)
print
print "Auditing streets close to buildings..."
r = audit_buildings(db)
r.to_csv(csvFilePattern.format("audit_buildings.csv"), encoding=csvEncoding)
pprint.pprint(r)
print
print "Auditing phone numbers..."
r = audit_phone_numbers(c)
pprint.pprint(r)
| benjaminsoellner/2015_Data_Analyst_Project_3 | Project/audit_quality_map.py | Python | agpl-3.0 | 3,625 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Agile Business Group sagl
# (<http://www.agilebg.com>)
# @author Alex Comba <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "Stock Invoice Picking Incoterm",
'version': '0.1',
'category': 'Warehouse Management',
'description': """
This module adds the field incoterm to invoice and picking. In this way the
user can specify the incoterm directly on these documents, with no need to
refer to the incoterm of the order (which could even be missing).
The module extends 'stock_invoice_picking' so that the invoices created
from pickings will have the same incoterm set in the picking.
""",
'author': 'Agile Business Group',
'website': 'http://www.agilebg.com',
'license': 'AGPL-3',
'depends': [
'stock_invoice_picking',
],
'data': [
'account_invoice_view.xml',
'stock_view.xml',
],
'test': [
'test/invoice_picking_incoterm.yml',
],
'installable': False
}
| jbaudoux/account-invoicing | __unported__/stock_invoice_picking_incoterm/__openerp__.py | Python | agpl-3.0 | 1,830 |
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
from django.core import urlresolvers
from django.http import HttpResponse, HttpResponseServerError
from flexi_auth.models import ObjectWithContext
from rest.views.blocks.base import BlockSSDataTables, ResourceBlockAction, CREATE_CSV
from consts import VIEW_CONFIDENTIAL, CONFIDENTIAL_VERBOSE_HTML, CASH
from gf.base.templatetags.accounting_tags import human_readable_account_csv,human_readable_kind, signed_ledger_entry_amount
from django.template.loader import render_to_string
import datetime, csv
import cStringIO as StringIO
#from simple_accounting.models import economic_subject, AccountingDescriptor
#from simple_accounting.models import account_type
#from simple_accounting.exceptions import MalformedTransaction
#from simple_accounting.models import AccountingProxy
#from simple_accounting.utils import register_transaction, register_simple_transaction
#from gf.base.accounting import PersonAccountingProxy
from lib.shortcuts import render_to_xml_response, render_to_context_response
#------------------------------------------------------------------------------#
# #
#------------------------------------------------------------------------------#
#OLD: ENCODING = "iso-8859-1"
class Block(BlockSSDataTables):
BLOCK_NAME = "transactions"
BLOCK_DESCRIPTION = _("Economic transactions")
BLOCK_VALID_RESOURCE_TYPES = ["gas", "supplier", "pact"]
COLUMN_INDEX_NAME_MAP = {
0: 'id',
1: 'transaction__date',
2: '',
3: '',
4: 'amount',
5: 'transaction__description',
}
#WAS 2: 'transaction__issuer',
#WAS 3: 'transaction__source',
#WAS 3: 'transaction__kind', --> FIXME: In case of translation the search does not operate correctly
def __init__(self, *args, **kw):
super(Block, self).__init__(*args, **kw)
# Default start closed. Mainly for GAS -> Accounting tab ("Conto")
self.start_open = False
def _check_permission(self, request):
if request.resource.gas:
return request.user.has_perm(
CASH, obj=ObjectWithContext(request.resource.gas)
)
else:
return True
def _get_resource_list(self, request):
#Accounting.LedgerEntry or Transactions
return request.resource.economic_movements
def get_response(self, request, resource_type, resource_id, args):
"""Check for confidential access permission and call superclass if needed"""
if not self._check_permission(request):
return render_to_xml_response(
"blocks/table_html_message.xml",
{ 'msg' : CONFIDENTIAL_VERBOSE_HTML }
)
if args == CREATE_CSV:
return self._create_csv(request)
return super(Block, self).get_response(request, resource_type, resource_id, args)
#TODO: Filter grid by
# Date From --> To
# Kind iof transctions: can be checkbox list multiselect
# Subject: Radio or multiple checkbox onto values [GAS borselino, GASMemmbers, Suppliers]
# def options_response(self, request, resource_type, resource_id):
# """Get options for transaction block.
# WARNING: call to this method doesn't pass through get_response
# so you have to reset self.request and self.resource attribute if you want
# """
# self.request = request
# self.resource = request.resource
# fields = []
# #DATE FROM
# fields.append({
# 'field_type' : 'datetime',
# 'field_label' : 'from date',
# 'field_name' : 'from',
# 'field_values' : [{ 'value' : '22/09/2012', 'selected' : ''}]
# })
# #DATE TO
# fields.append({
# 'field_type' : 'datetime',
# 'field_label' : 'to date',
# 'field_name' : 'to',
# 'field_values' : [{ 'value' : '28/09/2012', 'label' : 'labelvalue', 'selected' : 'sel'}]
# })
# ctx = {
# 'block_name' : self.description,
# 'fields': fields,
# }
# #Can use html template loader
# return render_to_xml_response('eco-options.xml', ctx)
def _get_user_actions(self, request):
user_actions = []
resource_type = request.resource.resource_type
if self._check_permission(request):
user_actions += [
ResourceBlockAction(
block_name = self.BLOCK_NAME,
resource = request.resource,
name=CREATE_CSV, verbose_name=_("Create CSV"),
popup_form=False,
method="OPENURL",
),
]
return user_actions
def _create_csv(self, request):
""" Create CSV of this block transactions
        #MATTEO TOREMOVE: I leave the first implementation here (to be removed
        before integration, of course) as a reminder to myself -->
        kiss, kiss and kiss again!!
        #NOTE: when integrating, remove every line commented with #OLD:
"""
headers = [_(u'Id'), _(u'Data'), _(u'Account'), _(u'Kind'), _(u'Cash amount'), _(u'Description')]
records = self._get_resource_list(request)
csvfile = StringIO.StringIO()
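        # Build the CSV in an in-memory buffer using ';' as the field delimiter
        # and '"' as the quote character, then return it as an attachment.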
writer = csv.writer(csvfile, delimiter=';',quotechar='"', quoting=csv.QUOTE_MINIMAL)
writer.writerow(headers)
for res in self._get_resource_list(request):
writer.writerow([res.pk,
'{0:%a %d %b %Y %H:%M}'.format(res.date),
human_readable_account_csv(res.account),
human_readable_kind(res.transaction.kind),
signed_ledger_entry_amount(res),
res.transaction.description.encode("utf-8", "ignore")
])
csv_data = csvfile.getvalue()
if not csv_data:
rv = HttpResponseServerError(_('Report not generated'))
else:
response = HttpResponse(csv_data, content_type='text/csv')
filename = "%(res)s_%(date)s.csv" % {
'res': request.resource,
'date' : '{0:%Y%m%d_%H%M}'.format(datetime.datetime.now())
}
response['Content-Disposition'] = "attachment; filename=" + filename
rv = response
return rv
| befair/gasistafelice | gasistafelice/rest/views/blocks/transactions.py | Python | agpl-3.0 | 6,479 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from copy import deepcopy
from .oml_replicate import metacl_oml_replicate
meta_test_test_kwargs = dict(
# Setup the meta-testing phase and allow it to run.
run_meta_test=True,
# This resets the fast params (in this case the output layer of the OMLNetwork)
reset_fast_params=True,
# Results reported over 15 sampled.
meta_test_sample_size=15,
# Run meta-testing over 10 and 50 classes.
num_meta_test_classes=[10, 50],
# The best lr was chosen among the following; done separately for each number of
# classes trained on.
lr_sweep_range=[0.03, 0.01, 0.003, 0.001, 0.0003, 0.0001, 0.00003, 0.00001],
# Run through meta-test testing 5 images at a time. No training occurs here.
test_test_batch_size=60,
)
# Run OML for 2000 steps to ensure meta=testing accuracy hasn't regressed.
# |--------------------------------------------------------------|
# | Num Classes | Meta-test test | Meta-test train | LR |
# |--------------:|:-----------------|:------------------|------:|
# | 10 | 0.84 ± 0.06 | 0.94 ± 0.05 | 0.003 |
# | 50 | 0.75 ± 0.03 | 0.95 ± 0.01 | 0.001 |
# |--------------------------------------------------------------|
#
oml_regression_test = deepcopy(metacl_oml_replicate)
oml_regression_test.update(
# The number of outer (i.e. slow) steps.
epochs=2000,
# Log results to wandb.
wandb_args=dict(
name="oml_regression_test",
project="metacl",
),
# Meta-testing specific arguments.
**deepcopy(meta_test_test_kwargs),
)
# This is meant as a quick run to ensure all functionality is fully working.
# |---------------------------------------------------------------|
# | Num Classes | Meta-test test | Meta-test train | LR |
# |--------------:|:-----------------|:------------------|-------:|
# | 10 | 0.51 ± 0.03 | 0.81 ± 0.01 | 0.001 |
# |---------------------------------------------------------------|
#
oml_regression_test_50_epochs = deepcopy(oml_regression_test)
oml_regression_test_50_epochs.update(
# The number of outer (i.e. slow) steps.
epochs=50,
# Average over 10 meta-test runs.
num_meta_testing_runs=3,
num_meta_test_classes=[10],
num_lr_search_runs=1,
lr_sweep_range=[0.001],
# Log results to wandb.
wandb_args=dict(
name="oml_regression_test_50_epochs",
project="metacl",
),
)
# ------------
# All configs.
# ------------
CONFIGS = dict(
oml_regression_test=oml_regression_test,
oml_regression_test_50_epochs=oml_regression_test_50_epochs,
)
| mrcslws/nupic.research | projects/meta_cl/experiments/oml_regression_test.py | Python | agpl-3.0 | 3,610 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.util.url
import spack.package
class XorgPackage(spack.package.PackageBase):
"""Mixin that takes care of setting url and mirrors for x.org
packages."""
#: Path of the package in a x.org mirror
xorg_mirror_path = None
#: List of x.org mirrors used by Spack
# Note: x.org mirrors are a bit tricky, since many are out-of-sync or off.
# A good package to test with is `util-macros`, which had a "recent"
# release.
base_mirrors = [
'https://www.x.org/archive/individual/',
'https://mirrors.ircam.fr/pub/x.org/individual/',
'https://mirror.transip.net/xorg/individual/',
'ftp://ftp.freedesktop.org/pub/xorg/individual/',
'http://xorg.mirrors.pair.com/individual/'
]
@property
def urls(self):
self._ensure_xorg_mirror_path_is_set_or_raise()
return [
spack.util.url.join(m, self.xorg_mirror_path,
resolve_href=True)
for m in self.base_mirrors
]
def _ensure_xorg_mirror_path_is_set_or_raise(self):
if self.xorg_mirror_path is None:
cls_name = type(self).__name__
msg = ('{0} must define a `xorg_mirror_path` attribute'
' [none defined]')
raise AttributeError(msg.format(cls_name))
| iulian787/spack | lib/spack/spack/build_systems/xorg.py | Python | lgpl-2.1 | 1,541 |
object = {__bases__: [], __name__: 'object'}
object.__mro__ = [object]
type = {__bases__: [object], __mro__: [object], __name__: 'type'}
object.__metaclass__ = type
__ARGUMENTS_PADDING__ = {ARGUMENTS_PADDING: "YES IT IS!"}
def __is__(me, other):
return (me is other)
__is__.is_method = True
object.__is__ = __is__
def __isnot__(me, other):
return not (me is other)
__isnot__.is_method = True
object.__isnot__ = __isnot__
def mro(me):
if me is object:
raw = me.__mro__
elif me.__class__:
raw = me.__class__.__mro__
else:
raw = me.__mro__
l = pythonium_call(tuple)
l.jsobject = raw.slice()
return l
mro.is_method = True
object.mro = mro
def __hash__(me):
uid = lookup(me, 'uid')
if not uid:
uid = object._uid
object._uid += 1
me.__uid__ = uid
return pythonium_call(str, '{' + uid)
__hash__.is_method = True
object._uid = 1
object.__hash__ = __hash__
def __rcontains__(me, other):
contains = lookup(other, '__contains__')
return contains(me)
__rcontains__.is_method = True
object.__rcontains__ = __rcontains__
def issubclass(klass, other):
if klass is other:
return __TRUE
if not klass.__bases__:
return __FALSE
for base in klass.__bases__:
if issubclass(base, other) is __TRUE:
return __TRUE
return __FALSE
def pythonium_is_true(v):
if v is False:
return False
if v is True:
return True
if v is None:
return False
if v is __NONE:
return False
if v is __FALSE:
return False
if isinstance(v, int) or isinstance(v, float):
if v.jsobject == 0:
return False
length = lookup(v, '__len__')
if length and length().jsobject == 0:
return False
return True
def isinstance(obj, klass):
if obj.__class__:
return issubclass(obj.__class__, klass)
return __FALSE
def pythonium_obj_to_js_exception(obj):
def exception():
this.exception = obj
return exception
def pythonium_is_exception(obj, exc):
if obj is exc:
return True
return isinstance(obj, exc)
def pythonium_call(obj):
args = Array.prototype.slice.call(arguments, 1)
if obj.__metaclass__:
instance = {__class__: obj}
init = lookup(instance, '__init__')
if init:
init.apply(instance, args)
return instance
else:
return obj.apply(None, args)
def pythonium_create_empty_dict():
instance = {__class__: dict}
instance._keys = pythonium_call(list)
instance.jsobject = JSObject()
return instance
def pythonium_mro(bases):
"""Calculate the Method Resolution Order of bases using the C3 algorithm.
Suppose you intended creating a class K with the given base classes. This
function returns the MRO which K would have, *excluding* K itself (since
it doesn't yet exist), as if you had actually created the class.
Another way of looking at this, if you pass a single class K, this will
return the linearization of K (the MRO of K, *including* itself).
"""
# based on http://code.activestate.com/recipes/577748-calculate-the-mro-of-a-class/
seqs = [C.__mro__.slice() for C in bases]
seqs.push(bases.slice())
def cdr(l):
l = l.slice()
l = l.splice(1)
return l
def contains(l, c):
for i in l:
if i is c:
return True
return False
res = []
while True:
non_empty = []
for seq in seqs:
out = []
for item in seq:
if item:
out.push(item)
if out.length != 0:
non_empty.push(out)
if non_empty.length == 0:
# Nothing left to process, we're done.
return res
for seq in non_empty: # Find merge candidates among seq heads.
candidate = seq[0]
not_head = []
for s in non_empty:
if contains(cdr(s), candidate):
not_head.push(s)
if not_head.length != 0:
candidate = None
else:
break
if not candidate:
raise TypeError("Inconsistent hierarchy, no C3 MRO is possible")
res.push(candidate)
for seq in non_empty:
# Remove candidate.
if seq[0] is candidate:
seq[0] = None
seqs = non_empty
def pythonium_create_class(name, bases, attrs):
attrs.__name__ = name
attrs.__metaclass__ = type
attrs.__bases__ = bases
mro = pythonium_mro(bases)
mro.splice(0, 0, attrs)
attrs.__mro__ = mro
return attrs
def lookup(obj, attr):
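    # Attribute lookup: check the object itself first, then walk its class
    # (or metaclass) MRO; functions flagged with is_method are wrapped so the
    # object is prepended as the implicit first argument, emulating bound methods.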
obj_attr = obj[attr]
if obj_attr != None:
if obj_attr and {}.toString.call(obj_attr) == '[object Function]' and obj_attr.is_method and not obj_attr.bound:
def method_wrapper():
args = Array.prototype.slice.call(arguments)
args.splice(0, 0, obj)
return obj_attr.apply(None, args)
method_wrapper.bound = True
return method_wrapper
return obj_attr
else:
if obj.__class__:
__mro__ = obj.__class__.__mro__
elif obj.__metaclass__:
__mro__ = obj.__metaclass__.__mro__
else:
# it's a function
return None
for base in __mro__:
class_attr = base[attr]
if class_attr != None:
if {}.toString.call(class_attr) == '[object Function]' and class_attr.is_method and not class_attr.bound:
def method_wrapper():
args = Array.prototype.slice.call(arguments)
args.splice(0, 0, obj)
return class_attr.apply(None, args)
method_wrapper.bound = True
return method_wrapper
return class_attr
def pythonium_object_get_attribute(obj, attr):
r = lookup(obj, attr)
if r != None:
return r
else:
getattr = lookup(obj, '__getattr__')
if getattr:
return getattr(attr)
else:
console.trace('AttributeError', attr, obj)
raise AttributeError
pythonium_object_get_attribute.is_method = True
object.__getattribute__ = pythonium_object_get_attribute
def pythonium_get_attribute(obj, attr):
if obj.__class__ or obj.__metaclass__:
getattribute = lookup(obj, '__getattribute__')
r = getattribute(attr)
return r
attr = obj[attr]
if attr:
if {}.toString.call(attr) == '[object Function]':
def method_wrapper():
return attr.apply(obj, arguments)
return method_wrapper
else:
return attr
def pythonium_set_attribute(obj, attr, value):
obj[attr] = value
def ASSERT(condition, message):
if not condition:
raise message or pythonium_call(str, 'Assertion failed')
| skariel/pythonium | pythonium/compliant/runtime.py | Python | lgpl-2.1 | 7,045 |
{
'name': 'Control access to Apps',
'version': '1.0.0',
'author': 'IT-Projects LLC, Ivan Yelizariev',
'category': 'Tools',
'website': 'https://twitter.com/yelizariev',
'price': 10.00,
'currency': 'EUR',
'depends': [
'access_restricted'
],
'data': [
'security/access_apps_security.xml',
'security/ir.model.access.csv',
],
'installable': True
}
| ufaks/addons-yelizariev | access_apps/__openerp__.py | Python | lgpl-3.0 | 415 |
"""
Classes for interacting with the tor control socket.
Controllers are a wrapper around a ControlSocket, retaining many of its methods
(connect, close, is_alive, etc) in addition to providing its own for
interacting at a higher level.
**Module Overview:**
::
from_port - Provides a Controller based on a port connection.
from_socket_file - Provides a Controller based on a socket file connection.
Controller - General controller class intended for direct use.
+- get_info - issues a GETINFO query
  BaseController - Base controller class for asynchronous message handling.
|- msg - communicates with the tor process
|- is_alive - reports if our connection to tor is open or closed
|- connect - connects or reconnects to tor
|- close - shuts down our connection to the tor process
|- get_socket - provides the socket used for control communication
|- add_status_listener - notifies a callback of changes in our status
|- remove_status_listener - prevents further notification of status changes
+- __enter__ / __exit__ - manages socket connection
"""
import time
import Queue
import threading
import stem.response
import stem.socket
import stem.util.log as log
# state changes a control socket can have
# INIT - new control connection
# RESET - received a reset/sighup signal
# CLOSED - control connection closed
State = stem.util.enum.Enum("INIT", "RESET", "CLOSED")
# Constant to indicate an undefined argument default. Usually we'd use None for
# this, but users will commonly provide None as the argument so we need something
# else fairly unique...
UNDEFINED = "<Undefined_ >"
class BaseController:
"""
Controller for the tor process. This is a minimal base class for other
  controllers, providing basic process communication and event listening. Don't
use this directly - subclasses like the Controller provide higher level
functionality.
  Do not continue to interact directly with the ControlSocket we're
constructed from - use our wrapper methods instead.
"""
def __init__(self, control_socket):
self._socket = control_socket
self._msg_lock = threading.RLock()
self._status_listeners = [] # tuples of the form (callback, spawn_thread)
self._status_listeners_lock = threading.RLock()
# queues where incoming messages are directed
self._reply_queue = Queue.Queue()
self._event_queue = Queue.Queue()
# thread to continually pull from the control socket
self._reader_thread = None
# thread to pull from the _event_queue and call handle_event
self._event_notice = threading.Event()
self._event_thread = None
# saves our socket's prior _connect() and _close() methods so they can be
# called along with ours
self._socket_connect = self._socket._connect
self._socket_close = self._socket._close
self._socket._connect = self._connect
self._socket._close = self._close
if self._socket.is_alive():
self._launch_threads()
def msg(self, message):
"""
Sends a message to our control socket and provides back its reply.
:param str message: message to be formatted and sent to tor
:returns: :class:`stem.response.ControlMessage` with the response
:raises:
* :class:`stem.socket.ProtocolError` the content from the socket is malformed
* :class:`stem.socket.SocketError` if a problem arises in using the socket
* :class:`stem.socket.SocketClosed` if the socket is shut down
"""
with self._msg_lock:
# If our _reply_queue isn't empty then one of a few things happened...
#
      # - Our connection was closed and probably re-established. This was
      #   in reply to pulling for an asynchronous event, and getting it is
      #   expected - ignore it.
#
# - Pulling for asynchronous events produced an error. If this was a
# ProtocolError then it's a tor bug, and if a non-closure SocketError
# then it was probably a socket glitch. Deserves an INFO level log
# message.
#
      # - This is a leftover response for a msg() call. We can't tell who an
      #   exception was earmarked for, so we only know that this was the case
      #   if it's a ControlMessage. This should not be possible and indicates
      #   a stem bug. This deserves a NOTICE level log message since it
# indicates that one of our callers didn't get their reply.
while not self._reply_queue.empty():
try:
response = self._reply_queue.get_nowait()
if isinstance(response, stem.socket.SocketClosed):
pass # this is fine
elif isinstance(response, stem.socket.ProtocolError):
log.info("Tor provided a malformed message (%s)" % response)
elif isinstance(response, stem.socket.ControllerError):
log.info("Socket experienced a problem (%s)" % response)
elif isinstance(response, stem.response.ControlMessage):
log.notice("BUG: the msg() function failed to deliver a response: %s" % response)
except Queue.Empty:
# the empty() method is documented to not be fully reliable so this
# isn't entirely surprising
break
try:
self._socket.send(message)
response = self._reply_queue.get()
# If the message we received back had an exception then re-raise it to the
# caller. Otherwise return the response.
if isinstance(response, stem.socket.ControllerError):
raise response
else:
return response
except stem.socket.SocketClosed, exc:
# If the recv() thread caused the SocketClosed then we could still be
# in the process of closing. Calling close() here so that we can
# provide an assurance to the caller that when we raise a SocketClosed
# exception we are shut down afterward for realz.
self.close()
raise exc
def is_alive(self):
"""
Checks if our socket is currently connected. This is a passthrough for our
socket's is_alive() method.
    :returns: bool that's True if our socket is connected and False otherwise
"""
return self._socket.is_alive()
def connect(self):
"""
Reconnects our control socket. This is a passthrough for our socket's
connect() method.
:raises: :class:`stem.socket.SocketError` if unable to make a socket
"""
self._socket.connect()
def close(self):
"""
Closes our socket connection. This is a passthrough for our socket's
:func:`stem.socket.ControlSocket.close` method.
"""
self._socket.close()
def get_socket(self):
"""
Provides the socket used to speak with the tor process. Communicating with
the socket directly isn't advised since it may confuse the controller.
:returns: :class:`stem.socket.ControlSocket` we're communicating with
"""
return self._socket
def add_status_listener(self, callback, spawn = True):
"""
Notifies a given function when the state of our socket changes. Functions
are expected to be of the form...
::
my_function(controller, state, timestamp)
The state is a value from stem.socket.State, functions **must** allow for
new values in this field. The timestamp is a float for the unix time when
    the change occurred.
This class only provides ``State.INIT`` and ``State.CLOSED`` notifications.
Subclasses may provide others.
    If spawn is True then the callback is notified via a new daemon thread. If
    False then the notice is under our locks, within the thread where the
    change occurred. In general this isn't advised, especially if your callback
could block for a while.
:param function callback: function to be notified when our state changes
:param bool spawn: calls function via a new thread if True, otherwise it's part of the connect/close method call
"""
with self._status_listeners_lock:
self._status_listeners.append((callback, spawn))
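  # Illustrative sketch (not part of stem): a minimal listener matching the
  # callback signature documented above...
  #
  #   def print_state_change(controller, state, timestamp):
  #     print "socket is now %s (at %s)" % (state, timestamp)
  #
  #   controller.add_status_listener(print_state_change)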
def remove_status_listener(self, callback):
"""
Stops listener from being notified of further events.
:param function callback: function to be removed from our listeners
    :returns: bool that's True if we removed one or more occurrences of the callback, False otherwise
"""
with self._status_listeners_lock:
new_listeners, is_changed = [], False
for listener, spawn in self._status_listeners:
if listener != callback:
new_listeners.append((listener, spawn))
else: is_changed = True
self._status_listeners = new_listeners
return is_changed
def __enter__(self):
return self
def __exit__(self, exit_type, value, traceback):
self.close()
def _handle_event(self, event_message):
"""
Callback to be overwritten by subclasses for event listening. This is
notified whenever we receive an event from the control socket.
:param stem.response.ControlMessage event_message: message received from the control socket
"""
pass
def _connect(self):
self._launch_threads()
self._notify_status_listeners(State.INIT, True)
self._socket_connect()
def _close(self):
# Our is_alive() state is now false. Our reader thread should already be
# awake from recv() raising a closure exception. Wake up the event thread
# too so it can end.
self._event_notice.set()
# joins on our threads if it's safe to do so
for t in (self._reader_thread, self._event_thread):
if t and t.is_alive() and threading.current_thread() != t:
t.join()
self._notify_status_listeners(State.CLOSED, False)
self._socket_close()
def _notify_status_listeners(self, state, expect_alive = None):
"""
    Informs our status listeners that a state change occurred.
    States imply that our socket is either alive or not, which may not hold
    true when multiple events occur in quick succession. For instance, a
    sighup could cause two events (``State.RESET`` for the sighup and
    ``State.CLOSE`` if it causes tor to crash). However, there's no guarantee
    of the order in which they occur, and it would be bad if listeners got the
``State.RESET`` last, implying that we were alive.
If set, the expect_alive flag will discard our event if it conflicts with
our current :func:`stem.control.BaseController.is_alive` state.
    :param stem.socket.State state: state change that has occurred
:param bool expect_alive: discard event if it conflicts with our :func:`stem.control.BaseController.is_alive` state
"""
# Any changes to our is_alive() state happen under the send lock, so we
# need to have it to ensure it doesn't change beneath us.
with self._socket._get_send_lock(), self._status_listeners_lock:
change_timestamp = time.time()
if expect_alive != None and expect_alive != self.is_alive():
return
for listener, spawn in self._status_listeners:
if spawn:
name = "%s notification" % state
args = (self, state, change_timestamp)
notice_thread = threading.Thread(target = listener, args = args, name = name)
notice_thread.setDaemon(True)
notice_thread.start()
else:
listener(self, state, change_timestamp)
def _launch_threads(self):
"""
Initializes daemon threads. Threads can't be reused so we need to recreate
them if we're restarted.
"""
    # In theory concurrent calls could result in multiple start() calls on a
    # single thread, which would cause an unexpected exception. Best be safe.
with self._socket._get_send_lock():
if not self._reader_thread or not self._reader_thread.is_alive():
self._reader_thread = threading.Thread(target = self._reader_loop, name = "Tor Listener")
self._reader_thread.setDaemon(True)
self._reader_thread.start()
if not self._event_thread or not self._event_thread.is_alive():
self._event_thread = threading.Thread(target = self._event_loop, name = "Event Notifier")
self._event_thread.setDaemon(True)
self._event_thread.start()
def _reader_loop(self):
"""
Continually pulls from the control socket, directing the messages into
queues based on their type. Controller messages come in two varieties...
* Responses to messages we've sent (GETINFO, SETCONF, etc).
* Asynchronous events, identified by a status code of 650.
"""
while self.is_alive():
try:
control_message = self._socket.recv()
if control_message.content()[-1][0] == "650":
# asynchronous message, adds to the event queue and wakes up its handler
self._event_queue.put(control_message)
self._event_notice.set()
else:
# response to a msg() call
self._reply_queue.put(control_message)
except stem.socket.ControllerError, exc:
# Assume that all exceptions belong to the reader. This isn't always
# true, but the msg() call can do a better job of sorting it out.
#
# Be aware that the msg() method relies on this to unblock callers.
self._reply_queue.put(exc)
def _event_loop(self):
"""
Continually pulls messages from the _event_queue and sends them to our
handle_event callback. This is done via its own thread so subclasses with a
lengthy handle_event implementation don't block further reading from the
socket.
"""
while True:
try:
event_message = self._event_queue.get_nowait()
self._handle_event(event_message)
except Queue.Empty:
if not self.is_alive(): break
self._event_notice.wait()
self._event_notice.clear()
class Controller(BaseController):
"""
Communicates with a control socket. This is built on top of the
BaseController and provides a more user friendly API for library users.
"""
def from_port(control_addr = "127.0.0.1", control_port = 9051):
"""
Constructs a ControlPort based Controller.
:param str control_addr: ip address of the controller
:param int control_port: port number of the controller
:returns: :class:`stem.control.Controller` attached to the given port
:raises: :class:`stem.socket.SocketError` if we're unable to establish a connection
"""
control_port = stem.socket.ControlPort(control_addr, control_port)
return Controller(control_port)
def from_socket_file(socket_path = "/var/run/tor/control"):
"""
Constructs a ControlSocketFile based Controller.
:param str socket_path: path where the control socket is located
:returns: :class:`stem.control.Controller` attached to the given socket file
:raises: :class:`stem.socket.SocketError` if we're unable to establish a connection
"""
control_socket = stem.socket.ControlSocketFile(socket_path)
return Controller(control_socket)
from_port = staticmethod(from_port)
from_socket_file = staticmethod(from_socket_file)
def get_info(self, param, default = UNDEFINED):
"""
Queries the control socket for the given GETINFO option. If provided a
default then that's returned if the GETINFO option is undefined or the
call fails for any reason (error response, control port closed, initiated,
etc).
:param str,list param: GETINFO option or options to be queried
:param object default: response if the query fails
:returns:
Response depends upon how we were called as follows...
* str with the response if our param was a str
* dict with the param => response mapping if our param was a list
* default if one was provided and our call failed
:raises: :class:`stem.socket.ControllerError` if the call fails, and we weren't provided a default response
"""
# TODO: add caching?
# TODO: special geoip handling?
# TODO: add logging, including call runtime
if isinstance(param, str):
is_multiple = False
param = [param]
else:
is_multiple = True
try:
response = self.msg("GETINFO %s" % " ".join(param))
stem.response.convert("GETINFO", response)
# error if we got back different parameters than we requested
requested_params = set(param)
reply_params = set(response.entries.keys())
if requested_params != reply_params:
requested_label = ", ".join(requested_params)
reply_label = ", ".join(reply_params)
raise stem.socket.ProtocolError("GETINFO reply doesn't match the parameters that we requested. Queried '%s' but got '%s'." % (requested_label, reply_label))
if is_multiple:
return response.entries
else:
return response.entries[param[0]]
except stem.socket.ControllerError, exc:
if default == UNDEFINED: raise exc
else: return default
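# Usage sketch (illustrative, not part of the stem source): connect over the
# default control port and issue a GETINFO query...
#
#   controller = Controller.from_port()
#   version = controller.get_info("version", default = "unknown")
#   controller.close()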
| meganchang/Stem | stem/control.py | Python | lgpl-3.0 | 17,273 |
# encoding: utf-8
"""
Utilities for working with strings and text.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import __main__
import os
import re
import shutil
import sys
import textwrap
from string import Formatter
from IPython.external.path import path
from IPython.testing.skipdoctest import skip_doctest_py3, skip_doctest
from IPython.utils import py3compat
from IPython.utils.io import nlprint
from IPython.utils.data import flatten
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
def unquote_ends(istr):
"""Remove a single pair of quotes from the endpoints of a string."""
if not istr:
return istr
if (istr[0]=="'" and istr[-1]=="'") or \
(istr[0]=='"' and istr[-1]=='"'):
return istr[1:-1]
else:
return istr
class LSString(str):
"""String derivative with a special access attributes.
These are normal strings, but with the special attributes:
.l (or .list) : value as list (split on newlines).
.n (or .nlstr): original value (the string itself).
.s (or .spstr): value as whitespace-separated string.
.p (or .paths): list of path objects
Any values which require transformations are computed only once and
cached.
Such strings are very useful to efficiently interact with the shell, which
typically only understands whitespace-separated options for commands."""
def get_list(self):
try:
return self.__list
except AttributeError:
self.__list = self.split('\n')
return self.__list
l = list = property(get_list)
def get_spstr(self):
try:
return self.__spstr
except AttributeError:
self.__spstr = self.replace('\n',' ')
return self.__spstr
s = spstr = property(get_spstr)
def get_nlstr(self):
return self
n = nlstr = property(get_nlstr)
def get_paths(self):
try:
return self.__paths
except AttributeError:
self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
return self.__paths
p = paths = property(get_paths)
# FIXME: We need to reimplement type specific displayhook and then add this
# back as a custom printer. This should also be moved outside utils into the
# core.
# def print_lsstring(arg):
# """ Prettier (non-repr-like) and more informative printer for LSString """
# print "LSString (.p, .n, .l, .s available). Value:"
# print arg
#
#
# print_lsstring = result_display.when_type(LSString)(print_lsstring)
class SList(list):
"""List derivative with a special access attributes.
These are normal lists, but with the special attributes:
.l (or .list) : value as list (the list itself).
.n (or .nlstr): value as a string, joined on newlines.
.s (or .spstr): value as a string, joined on spaces.
.p (or .paths): list of path objects
Any values which require transformations are computed only once and
cached."""
def get_list(self):
return self
l = list = property(get_list)
def get_spstr(self):
try:
return self.__spstr
except AttributeError:
self.__spstr = ' '.join(self)
return self.__spstr
s = spstr = property(get_spstr)
def get_nlstr(self):
try:
return self.__nlstr
except AttributeError:
self.__nlstr = '\n'.join(self)
return self.__nlstr
n = nlstr = property(get_nlstr)
def get_paths(self):
try:
return self.__paths
except AttributeError:
self.__paths = [path(p) for p in self if os.path.exists(p)]
return self.__paths
p = paths = property(get_paths)
def grep(self, pattern, prune = False, field = None):
""" Return all strings matching 'pattern' (a regex or callable)
This is case-insensitive. If prune is true, return all items
NOT matching the pattern.
If field is specified, the match must occur in the specified
whitespace-separated field.
Examples::
a.grep( lambda x: x.startswith('C') )
a.grep('Cha.*log', prune=1)
a.grep('chm', field=-1)
"""
def match_target(s):
if field is None:
return s
parts = s.split()
try:
tgt = parts[field]
return tgt
except IndexError:
return ""
if isinstance(pattern, basestring):
pred = lambda x : re.search(pattern, x, re.IGNORECASE)
else:
pred = pattern
if not prune:
return SList([el for el in self if pred(match_target(el))])
else:
return SList([el for el in self if not pred(match_target(el))])
def fields(self, *fields):
""" Collect whitespace-separated fields from string list
Allows quick awk-like usage of string lists.
Example data (in var a, created by 'a = !ls -l')::
-rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
(note the joining by space).
a.fields(-1) is ['ChangeLog', 'IPython']
IndexErrors are ignored.
Without args, fields() just split()'s the strings.
"""
if len(fields) == 0:
return [el.split() for el in self]
res = SList()
for el in [f.split() for f in self]:
lineparts = []
for fd in fields:
try:
lineparts.append(el[fd])
except IndexError:
pass
if lineparts:
res.append(" ".join(lineparts))
return res
    def sort(self, field=None, nums=False):
""" sort by specified fields (see fields())
Example::
a.sort(1, nums = True)
Sorts a by second field, in numerical order (so that 21 > 3)
"""
#decorate, sort, undecorate
if field is not None:
dsu = [[SList([line]).fields(field), line] for line in self]
else:
dsu = [[line, line] for line in self]
if nums:
for i in range(len(dsu)):
numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
try:
n = int(numstr)
except ValueError:
                    n = 0
dsu[i][0] = n
dsu.sort()
return SList([t[1] for t in dsu])
# FIXME: We need to reimplement type specific displayhook and then add this
# back as a custom printer. This should also be moved outside utils into the
# core.
# def print_slist(arg):
# """ Prettier (non-repr-like) and more informative printer for SList """
# print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
# if hasattr(arg, 'hideonce') and arg.hideonce:
# arg.hideonce = False
# return
#
# nlprint(arg)
#
# print_slist = result_display.when_type(SList)(print_slist)
def esc_quotes(strng):
"""Return the input string with single and double quotes escaped out"""
return strng.replace('"','\\"').replace("'","\\'")
def qw(words,flat=0,sep=None,maxsplit=-1):
"""Similar to Perl's qw() operator, but with some more options.
qw(words,flat=0,sep=' ',maxsplit=-1) -> words.split(sep,maxsplit)
words can also be a list itself, and with flat=1, the output will be
recursively flattened.
Examples:
>>> qw('1 2')
['1', '2']
>>> qw(['a b','1 2',['m n','p q']])
[['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
>>> qw(['a b','1 2',['m n','p q']],flat=1)
['a', 'b', '1', '2', 'm', 'n', 'p', 'q']
"""
if isinstance(words, basestring):
return [word.strip() for word in words.split(sep,maxsplit)
if word and not word.isspace() ]
if flat:
return flatten(map(qw,words,[1]*len(words)))
return map(qw,words)
def qwflat(words,sep=None,maxsplit=-1):
"""Calls qw(words) in flat mode. It's just a convenient shorthand."""
return qw(words,1,sep,maxsplit)
def qw_lol(indata):
"""qw_lol('a b') -> [['a','b']],
otherwise it's just a call to qw().
We need this to make sure the modules_some keys *always* end up as a
list of lists."""
if isinstance(indata, basestring):
return [qw(indata)]
else:
return qw(indata)
def grep(pat,list,case=1):
"""Simple minded grep-like function.
grep(pat,list) returns occurrences of pat in list, None on failure.
It only does simple string matching, with no support for regexps. Use the
option case=0 for case-insensitive matching."""
# This is pretty crude. At least it should implement copying only references
# to the original data in case it's big. Now it copies the data for output.
out=[]
if case:
for term in list:
if term.find(pat)>-1: out.append(term)
else:
lpat=pat.lower()
for term in list:
if term.lower().find(lpat)>-1: out.append(term)
if len(out): return out
else: return None
def dgrep(pat,*opts):
"""Return grep() on dir()+dir(__builtins__).
A very common use of grep() when working interactively."""
return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
def idgrep(pat):
"""Case-insensitive dgrep()"""
return dgrep(pat,0)
def igrep(pat,list):
"""Synonym for case-insensitive grep."""
return grep(pat,list,case=0)
def indent(instr,nspaces=4, ntabs=0, flatten=False):
"""Indent a string a given number of spaces or tabstops.
indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
Parameters
----------
instr : basestring
The string to be indented.
nspaces : int (default: 4)
The number of spaces to be indented.
ntabs : int (default: 0)
The number of tabs to be indented.
flatten : bool (default: False)
Whether to scrub existing indentation. If True, all lines will be
aligned to the same indentation. If False, existing indentation will
be strictly increased.
Returns
-------
str|unicode : string indented by ntabs and nspaces.
"""
if instr is None:
return
ind = '\t'*ntabs+' '*nspaces
if flatten:
pat = re.compile(r'^\s*', re.MULTILINE)
else:
pat = re.compile(r'^', re.MULTILINE)
outstr = re.sub(pat, ind, instr)
if outstr.endswith(os.linesep+ind):
return outstr[:-len(ind)]
else:
return outstr
def native_line_ends(filename,backup=1):
"""Convert (in-place) a file to line-ends native to the current OS.
If the optional backup argument is given as false, no backup of the
original file is left. """
backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
bak_filename = filename + backup_suffixes[os.name]
original = open(filename).read()
shutil.copy2(filename,bak_filename)
try:
new = open(filename,'wb')
new.write(os.linesep.join(original.splitlines()))
new.write(os.linesep) # ALWAYS put an eol at the end of the file
new.close()
except:
os.rename(bak_filename,filename)
if not backup:
try:
os.remove(bak_filename)
except:
pass
def list_strings(arg):
"""Always return a list of strings, given a string or list of strings
as input.
:Examples:
In [7]: list_strings('A single string')
Out[7]: ['A single string']
In [8]: list_strings(['A single string in a list'])
Out[8]: ['A single string in a list']
In [9]: list_strings(['A','list','of','strings'])
Out[9]: ['A', 'list', 'of', 'strings']
"""
if isinstance(arg,basestring): return [arg]
else: return arg
def marquee(txt='',width=78,mark='*'):
"""Return the input string centered in a 'marquee'.
:Examples:
In [16]: marquee('A test',40)
Out[16]: '**************** A test ****************'
In [17]: marquee('A test',40,'-')
Out[17]: '---------------- A test ----------------'
In [18]: marquee('A test',40,' ')
Out[18]: ' A test '
"""
if not txt:
return (mark*width)[:width]
nmark = (width-len(txt)-2)//len(mark)//2
    if nmark < 0: nmark = 0
marks = mark*nmark
return '%s %s %s' % (marks,txt,marks)
ini_spaces_re = re.compile(r'^(\s+)')
def num_ini_spaces(strng):
"""Return the number of initial spaces in a string"""
ini_spaces = ini_spaces_re.match(strng)
if ini_spaces:
return ini_spaces.end()
else:
return 0
def format_screen(strng):
"""Format a string for screen printing.
This removes some latex-type format codes."""
# Paragraph continue
par_re = re.compile(r'\\$',re.MULTILINE)
strng = par_re.sub('',strng)
return strng
def dedent(text):
"""Equivalent of textwrap.dedent that ignores unindented first line.
This means it will still dedent strings like:
'''foo
is a bar
'''
For use in wrap_paragraphs.
"""
if text.startswith('\n'):
# text starts with blank line, don't ignore the first line
return textwrap.dedent(text)
# split first line
splits = text.split('\n',1)
if len(splits) == 1:
# only one line
return textwrap.dedent(text)
first, rest = splits
# dedent everything but the first line
rest = textwrap.dedent(rest)
return '\n'.join([first, rest])
def wrap_paragraphs(text, ncols=80):
"""Wrap multiple paragraphs to fit a specified width.
This is equivalent to textwrap.wrap, but with support for multiple
paragraphs, as separated by empty lines.
Returns
-------
list of complete paragraphs, wrapped to fill `ncols` columns.
"""
paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
text = dedent(text).strip()
paragraphs = paragraph_re.split(text)[::2] # every other entry is space
out_ps = []
indent_re = re.compile(r'\n\s+', re.MULTILINE)
for p in paragraphs:
# presume indentation that survives dedent is meaningful formatting,
# so don't fill unless text is flush.
if indent_re.search(p) is None:
# wrap paragraph
p = textwrap.fill(p, ncols)
out_ps.append(p)
return out_ps
def long_substr(data):
"""Return the longest common substring in a list of strings.
Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python
"""
substr = ''
if len(data) > 1 and len(data[0]) > 0:
for i in range(len(data[0])):
for j in range(len(data[0])-i+1):
if j > len(substr) and all(data[0][i:i+j] in x for x in data):
substr = data[0][i:i+j]
elif len(data) == 1:
substr = data[0]
return substr
def strip_email_quotes(text):
"""Strip leading email quotation characters ('>').
Removes any combination of leading '>' interspersed with whitespace that
appears *identically* in all lines of the input text.
Parameters
----------
text : str
Examples
--------
Simple uses::
In [2]: strip_email_quotes('> > text')
Out[2]: 'text'
In [3]: strip_email_quotes('> > text\\n> > more')
Out[3]: 'text\\nmore'
Note how only the common prefix that appears in all lines is stripped::
In [4]: strip_email_quotes('> > text\\n> > more\\n> more...')
Out[4]: '> text\\n> more\\nmore...'
So if any line has no quote marks ('>') , then none are stripped from any
of them ::
In [5]: strip_email_quotes('> > text\\n> > more\\nlast different')
Out[5]: '> > text\\n> > more\\nlast different'
"""
lines = text.splitlines()
matches = set()
for line in lines:
prefix = re.match(r'^(\s*>[ >]*)', line)
if prefix:
matches.add(prefix.group(1))
else:
break
else:
prefix = long_substr(list(matches))
if prefix:
strip = len(prefix)
text = '\n'.join([ ln[strip:] for ln in lines])
return text
class EvalFormatter(Formatter):
"""A String Formatter that allows evaluation of simple expressions.
Note that this version interprets a : as specifying a format string (as per
standard string formatting), so if slicing is required, you must explicitly
create a slice.
This is to be used in templating cases, such as the parallel batch
script templates, where simple arithmetic on arguments is useful.
Examples
--------
In [1]: f = EvalFormatter()
In [2]: f.format('{n//4}', n=8)
Out [2]: '2'
In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
Out [3]: 'll'
"""
def get_field(self, name, args, kwargs):
v = eval(name, kwargs)
return v, name
@skip_doctest_py3
class FullEvalFormatter(Formatter):
"""A String Formatter that allows evaluation of simple expressions.
Any time a format key is not found in the kwargs,
it will be tried as an expression in the kwargs namespace.
Note that this version allows slicing using [1:2], so you cannot specify
a format string. Use :class:`EvalFormatter` to permit format strings.
Examples
--------
In [1]: f = FullEvalFormatter()
In [2]: f.format('{n//4}', n=8)
Out[2]: u'2'
In [3]: f.format('{list(range(5))[2:4]}')
Out[3]: u'[2, 3]'
In [4]: f.format('{3*2}')
Out[4]: u'6'
"""
# copied from Formatter._vformat with minor changes to allow eval
# and replace the format_spec code with slicing
def _vformat(self, format_string, args, kwargs, used_args, recursion_depth):
if recursion_depth < 0:
raise ValueError('Max string recursion exceeded')
result = []
for literal_text, field_name, format_spec, conversion in \
self.parse(format_string):
# output the literal text
if literal_text:
result.append(literal_text)
# if there's a field, output it
if field_name is not None:
# this is some markup, find the object and do
# the formatting
if format_spec:
# override format spec, to allow slicing:
field_name = ':'.join([field_name, format_spec])
# eval the contents of the field for the object
# to be formatted
obj = eval(field_name, kwargs)
# do any conversion on the resulting object
obj = self.convert_field(obj, conversion)
# format the object and append to the result
result.append(self.format_field(obj, ''))
return u''.join(py3compat.cast_unicode(s) for s in result)
@skip_doctest_py3
class DollarFormatter(FullEvalFormatter):
"""Formatter allowing Itpl style $foo replacement, for names and attribute
access only. Standard {foo} replacement also works, and allows full
evaluation of its arguments.
Examples
--------
In [1]: f = DollarFormatter()
In [2]: f.format('{n//4}', n=8)
Out[2]: u'2'
In [3]: f.format('23 * 76 is $result', result=23*76)
Out[3]: u'23 * 76 is 1748'
In [4]: f.format('$a or {b}', a=1, b=2)
Out[4]: u'1 or 2'
"""
_dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
def parse(self, fmt_string):
for literal_txt, field_name, format_spec, conversion \
in Formatter.parse(self, fmt_string):
# Find $foo patterns in the literal text.
continue_from = 0
txt = ""
for m in self._dollar_pattern.finditer(literal_txt):
new_txt, new_field = m.group(1,2)
# $$foo --> $foo
if new_field.startswith("$"):
txt += new_txt + new_field
else:
yield (txt + new_txt, new_field, "", None)
txt = ""
continue_from = m.end()
# Re-yield the {foo} style pattern
yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
#-----------------------------------------------------------------------------
# Utils to columnize a list of string
#-----------------------------------------------------------------------------
def _chunks(l, n):
"""Yield successive n-sized chunks from l."""
for i in xrange(0, len(l), n):
yield l[i:i+n]
def _find_optimal(rlist, separator_size=2, displaywidth=80):
    """Calculate optimal info to columnize a list of strings"""
    for nrow in range(1, len(rlist)+1):
chk = map(max,_chunks(rlist, nrow))
sumlength = sum(chk)
ncols = len(chk)
if sumlength+separator_size*(ncols-1) <= displaywidth :
            break
return {'columns_numbers' : ncols,
'optimal_separator_width':(displaywidth - sumlength)/(ncols-1) if (ncols -1) else 0,
'rows_numbers' : nrow,
'columns_width' : chk
}
def _get_or_default(mylist, i, default=None):
    """Return item number `i` from the list, or `default` if it doesn't exist."""
    if i >= len(mylist):
        return default
    else:
        return mylist[i]
@skip_doctest
def compute_item_matrix(items, empty=None, *args, **kwargs):
    """Returns a nested list, and info to columnize items
    Parameters
    ----------
    items :
        list of strings to columnize
    empty : (default None)
        default value used to fill the list if needed
    separator_size : int (default=2)
        How many characters will be used as separation between each column.
    displaywidth : int (default=80)
        The width of the area into which the columns must fit
    Returns
    -------
    Returns a tuple of (strings_matrix, dict_info)
    strings_matrix :
        nested list of strings; the outermost list contains as many lists as
        rows, and each innermost list has as many elements as columns. If the
        total number of elements in `items` does not equal the product of
        rows*columns, the last elements of some lists are filled with `None`.
    dict_info :
        some info to make columnize easier:
        columns_numbers : number of columns
        rows_numbers : number of rows
        columns_width : list of the width of each column
        optimal_separator_width : best separator width between columns
    Example
    -------
In [1]: l = ['aaa','b','cc','d','eeeee','f','g','h','i','j','k','l']
...: compute_item_matrix(l,displaywidth=12)
Out[1]:
([['aaa', 'f', 'k'],
['b', 'g', 'l'],
['cc', 'h', None],
['d', 'i', None],
['eeeee', 'j', None]],
{'columns_numbers': 3,
'columns_width': [5, 1, 1],
'optimal_separator_width': 2,
'rows_numbers': 5})
"""
info = _find_optimal(map(len, items), *args, **kwargs)
nrow, ncol = info['rows_numbers'], info['columns_numbers']
return ([[ _get_or_default(items, c*nrow+i, default=empty) for c in range(ncol) ] for i in range(nrow) ], info)
def columnize(items, separator=' ', displaywidth=80):
""" Transform a list of strings into a single string with columns.
Parameters
----------
items : sequence of strings
The strings to process.
separator : str, optional [default is two spaces]
The string that separates columns.
displaywidth : int, optional [default is 80]
Width of the display in number of characters.
Returns
-------
The formatted string.
"""
if not items :
return '\n'
matrix, info = compute_item_matrix(items, separator_size=len(separator), displaywidth=displaywidth)
fmatrix = [filter(None, x) for x in matrix]
sjoin = lambda x : separator.join([ y.ljust(w, ' ') for y, w in zip(x, info['columns_width'])])
return '\n'.join(map(sjoin, fmatrix))+'\n'
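# Usage sketch (illustrative):
#
#   print columnize(['alpha', 'beta', 'gamma', 'delta'], displaywidth=20)
#   # alpha gamma
#   # beta  delta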
| cloud9ers/gurumate | environment/lib/python2.7/site-packages/IPython/utils/text.py | Python | lgpl-3.0 | 25,044 |
from __future__ import division, absolute_import
from __future__ import print_function, unicode_literals
import numpy as np
import theano
import theano.tensor as T
import treeano
import treeano.nodes as tn
fX = theano.config.floatX
# ################################## config ##################################
N_TRAIN = 5000
LAG = 20
LENGTH = 50
HIDDEN_STATE_SIZE = 10
# ############################### prepare data ###############################
def binary_toy_data(lag=1, length=20):
inputs = np.random.randint(0, 2, length).astype(fX)
outputs = np.array(lag * [0] + list(inputs), dtype=fX)[:length]
return inputs, outputs
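# For instance (illustrative values), with lag=2 and length=6 one draw could be
#   inputs  = [1, 0, 1, 1, 0, 0]
#   outputs = [0, 0, 1, 0, 1, 1]
# i.e. the targets are the inputs delayed by `lag` steps, zero-padded in front.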
# ############################## prepare model ##############################
model = tn.HyperparameterNode(
"model",
tn.SequentialNode(
"seq",
[tn.InputNode("x", shape=(None, 1)),
tn.recurrent.SimpleRecurrentNode(
"srn",
tn.TanhNode("nonlin"),
batch_size=None,
num_units=HIDDEN_STATE_SIZE),
tn.scan.ScanNode(
"scan",
tn.DenseNode("fc", num_units=1)),
tn.SigmoidNode("pred"),
]),
inits=[treeano.inits.NormalWeightInit(0.01)],
batch_axis=None,
scan_axis=0
)
with_updates = tn.HyperparameterNode(
"with_updates",
tn.SGDNode(
"adam",
{"subtree": model,
"cost": tn.TotalCostNode("cost", {
"pred": tn.ReferenceNode("pred_ref", reference="model"),
"target": tn.InputNode("y", shape=(None, 1))},
)}),
learning_rate=0.1,
cost_function=treeano.utils.squared_error,
)
network = with_updates.network()
train_fn = network.function(["x", "y"], ["cost"], include_updates=True)
valid_fn = network.function(["x"], ["model"])
# ################################# training #################################
print("Starting training...")
import time
st = time.time()
for i in range(N_TRAIN):
inputs, outputs = binary_toy_data(lag=LAG, length=LENGTH)
loss = train_fn(inputs.reshape(-1, 1), outputs.reshape(-1, 1))[0]
if (i % (N_TRAIN // 100)) == 0:
print(loss)
print("total_time: %s" % (time.time() - st))
inputs, outputs = binary_toy_data(lag=LAG, length=LENGTH)
pred = valid_fn(inputs.reshape(-1, 1))[0].flatten()
print(np.round(pred) == outputs)
| diogo149/treeano | examples/simple_rnn_comparison/with_treeano.py | Python | apache-2.0 | 2,330 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is subject to the terms and conditions defined in
# file 'LICENSE.md', which is part of this source code package.
#
from kubernetes_py.K8sExceptions import NotFoundException
from kubernetes_py.K8sObject import K8sObject
from kubernetes_py.K8sPod import K8sPod
from kubernetes_py.models.v1beta1.ReplicaSet import ReplicaSet
class K8sReplicaSet(K8sObject):
"""
http://kubernetes.io/docs/api-reference/extensions/v1beta1/definitions/#_v1beta1_replicaset
"""
REVISION_ANNOTATION = "deployment.kubernetes.io/revision"
REVISION_HISTORY_ANNOTATION = "deployment.kubernetes.io/revision-history"
def __init__(self, config=None, name=None):
super(K8sReplicaSet, self).__init__(config=config, obj_type="ReplicaSet", name=name)
# ------------------------------------------------------------------------------------- override
def get(self):
self.model = ReplicaSet(self.get_model())
return self
def list(self, pattern=None, reverse=True, labels=None):
ls = super(K8sReplicaSet, self).list(labels=labels)
rsets = list(map(lambda x: ReplicaSet(x), ls))
if pattern is not None:
rsets = list(filter(lambda x: pattern in x.name, rsets))
k8s = []
for x in rsets:
j = K8sReplicaSet(config=self.config, name=x.name).from_model(m=x)
k8s.append(j)
k8s.sort(key=lambda x: x.creation_timestamp, reverse=reverse)
return k8s
def delete(self, cascade=False):
super(K8sReplicaSet, self).delete(cascade)
if cascade:
pods = K8sPod(config=self.config, name="yo").list(pattern=self.name)
for pod in pods:
try:
pod.delete(cascade)
except NotFoundException:
pass
return self
# ------------------------------------------------------------------------------------- revision
@property
def revision(self):
if self.REVISION_ANNOTATION in self.model.metadata.annotations:
return self.model.metadata.annotations[self.REVISION_ANNOTATION]
return None
@revision.setter
def revision(self, r=None):
raise NotImplementedError("K8sReplicaSet: revision is read-only.")
# ------------------------------------------------------------------------------------- revision history
@property
def revision_history(self):
if self.REVISION_HISTORY_ANNOTATION in self.model.metadata.annotations:
comma_string = self.model.metadata.annotations[self.REVISION_HISTORY_ANNOTATION]
version_array = comma_string.split(",")
return map(lambda x: int(x), version_array)
return None
@revision_history.setter
def revision_history(self, r=None):
raise NotImplementedError("K8sReplicaSet: revision_history is read-only.")
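# Usage sketch (illustrative; `cfg` is a hypothetical, already-configured
# K8sConfig instance):
#
#   for rs in K8sReplicaSet(config=cfg, name="unused").list(pattern="frontend"):
#       print(rs.revision)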
| mnubo/kubernetes-py | kubernetes_py/K8sReplicaSet.py | Python | apache-2.0 | 2,940 |
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from unittest import TestCase
from resource_management.libraries.functions import get_port_from_url
from resource_management.core.exceptions import Fail
class TestLibraryFunctions(TestCase):
def test_get_port_from_url(self):
self.assertEqual("8080",get_port_from_url("protocol://host:8080"))
self.assertEqual("8080",get_port_from_url("protocol://host:8080/"))
self.assertEqual("8080",get_port_from_url("host:8080"))
self.assertEqual("8080",get_port_from_url("host:8080/"))
self.assertEqual("8080",get_port_from_url("host:8080/dots_in_url8888:"))
self.assertEqual("8080",get_port_from_url("protocol://host:8080/dots_in_url8888:"))
self.assertEqual("8080",get_port_from_url("127.0.0.1:8080"))
self.assertRaises(Fail, get_port_from_url, "http://host/no_port")
self.assertRaises(Fail, get_port_from_url, "127.0.0.1:808080")
| arenadata/ambari | ambari-agent/src/test/python/resource_management/TestLibraryFunctions.py | Python | apache-2.0 | 1,624 |
'''
This module sets up a scheme for validating that arbitrary Python
objects are correctly typed. It is totally decoupled from Django,
composable, easily wrapped, and easily extended.
A validator takes two parameters--var_name and val--and returns an
error if val is not the correct type. The var_name parameter is used
to format error messages. Validators return None when there are no errors.
Example primitive validators are check_string, check_int, and check_bool.
Compound validators are created by check_list and check_dict. Note that
those functions aren't directly called for validation; instead, those
functions are called to return other functions that adhere to the validator
contract. This is similar to how Python decorators are often parameterized.
The contract for check_list and check_dict is that they get passed in other
validators to apply to their items. This allows you to build up validators
for arbitrarily complex validators. See ValidatorTestCase for example usage.
A simple example of composition is this:
check_list(check_string)('my_list', ['a', 'b', 'c']) == None
To extend this concept, it's simply a matter of writing your own validator
for any particular type of object.
'''
from __future__ import absolute_import
import six
def check_string(var_name, val):
if not isinstance(val, six.string_types):
return '%s is not a string' % (var_name,)
return None
def check_int(var_name, val):
if not isinstance(val, int):
return '%s is not an integer' % (var_name,)
return None
def check_bool(var_name, val):
if not isinstance(val, bool):
return '%s is not a boolean' % (var_name,)
return None
def check_none_or(sub_validator):
def f(var_name, val):
if val is None:
return None
else:
return sub_validator(var_name, val)
return f
def check_list(sub_validator, length=None):
def f(var_name, val):
if not isinstance(val, list):
return '%s is not a list' % (var_name,)
if length is not None and length != len(val):
return '%s should have exactly %d items' % (var_name, length)
if sub_validator:
for i, item in enumerate(val):
vname = '%s[%d]' % (var_name, i)
error = sub_validator(vname, item)
if error:
return error
return None
return f
def check_dict(required_keys):
# required_keys is a list of tuples of
# key_name/validator
def f(var_name, val):
if not isinstance(val, dict):
return '%s is not a dict' % (var_name,)
for k, sub_validator in required_keys:
if k not in val:
return '%s key is missing from %s' % (k, var_name)
vname = '%s["%s"]' % (var_name, k)
error = sub_validator(vname, val[k])
if error:
return error
return None
return f
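# Example (illustrative): validators compose, so a list of dicts shaped like
# {'name': str, 'count': int} can be checked with...
#
#   checker = check_list(check_dict([('name', check_string),
#                                    ('count', check_int)]))
#   checker('widgets', [{'name': 'a', 'count': 1}])  # None on success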
def check_variable_type(allowed_type_funcs):
"""
Use this validator if an argument is of a variable type (e.g. processing
properties that might be strings or booleans).
`allowed_type_funcs`: the check_* validator functions for the possible data
types for this variable.
"""
def enumerated_type_check(var_name, val):
for func in allowed_type_funcs:
if not func(var_name, val):
return None
return '%s is not an allowed_type' % (var_name,)
return enumerated_type_check
def equals(expected_val):
def f(var_name, val):
if val != expected_val:
return '%s != %r (%r is wrong)' % (var_name, expected_val, val)
return None
return f
| dwrpayne/zulip | zerver/lib/validator.py | Python | apache-2.0 | 3,704 |
#!/usr/bin/python
"""
Program for generating CSV timing data from event logs
"""
import os
import sys
import json
import time
from readevtlog import *
def imaging_iters(logs):
start_time = 40.0
start_msg = "kernel init"
end_msg = "imaging cleanup"
got_start = False
for k in sorted(logs):
tt = logs[k].time
for e in tt :
if e.msg == start_msg:
start = e.t1
got_start = True
if got_start and e.msg == end_msg:
print e.t2-start, ",",
print ""
data_commands = {
"imaging_iters" : imaging_iters,
}
# Get parameters
cmd = sys.argv[1]
nm = sys.argv[2]
# Open input files
logs = read_timelines(nm)
# Write table
data_commands[cmd](logs)
| SKA-ScienceDataProcessor/RC | MS6/visualize/csv_generator.py | Python | apache-2.0 | 744 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from oslo_service import threadgroup
from blazar.db import api as db_api
LOG = logging.getLogger(__name__)
class BaseMonitor(object):
"""Base class for monitoring classes."""
def __init__(self, monitor_plugins):
self.monitor_plugins = monitor_plugins
self.tg = threadgroup.ThreadGroup()
self.healing_timers = []
def start_monitoring(self):
"""Start monitoring."""
self.start_periodic_healing()
def stop_monitoring(self):
"""Stop monitoring."""
self.stop_periodic_healing()
def start_periodic_healing(self):
"""Start periodic healing process."""
for plugin in self.monitor_plugins:
healing_interval_mins = plugin.get_healing_interval()
if healing_interval_mins > 0:
self.healing_timers.append(
self.tg.add_timer(healing_interval_mins * 60,
self.call_monitor_plugin,
None,
plugin.heal))
def stop_periodic_healing(self):
"""Stop periodic healing process."""
for timer in self.healing_timers:
self.tg.timer_done(timer)
def call_monitor_plugin(self, callback, *args, **kwargs):
"""Call a callback and update lease/reservation flags."""
# This method has to handle any exception internally. It shouldn't
        # raise an exception because a timer thread in the BaseMonitor class
        # terminates its execution once it receives any exception.
try:
# The callback() has to return a dictionary of
# {reservation id: flags to update}.
# e.g. {'dummyid': {'missing_resources': True}}
reservation_flags = callback(*args, **kwargs)
if reservation_flags:
self._update_flags(reservation_flags)
except Exception as e:
LOG.exception('Caught an exception while executing a callback. '
'%s', str(e))
def _update_flags(self, reservation_flags):
"""Update lease/reservation flags."""
lease_ids = set([])
for reservation_id, flags in reservation_flags.items():
db_api.reservation_update(reservation_id, flags)
LOG.debug('Reservation %s was updated: %s',
reservation_id, flags)
reservation = db_api.reservation_get(reservation_id)
lease_ids.add(reservation['lease_id'])
for lease_id in lease_ids:
LOG.debug('Lease %s was updated: {"degraded": True}', lease_id)
db_api.lease_update(lease_id, {'degraded': True})
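# Usage sketch (illustrative; `plugins` is a hypothetical list of monitor
# plugins implementing get_healing_interval() and heal()):
#
#   monitor = BaseMonitor(plugins)
#   monitor.start_monitoring()
#   ...
#   monitor.stop_monitoring()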
| stackforge/blazar | blazar/monitor/base.py | Python | apache-2.0 | 3,283 |
# ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
For machine generated datasets.
"""
import numpy as np
from neon import NervanaObject
class Task(NervanaObject):
"""
Base class from which ticker tasks inherit.
"""
def fetch_io(self, time_steps):
"""
Generate inputs, outputs numpy tensor
pair of size appropriate for this minibatch
"""
columns = time_steps * self.be.bsz
inputs = np.zeros((self.nin, columns))
outputs = np.zeros((self.nout, columns))
return inputs, outputs
def fill_buffers(self, time_steps, inputs, outputs, in_tensor, out_tensor, mask):
"""
Do some logistical stuff to get our numpy arrays safely to device.
This can almost certainly be cleaned up.
"""
# Put inputs and outputs, which are too small, into properly shaped arrays
columns = time_steps * self.be.bsz
inC = np.zeros((self.nin, self.max_columns))
outC = np.zeros((self.nout, self.max_columns))
inC[:, :columns] = inputs
outC[:, :columns] = outputs
# Copy those arrays to device
in_tensor.set(inC)
out_tensor.set(outC)
# Set a mask over the unused part of the buffer
mask[:, :columns] = 1
mask[:, columns:] = 0
class CopyTask(Task):
"""
The copy task from the Neural Turing Machines paper:
http://arxiv.org/abs/1410.5401
This version of the task is batched.
All sequences in the same mini-batch are the same length,
but every new minibatch has a randomly chosen minibatch length.
    When a given minibatch has length < seq_len_max, we mask the outputs
    for time steps beyond that minibatch's actual number of time steps.
The generated data is laid out in the same way as other RNN data in neon.
"""
def __init__(self, seq_len_max, vec_size):
"""
Set up the attributes that Ticker needs to see.
Args:
seq_len_max (int): longest allowable sequence length
vec_size (int): width of the bit-vector to be copied (was 8 in paper)
"""
self.seq_len_max = seq_len_max
self.vec_size = vec_size
self.nout = self.vec_size # output has the same dimension as the underlying bit vector
self.nin = self.vec_size + 2 # input has more dims (for the start and stop channels)
self.time_steps_func = lambda l: 2 * l + 2
        self.time_steps_max = self.time_steps_func(self.seq_len_max)
self.max_columns = self.time_steps_max * self.be.bsz
def synthesize(self, in_tensor, out_tensor, mask):
"""
Create a new minibatch of ticker copy task data.
Args:
in_tensor: device buffer holding inputs
out_tensor: device buffer holding outputs
mask: device buffer for the output mask
"""
# All sequences in a minibatch are the same length for convenience
seq_len = np.random.randint(1, self.seq_len_max + 1)
time_steps = self.time_steps_func(seq_len)
# Generate intermediate buffers of the right size
inputs, outputs = super(CopyTask, self).fetch_io(time_steps)
# Set the start bit
inputs[-2, :self.be.bsz] = 1
# Generate the sequence to be copied
seq = np.random.randint(2,
size=(self.vec_size,
seq_len * self.be.bsz))
# Set the stop bit
stop_loc = self.be.bsz * (seq_len + 1)
inputs[-1, stop_loc:stop_loc + self.be.bsz] = 1
# Place the actual sequence to copy in inputs
inputs[:self.vec_size, self.be.bsz:stop_loc] = seq
# Now place that same sequence in a different place in outputs
outputs[:, self.be.bsz * (seq_len + 2):] = seq
# Fill the device minibatch buffers
super(CopyTask, self).fill_buffers(time_steps, inputs, outputs,
in_tensor, out_tensor, mask)
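    # Illustrative layout sketch (not from the neon source) for one sequence
    # with seq_len=2, so time_steps = 2*2 + 2 = 6:
    #   time step:  0      1-2    3     4-5
    #   input:      start  seq    stop  zeros
    #   output:     zeros  zeros  zeros seq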
class RepeatCopyTask(Task):
"""
The repeat copy task from the Neural Turing Machines paper:
http://arxiv.org/abs/1410.5401
See comments on CopyTask class for more details.
"""
def __init__(self, seq_len_max, repeat_count_max, vec_size):
"""
Set up the attributes that Ticker needs to see.
Args:
seq_len_max (int): longest allowable sequence length
repeat_count_max (int): max number of repeats
vec_size (int): width of the bit-vector to be copied (was 8 in paper)
"""
self.seq_len_max = seq_len_max
        self.repeat_count_max = repeat_count_max
self.vec_size = vec_size
self.nout = self.vec_size + 1 # we output the sequence and a stop bit in a stop channel
self.nin = self.vec_size + 2 # input has more dims (for the start and stop channels)
# seq is seen once as input, repeat_count times as output, with a
# start bit, stop bit, and output stop bit
self.time_steps_func = lambda l, r: l * (r + 1) + 3
self.time_steps_max = self.time_steps_func(self.seq_len_max, self.repeat_count_max)
self.max_columns = self.time_steps_max * self.be.bsz
def synthesize(self, in_tensor, out_tensor, mask):
"""
Create a new minibatch of ticker repeat copy task data.
Args:
in_tensor: device buffer holding inputs
out_tensor: device buffer holding outputs
mask: device buffer for the output mask
"""
# All sequences in a minibatch are the same length for convenience
seq_len = np.random.randint(1, self.seq_len_max + 1)
repeat_count = np.random.randint(1, self.repeat_count_max + 1)
time_steps = self.time_steps_func(seq_len, repeat_count)
# Get the minibatch specific numpy buffers
inputs, outputs = super(RepeatCopyTask, self).fetch_io(time_steps)
# Set the start bit
inputs[-2, :self.be.bsz] = 1
# Generate the sequence to be copied
seq = np.random.randint(2,
size=(self.vec_size,
seq_len * self.be.bsz))
# Set the repeat count
# TODO: should we normalize repeat count?
stop_loc = self.be.bsz * (seq_len + 1)
inputs[-1, stop_loc:stop_loc + self.be.bsz] = repeat_count
# Place the actual sequence to copy in inputs
inputs[:self.vec_size, self.be.bsz:stop_loc] = seq
# Now place that same sequence repeat_copy times in outputs
for i in range(repeat_count):
start = self.be.bsz * ((i + 1) * seq_len + 2)
stop = start + seq_len * self.be.bsz
outputs[:-1, start:stop] = seq
# Place the output finish bit
outputs[-1, -self.be.bsz:] = 1
# Fill the device minibatch buffers
super(RepeatCopyTask, self).fill_buffers(time_steps, inputs, outputs,
in_tensor, out_tensor, mask)
class PrioritySortTask(Task):
"""
The priority sort task from the Neural Turing Machines paper:
http://arxiv.org/abs/1410.5401
See comments on CopyTask class for more details.
"""
def __init__(self, seq_len_max, vec_size):
"""
Set up the attributes that Ticker needs to see.
Args:
seq_len_max (int): longest allowable sequence length
vec_size (int): width of the bit-vector to be copied (was 8 in paper)
"""
self.seq_len_max = seq_len_max
self.vec_size = vec_size
self.nout = self.vec_size # we output the sorted sequence, with no stop bit
self.nin = self.vec_size + 3 # extra channels for start, stop, and priority
# seq is seen once as input with start and stop bits
# then we output seq in sorted order
self.time_steps_func = lambda l: 2 * l + 2
self.time_steps_max = self.time_steps_func(self.seq_len_max)
self.max_columns = self.time_steps_max * self.be.bsz
def synthesize(self, in_tensor, out_tensor, mask):
"""
Create a new minibatch of ticker priority sort task data.
Args:
in_tensor: device buffer holding inputs
out_tensor: device buffer holding outputs
mask: device buffer for the output mask
"""
# All sequences in a minibatch are the same length for convenience
seq_len = np.random.randint(1, self.seq_len_max + 1)
time_steps = self.time_steps_func(seq_len)
# Get the minibatch specific numpy buffers
inputs, outputs = super(PrioritySortTask, self).fetch_io(time_steps)
# Set the start bit
inputs[-3, :self.be.bsz] = 1
# Generate the sequence to be copied
seq = np.random.randint(2,
size=(self.nin,
seq_len * self.be.bsz)).astype(float)
# Zero out the start, stop, and priority channels
seq[-3:, :] = 0
# Generate the scalar priorities and put them in seq
priorities = np.random.uniform(-1, 1, size=(seq_len * self.be.bsz,))
seq[-1, :] = priorities
# Set the stop bit
stop_loc = self.be.bsz * (seq_len + 1)
inputs[-2, stop_loc:stop_loc + self.be.bsz] = 1
# Place the actual sequence to copy in inputs
inputs[:, self.be.bsz:stop_loc] = seq
# sort the sequences
for i in range(self.be.bsz):
# for every sequence in the batch
# x <- every column in the sequence
x = seq[:, i::self.be.bsz]
            # sort that set of columns by the element in the last row (the priority)
x = x[:, x[-1, :].argsort()]
# put those columns back into minibatch in the right places
seq[:, i::self.be.bsz] = x
outputs[:, self.be.bsz * (seq_len + 2):] = seq[:self.nout, :]
# Fill the device minibatch buffers
super(PrioritySortTask, self).fill_buffers(time_steps, inputs, outputs,
in_tensor, out_tensor, mask)
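# Minimal sketch (shapes assumed, not part of the original module) of the
# interleaved-column sort performed above: sequence i of the minibatch
# lives in columns i::bsz, and each sequence is sorted by its priority row.
if __name__ == '__main__':
    bsz, demo_len = 2, 3
    demo = np.random.uniform(-1, 1, size=(4, demo_len * bsz))
    for i in range(bsz):
        x = demo[:, i::bsz]
        demo[:, i::bsz] = x[:, x[-1, :].argsort()]
    for i in range(bsz):
        # within each sequence the priority row is now non-decreasing
        assert np.all(np.diff(demo[-1, i::bsz]) >= 0)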
class Ticker(NervanaObject):
"""
This class defines methods for generating and iterating over ticker datasets.
"""
def reset(self):
"""
Reset has no meaning in the context of ticker data.
"""
pass
def __init__(self, task):
"""
Construct a ticker dataset object.
Args:
            task (Task): an object representing the task to be trained on.
                It carries information about input and output size,
                sequence length, etc. It also implements a synthesize
                function, which is used to generate the next minibatch
                of data.
"""
self.task = task
# These attributes don't make much sense in the context of tickers
# but I suspect it will be hard to get rid of them
self.batch_index = 0
self.nbatches = 100
self.ndata = self.nbatches * self.be.bsz
# Alias these because other code relies on datasets having nin and nout
self.nout = task.nout
self.nin = task.nin
# Configuration elsewhere relies on the existence of this
self.shape = (self.nin, self.task.time_steps_max)
# Initialize the inputs, the outputs, and the mask
self.dev_X = self.be.iobuf((self.nin, self.task.time_steps_max))
self.dev_y = self.be.iobuf((self.nout, self.task.time_steps_max))
self.mask = self.be.iobuf((self.nout, self.task.time_steps_max))
def __iter__(self):
"""
Generator that can be used to iterate over this dataset.
Yields:
tuple : the next minibatch of data.
The second element of the tuple is itself a tuple (t,m) with:
t: the actual target as generated by the task object
                m: the output mask to account for the difference between
                   the seq_len for this minibatch and the max seq_len,
                   which is also the number of columns in X, t, and m
"""
self.batch_index = 0
while self.batch_index < self.nbatches:
# The task object writes minibatch data into buffers we pass it
self.task.synthesize(self.dev_X, self.dev_y, self.mask)
self.batch_index += 1
yield self.dev_X, (self.dev_y, self.mask)
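# Hedged usage sketch (not part of the original module): a neon backend
# must be generated first so that NervanaObject.be exists; the task
# parameters below are illustrative.
if __name__ == '__main__':
    from neon.backends import gen_backend
    gen_backend(backend='cpu', batch_size=16)
    ticker = Ticker(RepeatCopyTask(seq_len_max=8, repeat_count_max=3,
                                   vec_size=8))
    for X, (y, mask) in ticker:
        pass  # feed X, y and mask to a recurrent model here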
| coufon/neon-distributed | neon/data/ticker.py | Python | apache-2.0 | 13,214 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test base for tf.keras Models in multi-worker mode."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
# pylint: disable=g-direct-tensorflow-import
from tensorflow.contrib.distribute.python import collective_all_reduce_strategy as collective_strategy
from tensorflow.contrib.distribute.python import parameter_server_strategy
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import distribute_coordinator as dc
from tensorflow.python.distribute import multi_worker_test_base
from tensorflow.python.eager import context
from tensorflow.python.platform import test
_original_run_std_server = dc._run_std_server # pylint: disable=protected-access
# Used as a decorator on test methods.
run_sync_strategies = combinations.generate(
combinations.combine(
mode=['graph'],
strategy_cls=[
collective_strategy.CollectiveAllReduceStrategy,
],
required_gpus=[0, 1]))
# Used as a decorator on test methods.
run_async_strategies = combinations.generate(
combinations.combine(
mode=['graph'],
strategy_cls=[parameter_server_strategy.ParameterServerStrategy],
required_gpus=[0, 1]))
def get_strategy_object(strategy_cls):
return strategy_cls(num_gpus_per_worker=context.num_gpus())
# TODO(omalleyt): Merge with keras_multiworker_callback_test
class KerasIndependentWorkerTestBase(
multi_worker_test_base.IndependentWorkerTestBase):
"""Test base for simulating Keras Multi-Worker in threads."""
def _make_mock_run_std_server(self):
thread_local = threading.local()
def _mock_run_std_server(*args, **kwargs):
ret = _original_run_std_server(*args, **kwargs)
# Wait for all std servers to be brought up in order to reduce the chance
# of remote sessions taking local ports that have been assigned to std
# servers. Only call this barrier the first time this function is run for
# each thread.
if not getattr(thread_local, 'server_started', False):
self._barrier.wait()
thread_local.server_started = True
return ret
return _mock_run_std_server
def run_independent_workers(self,
worker_fn,
strategy_cls,
num_workers,
num_ps=None,
**kwargs):
cluster_spec = multi_worker_test_base.create_cluster_spec(
num_workers=num_workers, num_ps=num_ps)
self._barrier = dc._Barrier(num_workers + (num_ps or 0)) # pylint: disable=protected-access
def _worker_fn(**kwargs):
"""Runs the worker function in a thread."""
with test.mock.patch.object(dc, '_run_std_server',
self._make_mock_run_std_server()):
strategy = get_strategy_object(strategy_cls)
with strategy.scope():
return worker_fn(**kwargs)
threads = self.run_multiple_tasks_in_threads(_worker_fn, cluster_spec,
**kwargs)
strategy = get_strategy_object(strategy_cls)
if strategy.extended.experimental_between_graph:
threads_to_join = threads.get('chief', []) + threads.get('worker', [])
else:
threads_to_join = [
threads['chief'][0] if 'chief' in threads else threads['worker'][0]
]
self.join_independent_workers(threads_to_join)
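# Standalone sketch (not part of TensorFlow) of the once-per-thread barrier
# trick used in _make_mock_run_std_server above: each thread blocks at the
# barrier only on its first call.
if __name__ == '__main__':
    _local = threading.local()
    _demo_barrier = dc._Barrier(2)  # pylint: disable=protected-access

    def _demo_fn():
        if not getattr(_local, 'started', False):
            _demo_barrier.wait()
            _local.started = True

    _threads = [threading.Thread(target=_demo_fn) for _ in range(2)]
    for _t in _threads:
        _t.start()
    for _t in _threads:
        _t.join()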
| ghchinoy/tensorflow | tensorflow/contrib/distribute/python/keras_multi_worker_test_base.py | Python | apache-2.0 | 4,173 |
"""Helpers that help with state related things."""
import asyncio
from collections import defaultdict
import datetime as dt
import logging
from types import ModuleType, TracebackType
from typing import Dict, Iterable, List, Optional, Type, Union
from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON
from homeassistant.const import (
STATE_CLOSED,
STATE_HOME,
STATE_LOCKED,
STATE_NOT_HOME,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.core import Context, State
from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass
import homeassistant.util.dt as dt_util
from .typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
class AsyncTrackStates:
"""
Record the time when the with-block is entered.
Add all states that have changed since the start time to the return list
    when the with-block is exited.
Must be run within the event loop.
"""
def __init__(self, hass: HomeAssistantType) -> None:
"""Initialize a TrackStates block."""
self.hass = hass
self.states: List[State] = []
# pylint: disable=attribute-defined-outside-init
def __enter__(self) -> List[State]:
"""Record time from which to track changes."""
self.now = dt_util.utcnow()
return self.states
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
"""Add changes states to changes list."""
self.states.extend(get_changed_since(self.hass.states.async_all(), self.now))
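# Hedged usage sketch (not part of Home Assistant): collect the states that
# change while a service call runs. `hass` is assumed to be a live instance
# supplied by the caller; the service name is illustrative.
async def _example_track_states(hass: HomeAssistantType) -> List[State]:
    with AsyncTrackStates(hass) as changed:
        await hass.services.async_call("light", "turn_on", blocking=True)
    return changed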
def get_changed_since(
states: Iterable[State], utc_point_in_time: dt.datetime
) -> List[State]:
"""Return list of states that have been changed since utc_point_in_time."""
return [state for state in states if state.last_updated >= utc_point_in_time]
@bind_hass
async def async_reproduce_state(
hass: HomeAssistantType,
states: Union[State, Iterable[State]],
blocking: bool = False,
context: Optional[Context] = None,
) -> None:
"""Reproduce a list of states on multiple domains."""
if isinstance(states, State):
states = [states]
to_call: Dict[str, List[State]] = defaultdict(list)
for state in states:
to_call[state.domain].append(state)
async def worker(domain: str, states_by_domain: List[State]) -> None:
try:
integration = await async_get_integration(hass, domain)
except IntegrationNotFound:
_LOGGER.warning(
"Trying to reproduce state for unknown integration: %s", domain
)
return
try:
platform: Optional[ModuleType] = integration.get_platform("reproduce_state")
except ImportError:
_LOGGER.warning("Integration %s does not support reproduce state", domain)
return
await platform.async_reproduce_states( # type: ignore
hass, states_by_domain, context=context
)
if to_call:
# run all domains in parallel
await asyncio.gather(
*(worker(domain, data) for domain, data in to_call.items())
)
def state_as_number(state: State) -> float:
"""
Try to coerce our state to a number.
Raises ValueError if this is not possible.
"""
if state.state in (
STATE_ON,
STATE_LOCKED,
STATE_ABOVE_HORIZON,
STATE_OPEN,
STATE_HOME,
):
return 1
if state.state in (
STATE_OFF,
STATE_UNLOCKED,
STATE_UNKNOWN,
STATE_BELOW_HORIZON,
STATE_CLOSED,
STATE_NOT_HOME,
):
return 0
return float(state.state)
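# Quick illustration (entity ids and values invented):
if __name__ == "__main__":
    assert state_as_number(State("lock.front_door", STATE_LOCKED)) == 1
    assert state_as_number(State("sensor.temperature", "21.5")) == 21.5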
| leppa/home-assistant | homeassistant/helpers/state.py | Python | apache-2.0 | 3,813 |
# Copyright (C) Mesosphere, Inc. See LICENSE file for details.
import copy
import logging
import os
import time
import pytest
import requests
from generic_test_code.common import (
generic_correct_upstream_dest_test,
generic_correct_upstream_request_test,
generic_upstream_headers_verify_test,
generic_verify_response_test,
overridden_file_content,
verify_header,
)
from util import GuardedSubprocess, LineBufferFilter, SearchCriteria
log = logging.getLogger(__name__)
class TestServiceEndpoint:
# Majority of /service endpoint tests are done with generic tests framework
def test_if_accept_encoding_header_is_removed_from_upstream_request(
self, master_ar_process_perclass, mocker, valid_user_header):
headers = copy.deepcopy(valid_user_header)
headers['Accept-Encoding'] = 'gzip'
generic_upstream_headers_verify_test(master_ar_process_perclass,
headers,
'/service/scheduler-alwaysthere/foo/bar/',
assert_headers_absent=["Accept-Encoding"],
)
class TestAgentEndpoint:
# Tests for /agent endpoint routing are done in test_cache.py
def test_if_accept_encoding_header_is_removed_from_upstream_request(
self, master_ar_process_perclass, mocker, valid_user_header):
headers = copy.deepcopy(valid_user_header)
headers['Accept-Encoding'] = 'gzip'
generic_upstream_headers_verify_test(master_ar_process_perclass,
headers,
'/agent/de1baf83-c36c-4d23-9cb0-f89f596cd6ab-S1/',
assert_headers_absent=["Accept-Encoding"],
)
class TestSystemAgentEndpoint:
# Tests for /agent endpoint routing are done in test_cache.py
def test_if_accept_encoding_header_is_removed_from_upstream_request(
self, master_ar_process_perclass, mocker, valid_user_header):
headers = copy.deepcopy(valid_user_header)
headers['Accept-Encoding'] = 'gzip'
generic_upstream_headers_verify_test(
master_ar_process_perclass,
headers,
'/system/v1/agent/de1baf83-c36c-4d23-9cb0-f89f596cd6ab-S0/logs',
assert_headers_absent=["Accept-Encoding"],
)
class TestHistoryServiceRouting:
def test_if_invalid_cache_case_is_handled(
self, nginx_class, valid_user_header, dns_server_mock):
ar = nginx_class()
url = ar.make_url_from_path('/dcos-history-service/foo/bar')
with GuardedSubprocess(ar):
            # Unfortunately there are upstreams that use `leader.mesos`, and
            # removing this entry too early will result in Nginx failing to start.
            # So we need to do it right after Nginx starts, but before the first
            # cache update.
time.sleep(1)
dns_server_mock.remove_dns_entry('leader.mesos.')
resp = requests.get(url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 503
assert 'cache is invalid' in resp.text
def test_if_leader_is_unknown_state_is_handled(
self, nginx_class, valid_user_header):
ar = nginx_class(host_ip=None)
url = ar.make_url_from_path('/dcos-history-service/foo/bar')
with GuardedSubprocess(ar):
resp = requests.get(url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 503
assert 'mesos leader is unknown' in resp.text
def test_if_leader_is_local_state_is_handled(
self, nginx_class, valid_user_header):
ar = nginx_class()
path_sent = '/dcos-history-service/foo/bar?a1=GET+param&a2=foobarism'
path_expected = '/foo/bar?a1=GET+param&a2=foobarism'
with GuardedSubprocess(ar):
generic_correct_upstream_dest_test(
ar,
valid_user_header,
path_sent,
"http://127.0.0.1:15055")
generic_correct_upstream_request_test(
ar,
valid_user_header,
path_sent,
path_expected)
generic_upstream_headers_verify_test(
ar,
valid_user_header,
path_sent)
def test_if_leader_is_nonlocal_state_is_handled(
self, nginx_class, valid_user_header, dns_server_mock):
ar = nginx_class()
path_sent = '/dcos-history-service/foo/bar?a1=GET+param&a2=foobarism'
path_expected = '/dcos-history-service/foo/bar?a1=GET+param&a2=foobarism'
dns_server_mock.set_dns_entry('leader.mesos.', ip='127.0.0.3')
with GuardedSubprocess(ar):
generic_correct_upstream_dest_test(
ar,
valid_user_header,
path_sent,
"http://127.0.0.3:80")
generic_correct_upstream_request_test(
ar,
valid_user_header,
path_sent,
path_expected)
generic_upstream_headers_verify_test(
ar,
valid_user_header,
path_sent,
assert_headers={"DCOS-Forwarded": "true"})
def test_if_proxy_loop_is_handled(
self, nginx_class, valid_user_header, dns_server_mock):
ar = nginx_class()
url = ar.make_url_from_path('/dcos-history-service/foo/bar')
dns_server_mock.set_dns_entry('leader.mesos.', ip='127.0.0.3')
h = valid_user_header
h.update({"DCOS-Forwarded": "true"})
with GuardedSubprocess(ar):
resp = requests.get(url,
allow_redirects=False,
headers=h)
assert resp.status_code == 503
assert 'mesos leader is unknown' in resp.text
class TestMetadata:
@pytest.mark.parametrize("public_ip", ['1.2.3.4', "10.20.20.30"])
def test_if_public_ip_detection_works(
self, master_ar_process_perclass, valid_user_header, public_ip):
url = master_ar_process_perclass.make_url_from_path('/metadata')
with overridden_file_content(
'/usr/local/detect_ip_public_data.txt',
"return ip {}".format(public_ip)):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp_data = resp.json()
assert resp_data['PUBLIC_IPV4'] == public_ip
def test_if_clusterid_is_returned(
self, master_ar_process_perclass, valid_user_header):
url = master_ar_process_perclass.make_url_from_path('/metadata')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp_data = resp.json()
assert resp_data['CLUSTER_ID'] == 'fdb1d7c0-06cf-4d65-bb9b-a8920bb854ef'
with overridden_file_content(
'/var/lib/dcos/cluster-id',
"fd21689b-4fe2-4779-8c30-9125149eef11"):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp_data = resp.json()
assert resp_data['CLUSTER_ID'] == "fd21689b-4fe2-4779-8c30-9125149eef11"
def test_if_missing_clusterid_file_is_handled(
self, master_ar_process_perclass, valid_user_header):
url = master_ar_process_perclass.make_url_from_path('/metadata')
with overridden_file_content('/var/lib/dcos/cluster-id'):
os.unlink('/var/lib/dcos/cluster-id')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp_data = resp.json()
assert 'CLUSTER_ID' not in resp_data
    def test_if_public_ip_detect_script_failure_is_handled(
self, master_ar_process_perclass, valid_user_header):
url = master_ar_process_perclass.make_url_from_path('/metadata')
        filter_regexp = {
            r'Traceback \(most recent call last\):': SearchCriteria(1, True),
            (r"FileNotFoundError: \[Errno 2\] No such file or directory:"
             r" '/usr/local/detect_ip_public_data.txt'"): SearchCriteria(1, True),
        }
lbf = LineBufferFilter(filter_regexp,
line_buffer=master_ar_process_perclass.stderr_line_buffer)
with lbf, overridden_file_content('/usr/local/detect_ip_public_data.txt'):
os.unlink('/usr/local/detect_ip_public_data.txt')
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
assert lbf.extra_matches == {}
resp_data = resp.json()
assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"
@pytest.mark.xfail(reason="Needs some refactoring, tracked in DCOS_OSS-1007")
def test_if_public_ip_detect_script_execution_is_timed_out(
self, master_ar_process_perclass, valid_user_header):
url = master_ar_process_perclass.make_url_from_path('/metadata')
ts_start = time.time()
with overridden_file_content('/usr/local/detect_ip_public_data.txt',
"timeout 10"):
requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
ts_total = time.time() - ts_start
assert ts_total < 10
# TODO (prozlach): tune it a bit
# assert resp.status_code == 200
# resp_data = resp.json()
# assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"
@pytest.mark.xfail(reason="Needs some refactoring, tracked in DCOS_OSS-1007")
def test_if_public_ip_detect_script_nonzero_exit_status_is_handled(
self, master_ar_process_perclass, valid_user_header):
url = master_ar_process_perclass.make_url_from_path('/metadata')
with overridden_file_content(
'/usr/local/detect_ip_public_data.txt',
"break with 1"):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp_data = resp.json()
assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"
class TestUiRoot:
@pytest.mark.parametrize("uniq_content", ["(。◕‿‿◕。)", "plain text 1234"])
@pytest.mark.parametrize("path", ["plain-ui-testfile.html",
"nest1/nested-ui-testfile.html"])
def test_if_ui_files_are_handled(
self,
master_ar_process_perclass,
valid_user_header,
uniq_content,
path):
url = master_ar_process_perclass.make_url_from_path('/{}'.format(path))
with overridden_file_content(
'/opt/mesosphere/active/dcos-ui/usr/{}'.format(path),
uniq_content):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header)
assert resp.status_code == 200
resp.encoding = 'utf-8'
assert resp.text == uniq_content
verify_header(resp.headers.items(), 'X-Frame-Options', 'DENY')
class TestMisc:
@pytest.mark.parametrize("content", ["{'data': '1234'}", "{'data': 'abcd'}"])
def test_if_buildinfo_is_served(
self, master_ar_process_perclass, valid_user_header, content):
url = master_ar_process_perclass.make_url_from_path(
'/pkgpanda/active.buildinfo.full.json')
with overridden_file_content(
'/opt/mesosphere/active.buildinfo.full.json',
content):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header
)
assert resp.status_code == 200
assert resp.text == content
@pytest.mark.parametrize("content", ["{'data': '1234'}", "{'data': 'abcd'}"])
def test_if_dcos_metadata_is_served(
self, master_ar_process_perclass, valid_user_header, content):
url = master_ar_process_perclass.make_url_from_path(
'/dcos-metadata/dcos-version.json')
with overridden_file_content(
'/opt/mesosphere/active/dcos-metadata/etc/dcos-version.json',
content):
resp = requests.get(
url,
allow_redirects=False,
headers=valid_user_header
)
assert resp.status_code == 200
assert resp.text == content
def test_if_xaccel_header_is_passed_to_client_by_ar(
self,
master_ar_process_perclass,
valid_user_header,
mocker):
accel_buff_header = {"X-Accel-Buffering": "TEST"}
mocker.send_command(
endpoint_id='http:///run/dcos/dcos-log.sock',
func_name='set_response_headers',
aux_data=accel_buff_header,
)
generic_verify_response_test(
master_ar_process_perclass,
valid_user_header,
'/system/v1/logs/foo/bar',
assert_headers=accel_buff_header)
| surdy/dcos | packages/adminrouter/extra/src/test-harness/tests/test_master.py | Python | apache-2.0 | 13,807 |
from django.utils.translation import ugettext_lazy as _
import horizon
from {{ dash_path }} import dashboard
class {{ panel_name|title }}(horizon.Panel):
name = _("{{ panel_name|title }}")
slug = "{{ panel_name|slugify }}"
dashboard.register({{ panel_name|title }})
| xhorn/xchorizon | horizon/conf/panel_template/panel.py | Python | apache-2.0 | 280 |
# automatically generated, do not modify
# namespace: NamespaceB
import flatbuffers
class TableInNestedNS(object):
__slots__ = ['_tab']
# TableInNestedNS
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# TableInNestedNS
def Foo(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
return 0
def TableInNestedNSStart(builder): builder.StartObject(1)
def TableInNestedNSAddFoo(builder, foo): builder.PrependInt32Slot(0, foo, 0)
def TableInNestedNSEnd(builder): return builder.EndObject()
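# Hedged usage sketch (appended by an editor, not emitted by flatc): build
# a buffer with the helpers above and read the foo field back using the
# standard root-offset dance from generated GetRootAs methods.
if __name__ == '__main__':
    b = flatbuffers.Builder(0)
    TableInNestedNSStart(b)
    TableInNestedNSAddFoo(b, 42)
    b.Finish(TableInNestedNSEnd(b))
    buf = b.Output()
    n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, 0)
    obj = TableInNestedNS()
    obj.Init(buf, n)
    assert obj.Foo() == 42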
| evanw/flatbuffers | tests/namespace_test/NamespaceA/NamespaceB/TableInNestedNS.py | Python | apache-2.0 | 693 |
#
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
PYTHON26 = sys.version_info < (2, 7)
if PYTHON26:
import unittest2 as unittest
else:
import unittest
__all__ = ['unittest']
| aidanhs/blockade | blockade/tests/__init__.py | Python | apache-2.0 | 731 |
"""Tests for fan platforms."""
import pytest
from homeassistant.components.fan import FanEntity
class BaseFan(FanEntity):
"""Implementation of the abstract FanEntity."""
def __init__(self):
"""Initialize the fan."""
def test_fanentity():
"""Test fan entity methods."""
fan = BaseFan()
assert fan.state == "off"
assert len(fan.speed_list) == 0
assert fan.supported_features == 0
assert fan.capability_attributes == {}
    # These methods raise NotImplementedError unless a platform implements them
with pytest.raises(NotImplementedError):
fan.oscillate(True)
with pytest.raises(NotImplementedError):
fan.set_speed("slow")
with pytest.raises(NotImplementedError):
fan.turn_on()
with pytest.raises(NotImplementedError):
fan.turn_off()
| tboyce021/home-assistant | tests/components/fan/test_init.py | Python | apache-2.0 | 781 |
# line 1
'A module docstring.'
import sys, inspect
# line 5
# line 7
def spam(a, b, c, d=3, (e, (f,))=(4, (5,)), *g, **h):
eggs(b + d, c + f)
# line 11
def eggs(x, y):
"A docstring."
global fr, st
fr = inspect.currentframe()
st = inspect.stack()
p = x
q = y // 0
# line 20
class StupidGit:
"""A longer,
indented
docstring."""
# line 27
def abuse(self, a, b, c):
"""Another
\tdocstring
containing
\ttabs
\t
"""
self.argue(a, b, c)
# line 40
def argue(self, a, b, c):
try:
spam(a, b, c)
except:
self.ex = sys.exc_info()
self.tr = inspect.trace()
# line 48
class MalodorousPervert(StupidGit):
pass
Tit = MalodorousPervert
class ParrotDroppings:
pass
class FesteringGob(MalodorousPervert, ParrotDroppings):
pass
currentframe = inspect.currentframe()
try:
raise Exception()
except:
tb = sys.exc_info()[2]
| slozier/ironpython2 | Src/StdLib/Lib/test/inspect_fodder.py | Python | apache-2.0 | 967 |
#!/usr/bin/env python
# Copyright 2015, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import maas_common
import subprocess
STATUSES = {'HEALTH_OK': 2, 'HEALTH_WARN': 1, 'HEALTH_ERR': 0}
def check_command(command):
output = subprocess.check_output(command, stderr=subprocess.STDOUT)
lines = output.strip().split('\n')
return json.loads(lines[-1])
def get_ceph_status(client, keyring, fmt='json'):
return check_command(('ceph', '--format', fmt, '--name', client,
'--keyring', keyring, 'status'))
def get_ceph_pg_dump_osds(client, keyring, fmt='json'):
return check_command(('ceph', '--format', fmt, '--name', client,
'--keyring', keyring, 'pg', 'dump', 'osds'))
def get_ceph_osd_dump(client, keyring, fmt='json'):
return check_command(('ceph', '--format', fmt, '--name', client,
'--keyring', keyring, 'osd', 'dump'))
def get_mon_statistics(client=None, keyring=None, host=None):
ceph_status = get_ceph_status(client=client, keyring=keyring)
mon = [m for m in ceph_status['monmap']['mons']
if m['name'] == host]
mon_in = mon[0]['rank'] in ceph_status['quorum']
maas_common.metric_bool('mon_in_quorum', mon_in)
health_status = 0
for each in ceph_status['health']['health']['health_services'][0]['mons']:
if each['name'] == host:
health_status = STATUSES[each['health']]
break
maas_common.metric('mon_health', 'uint32', health_status)
def get_osd_statistics(client=None, keyring=None, osd_ids=None):
osd_dump = get_ceph_osd_dump(client=client, keyring=keyring)
pg_osds_dump = get_ceph_pg_dump_osds(client=client, keyring=keyring)
for osd_id in osd_ids:
osd_ref = 'osd.%s' % osd_id
for _osd in osd_dump['osds']:
if _osd['osd'] == osd_id:
osd = _osd
break
else:
msg = 'The OSD ID %s does not exist.' % osd_id
raise maas_common.MaaSException(msg)
for key in ('up', 'in'):
name = '_'.join((osd_ref, key))
maas_common.metric_bool(name, osd[key])
for _osd in pg_osds_dump:
if _osd['osd'] == osd_id:
osd = _osd
break
for key in ('kb', 'kb_used', 'kb_avail'):
name = '_'.join((osd_ref, key))
maas_common.metric(name, 'uint64', osd[key])
def get_cluster_statistics(client=None, keyring=None):
metrics = []
ceph_status = get_ceph_status(client=client, keyring=keyring)
# Get overall cluster health
metrics.append({
'name': 'cluster_health',
'type': 'uint32',
'value': STATUSES[ceph_status['health']['overall_status']]})
# Collect epochs for the mon and osd maps
metrics.append({'name': "monmap_epoch",
'type': 'uint32',
'value': ceph_status['monmap']['epoch']})
metrics.append({'name': "osdmap_epoch",
'type': 'uint32',
'value': ceph_status['osdmap']['osdmap']['epoch']})
# Collect OSDs per state
osds = {'total': ceph_status['osdmap']['osdmap']['num_osds'],
'up': ceph_status['osdmap']['osdmap']['num_up_osds'],
'in': ceph_status['osdmap']['osdmap']['num_in_osds']}
for k in osds:
metrics.append({'name': 'osds_%s' % k,
'type': 'uint32',
'value': osds[k]})
# Collect cluster size & utilisation
metrics.append({'name': 'osds_kb_used',
'type': 'uint64',
'value': ceph_status['pgmap']['bytes_used'] / 1024})
metrics.append({'name': 'osds_kb_avail',
'type': 'uint64',
'value': ceph_status['pgmap']['bytes_avail'] / 1024})
metrics.append({'name': 'osds_kb',
'type': 'uint64',
'value': ceph_status['pgmap']['bytes_total'] / 1024})
# Collect num PGs and num healthy PGs
pgs = {'total': ceph_status['pgmap']['num_pgs'], 'active_clean': 0}
for state in ceph_status['pgmap']['pgs_by_state']:
if state['state_name'] == 'active+clean':
pgs['active_clean'] = state['count']
break
for k in pgs:
metrics.append({'name': 'pgs_%s' % k,
'type': 'uint32',
'value': pgs[k]})
# Submit gathered metrics
for m in metrics:
maas_common.metric(m['name'], m['type'], m['value'])
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--name', required=True, help='Ceph client name')
parser.add_argument('--keyring', required=True, help='Ceph client keyring')
subparsers = parser.add_subparsers(dest='subparser_name')
parser_mon = subparsers.add_parser('mon')
parser_mon.add_argument('--host', required=True, help='Mon hostname')
parser_osd = subparsers.add_parser('osd')
parser_osd.add_argument('--osd_ids', required=True,
help='Space separated list of OSD IDs')
subparsers.add_parser('cluster')
return parser.parse_args()
def main(args):
get_statistics = {'cluster': get_cluster_statistics,
'mon': get_mon_statistics,
'osd': get_osd_statistics}
kwargs = {'client': args.name, 'keyring': args.keyring}
if args.subparser_name == 'osd':
kwargs['osd_ids'] = [int(i) for i in args.osd_ids.split(' ')]
if args.subparser_name == 'mon':
kwargs['host'] = args.host
get_statistics[args.subparser_name](**kwargs)
maas_common.status_ok()
if __name__ == '__main__':
with maas_common.print_output():
args = get_args()
main(args)
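# Example invocations (client name, keyring path, hostnames and IDs are
# illustrative):
#   ceph_monitoring.py --name client.admin --keyring /etc/ceph/keyring cluster
#   ceph_monitoring.py --name client.admin --keyring /etc/ceph/keyring mon --host mon01
#   ceph_monitoring.py --name client.admin --keyring /etc/ceph/keyring osd --osd_ids '0 1 2'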
| byronmccollum/rpc-openstack | maas/plugins/ceph_monitoring.py | Python | apache-2.0 | 6,319 |
#!/usr/bin/env python
"""
@package mi.dataset.parser.metbk_a_dcl
@file marine-integrations/mi/dataset/parser/metbk_a_dcl.py
@author Ronald Ronquillo
@brief Parser for the metbk_a_dcl dataset driver
This file contains code for the metbk_a_dcl parsers and code to produce data particles.
For telemetered data, there is one parser which produces one type of data particle.
For recovered data, there is one parser which produces one type of data particle.
The input files and the content of the data particles are the same for both
recovered and telemetered.
Only the names of the output particle streams are different.
The input file is ASCII and contains 2 types of records.
Records are separated by a newline.
All records start with a timestamp.
Metadata records: timestamp [text] more text newline.
Sensor Data records: timestamp sensor_data newline.
Only sensor data records produce particles if properly formed.
Mal-formed sensor data records and all metadata records produce no particles.
Release notes:
Initial Release
"""
import re
from mi.core.log import get_logger
from mi.core.common import BaseEnum
from mi.dataset.parser.dcl_file_common import \
DclInstrumentDataParticle, \
DclFileCommonParser
from mi.core.instrument.dataset_data_particle import DataParticleKey
from mi.core.exceptions import UnexpectedDataException
log = get_logger()
__author__ = 'Phillip Tran'
__license__ = 'Apache 2.0'
# SENSOR_DATA_MATCHER produces the following groups.
# The following are indices into groups() produced by SENSOR_DATA_MATCHER
# incremented after common timestamp values.
# i.e, match.groups()[INDEX]
SENSOR_GROUP_BAROMETRIC_PRESSURE = 1
SENSOR_GROUP_RELATIVE_HUMIDITY = 2
SENSOR_GROUP_AIR_TEMPERATURE = 3
SENSOR_GROUP_LONGWAVE_IRRADIANCE = 4
SENSOR_GROUP_PRECIPITATION = 5
SENSOR_GROUP_SEA_SURFACE_TEMPERATURE = 6
SENSOR_GROUP_SEA_SURFACE_CONDUCTIVITY = 7
SENSOR_GROUP_SHORTWAVE_IRRADIANCE = 8
SENSOR_GROUP_EASTWARD_WIND_VELOCITY = 9
SENSOR_GROUP_NORTHWARD_WIND_VELOCITY = 10
# This table is used in the generation of the instrument data particle.
# This will be a list of tuples with the following columns.
# Column 1 - particle parameter name
# Column 2 - group number (index into raw_data)
# Column 3 - data encoding function (conversion required - int, float, etc)
INSTRUMENT_PARTICLE_MAP = [
('barometric_pressure', SENSOR_GROUP_BAROMETRIC_PRESSURE, float),
('relative_humidity', SENSOR_GROUP_RELATIVE_HUMIDITY, float),
('air_temperature', SENSOR_GROUP_AIR_TEMPERATURE, float),
('longwave_irradiance', SENSOR_GROUP_LONGWAVE_IRRADIANCE, float),
('precipitation', SENSOR_GROUP_PRECIPITATION, float),
('sea_surface_temperature', SENSOR_GROUP_SEA_SURFACE_TEMPERATURE, float),
('sea_surface_conductivity', SENSOR_GROUP_SEA_SURFACE_CONDUCTIVITY, float),
('shortwave_irradiance', SENSOR_GROUP_SHORTWAVE_IRRADIANCE, float),
('eastward_wind_velocity', SENSOR_GROUP_EASTWARD_WIND_VELOCITY, float),
('northward_wind_velocity', SENSOR_GROUP_NORTHWARD_WIND_VELOCITY, float)
]
class DataParticleType(BaseEnum):
REC_INSTRUMENT_PARTICLE = 'metbk_a_dcl_instrument_recovered'
TEL_INSTRUMENT_PARTICLE = 'metbk_a_dcl_instrument'
class MetbkADclInstrumentDataParticle(DclInstrumentDataParticle):
"""
Class for generating the Metbk_a instrument particle.
"""
def __init__(self, raw_data, *args, **kwargs):
super(MetbkADclInstrumentDataParticle, self).__init__(
raw_data,
INSTRUMENT_PARTICLE_MAP,
*args, **kwargs)
def _build_parsed_values(self):
"""
Build parsed values for Recovered and Telemetered Instrument Data Particle.
Will only append float values and ignore strings.
Returns the list.
"""
data_list = []
for name, group, func in INSTRUMENT_PARTICLE_MAP:
if isinstance(self.raw_data[group], func):
data_list.append(self._encode_value(name, self.raw_data[group], func))
return data_list
class MetbkADclRecoveredInstrumentDataParticle(MetbkADclInstrumentDataParticle):
"""
Class for generating Offset Data Particles from Recovered data.
"""
_data_particle_type = DataParticleType.REC_INSTRUMENT_PARTICLE
class MetbkADclTelemeteredInstrumentDataParticle(MetbkADclInstrumentDataParticle):
"""
Class for generating Offset Data Particles from Telemetered data.
"""
_data_particle_type = DataParticleType.TEL_INSTRUMENT_PARTICLE
class MetbkADclParser(DclFileCommonParser):
"""
This is the entry point for the Metbk_a_dcl parser.
"""
def __init__(self,
config,
stream_handle,
exception_callback):
super(MetbkADclParser, self).__init__(config,
stream_handle,
exception_callback,
'',
'')
self.particle_classes = None
self.instrument_particle_map = INSTRUMENT_PARTICLE_MAP
self.raw_data_length = 14
def parse_file(self):
"""
This method reads the file and parses the data within, and at
the end of this method self._record_buffer will be filled with all the particles in the file.
"""
# If not set from config & no InstrumentParameterException error from constructor
if self.particle_classes is None:
self.particle_classes = (self._particle_class,)
for particle_class in self.particle_classes:
for line in self._stream_handle:
if not re.findall(r'.*\[.*\]:\b[^\W\d_]+\b', line) and line is not None: # Disregard anything that has a word after [metbk2:DLOGP6]:
line = re.sub(r'\[.*\]:', '', line)
raw_data = line.split()
if len(raw_data) != self.raw_data_length: # The raw data should have a length of 14
self.handle_unknown_data(line)
continue
if re.findall(r'[a-zA-Z][0-9]|[0-9][a-zA-Z]', line):
self.handle_unknown_data(line)
continue
raw_data[0:2] = [' '.join(raw_data[0:2])] # Merge the first and second elements to form a timestamp
if raw_data is not None:
for i in range(1, len(raw_data)): # Ignore 0th element, because that is the timestamp
raw_data[i] = self.select_type(raw_data[i])
particle = self._extract_sample(particle_class,
None,
raw_data,
preferred_ts=DataParticleKey.PORT_TIMESTAMP)
self._record_buffer.append(particle)
def handle_unknown_data(self, line):
# Otherwise generate warning for unknown data.
error_message = 'Unknown data found in chunk %s' % line
log.warn(error_message)
self._exception_callback(UnexpectedDataException(error_message))
@staticmethod
def select_type(raw_list_element):
"""
This function will return the float value if possible
"""
try:
return float(raw_list_element)
except ValueError:
return None
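# Hedged illustration (sensor values invented) of the line handling in
# parse_file above: a well-formed DCL record splits into 14 tokens, and the
# date and time tokens are then merged into a single timestamp element.
if __name__ == '__main__':
    line = ('2014/05/07 00:04:56.001 1012.94 85.367 13.603 370.4 0.0 '
            '13.561 3.916 0.0 -0.8 1.3 0.0 0.0')
    raw_data = line.split()
    assert len(raw_data) == 14
    raw_data[0:2] = [' '.join(raw_data[0:2])]
    assert len(raw_data) == 13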
| renegelinas/mi-instrument | mi/dataset/parser/metbk_a_dcl.py | Python | bsd-2-clause | 7,536 |
import glob
import os
import sys
from jedi.evaluate.site import addsitedir
from jedi._compatibility import exec_function, unicode
from jedi.parser import tree
from jedi.parser import ParserWithRecovery
from jedi.evaluate.cache import memoize_default
from jedi import debug
from jedi import common
from jedi.evaluate.compiled import CompiledObject
from jedi.parser.utils import load_parser, save_parser
def get_venv_path(venv):
"""Get sys.path for specified virtual environment."""
sys_path = _get_venv_path_dirs(venv)
with common.ignored(ValueError):
sys_path.remove('')
sys_path = _get_sys_path_with_egglinks(sys_path)
# As of now, get_venv_path_dirs does not scan built-in pythonpath and
# user-local site-packages, let's approximate them using path from Jedi
# interpreter.
return sys_path + sys.path
def _get_sys_path_with_egglinks(sys_path):
"""Find all paths including those referenced by egg-links.
Egg-link-referenced directories are inserted into path immediately before
the directory on which their links were found. Such directories are not
taken into consideration by normal import mechanism, but they are traversed
when doing pkg_resources.require.
"""
result = []
for p in sys_path:
        # pkg_resources does not define a specific order for egg-link files;
        # since we enumerate them via os.listdir/glob, we sort them to keep
        # the result reproducible for tests.
for egg_link in sorted(glob.glob(os.path.join(p, '*.egg-link'))):
with open(egg_link) as fd:
for line in fd:
line = line.strip()
if line:
result.append(os.path.join(p, line))
# pkg_resources package only interprets the first
# non-empty line in egg-link files.
break
result.append(p)
return result
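# Self-contained demo (paths invented) of the egg-link expansion above:
if __name__ == '__main__':
    import tempfile
    _d = tempfile.mkdtemp()
    with open(os.path.join(_d, 'demo.egg-link'), 'w') as _f:
        _f.write('../src/demo\n')
    assert _get_sys_path_with_egglinks([_d]) == \
        [os.path.join(_d, '../src/demo'), _d]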
def _get_venv_path_dirs(venv):
"""Get sys.path for venv without starting up the interpreter."""
venv = os.path.abspath(venv)
sitedir = _get_venv_sitepackages(venv)
sys_path = []
addsitedir(sys_path, sitedir)
return sys_path
def _get_venv_sitepackages(venv):
if os.name == 'nt':
p = os.path.join(venv, 'lib', 'site-packages')
else:
p = os.path.join(venv, 'lib', 'python%d.%d' % sys.version_info[:2],
'site-packages')
return p
def _execute_code(module_path, code):
c = "import os; from os.path import *; result=%s"
variables = {'__file__': module_path}
try:
exec_function(c % code, variables)
except Exception:
debug.warning('sys.path manipulation detected, but failed to evaluate.')
else:
try:
res = variables['result']
if isinstance(res, str):
return [os.path.abspath(res)]
except KeyError:
pass
return []
def _paths_from_assignment(module_context, expr_stmt):
"""
Extracts the assigned strings from an assignment that looks as follows::
>>> sys.path[0:0] = ['module/path', 'another/module/path']
This function is in general pretty tolerant (and therefore 'buggy').
However, it's not a big issue usually to add more paths to Jedi's sys_path,
because it will only affect Jedi in very random situations and by adding
more paths than necessary, it usually benefits the general user.
"""
for assignee, operator in zip(expr_stmt.children[::2], expr_stmt.children[1::2]):
try:
assert operator in ['=', '+=']
assert assignee.type in ('power', 'atom_expr') and \
len(assignee.children) > 1
c = assignee.children
assert c[0].type == 'name' and c[0].value == 'sys'
trailer = c[1]
assert trailer.children[0] == '.' and trailer.children[1].value == 'path'
# TODO Essentially we're not checking details on sys.path
            # manipulation. Both assignment of the sys.path and changing/adding
            # parts of the sys.path are the same: they get added to the current
# sys.path.
"""
execution = c[2]
assert execution.children[0] == '['
subscript = execution.children[1]
assert subscript.type == 'subscript'
assert ':' in subscript.children
"""
except AssertionError:
continue
from jedi.evaluate.iterable import py__iter__
from jedi.evaluate.precedence import is_string
types = module_context.create_context(expr_stmt).eval_node(expr_stmt)
for lazy_context in py__iter__(module_context.evaluator, types, expr_stmt):
for context in lazy_context.infer():
if is_string(context):
yield context.obj
def _paths_from_list_modifications(module_path, trailer1, trailer2):
""" extract the path from either "sys.path.append" or "sys.path.insert" """
# Guarantee that both are trailers, the first one a name and the second one
# a function execution with at least one param.
if not (trailer1.type == 'trailer' and trailer1.children[0] == '.'
and trailer2.type == 'trailer' and trailer2.children[0] == '('
and len(trailer2.children) == 3):
return []
name = trailer1.children[1].value
if name not in ['insert', 'append']:
return []
arg = trailer2.children[1]
if name == 'insert' and len(arg.children) in (3, 4): # Possible trailing comma.
arg = arg.children[2]
return _execute_code(module_path, arg.get_code())
def _check_module(module_context):
"""
Detect sys.path modifications within module.
"""
def get_sys_path_powers(names):
for name in names:
power = name.parent.parent
if power.type in ('power', 'atom_expr'):
c = power.children
if isinstance(c[0], tree.Name) and c[0].value == 'sys' \
and c[1].type == 'trailer':
n = c[1].children[1]
if isinstance(n, tree.Name) and n.value == 'path':
yield name, power
sys_path = list(module_context.evaluator.sys_path) # copy
if isinstance(module_context, CompiledObject):
return sys_path
try:
possible_names = module_context.tree_node.used_names['path']
except KeyError:
        # module.used_names is a MergedNamesDict whose __getitem__ never
        # raises KeyError, so this branch is superfluous.
        pass
else:
for name, power in get_sys_path_powers(possible_names):
stmt = name.get_definition()
if len(power.children) >= 4:
sys_path.extend(
_paths_from_list_modifications(
module_context.py__file__(), *power.children[2:4]
)
)
elif name.get_definition().type == 'expr_stmt':
sys_path.extend(_paths_from_assignment(module_context, stmt))
return sys_path
@memoize_default(evaluator_is_first_arg=True, default=[])
def sys_path_with_modifications(evaluator, module_context):
path = module_context.py__file__()
if path is None:
# Support for modules without a path is bad, therefore return the
# normal path.
return list(evaluator.sys_path)
curdir = os.path.abspath(os.curdir)
#TODO why do we need a chdir?
with common.ignored(OSError):
os.chdir(os.path.dirname(path))
buildout_script_paths = set()
result = _check_module(module_context)
result += _detect_django_path(path)
for buildout_script in _get_buildout_scripts(path):
for path in _get_paths_from_buildout_script(evaluator, buildout_script):
buildout_script_paths.add(path)
# cleanup, back to old directory
os.chdir(curdir)
return list(result) + list(buildout_script_paths)
def _get_paths_from_buildout_script(evaluator, buildout_script):
def load(buildout_script):
try:
with open(buildout_script, 'rb') as f:
source = common.source_to_unicode(f.read())
except IOError:
debug.dbg('Error trying to read buildout_script: %s', buildout_script)
return
p = ParserWithRecovery(evaluator.grammar, source, buildout_script)
save_parser(buildout_script, p)
return p.module
cached = load_parser(buildout_script)
module_node = cached and cached.module or load(buildout_script)
if module_node is None:
return
from jedi.evaluate.representation import ModuleContext
for path in _check_module(ModuleContext(evaluator, module_node)):
yield path
def traverse_parents(path):
while True:
new = os.path.dirname(path)
if new == path:
return
path = new
yield path
def _get_parent_dir_with_file(path, filename):
for parent in traverse_parents(path):
if os.path.isfile(os.path.join(parent, filename)):
return parent
return None
def _detect_django_path(module_path):
""" Detects the path of the very well known Django library (if used) """
result = []
for parent in traverse_parents(module_path):
with common.ignored(IOError):
with open(parent + os.path.sep + 'manage.py'):
debug.dbg('Found django path: %s', module_path)
result.append(parent)
return result
def _get_buildout_scripts(module_path):
"""
if there is a 'buildout.cfg' file in one of the parent directories of the
given module it will return a list of all files in the buildout bin
directory that look like python files.
:param module_path: absolute path to the module.
:type module_path: str
"""
project_root = _get_parent_dir_with_file(module_path, 'buildout.cfg')
if not project_root:
return []
bin_path = os.path.join(project_root, 'bin')
if not os.path.exists(bin_path):
return []
extra_module_paths = []
for filename in os.listdir(bin_path):
try:
filepath = os.path.join(bin_path, filename)
with open(filepath, 'r') as f:
firstline = f.readline()
if firstline.startswith('#!') and 'python' in firstline:
extra_module_paths.append(filepath)
except (UnicodeDecodeError, IOError) as e:
# Probably a binary file; permission error or race cond. because file got deleted
# ignore
debug.warning(unicode(e))
continue
return extra_module_paths
| tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/site-packages/jedi/evaluate/sys_path.py | Python | bsd-3-clause | 10,686 |
from __future__ import absolute_import
from django.conf.urls import include, url
from django.conf import settings
from django.conf.urls.static import static
from . import views
from . import settings as wooey_settings
wooey_patterns = [
url(r'^jobs/command$', views.celery_task_command, name='celery_task_command'),
url(r'^jobs/queue/global/json$', views.global_queue_json, name='global_queue_json'),
url(r'^jobs/queue/user/json$', views.user_queue_json, name='user_queue_json'),
url(r'^jobs/results/user/json$', views.user_results_json, name='user_results_json'),
url(r'^jobs/queue/all/json', views.all_queues_json, name='all_queues_json'),
url(r'^jobs/queue/global', views.GlobalQueueView.as_view(), name='global_queue'),
url(r'^jobs/queue/user', views.UserQueueView.as_view(), name='user_queue'),
url(r'^jobs/results/user', views.UserResultsView.as_view(), name='user_results'),
url(r'^jobs/(?P<job_id>[0-9\-]+)/$', views.JobView.as_view(), name='celery_results'),
url(r'^jobs/(?P<job_id>[0-9\-]+)/json$', views.JobJSON.as_view(), name='celery_results_json'),
# Global public access via uuid
url(r'^jobs/(?P<uuid>[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89aAbB][a-f0-9]{3}-[a-f0-9]{12})/$', views.JobView.as_view(), name='celery_results_uuid'),
url(r'^jobs/(?P<uuid>[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89aAbB][a-f0-9]{3}-[a-f0-9]{12})/json$', views.JobJSON.as_view(), name='celery_results_json_uuid'),
url(r'^scripts/(?P<slug>[a-zA-Z0-9\-\_]+)/$', views.WooeyScriptView.as_view(), name='wooey_script'),
url(r'^scripts/(?P<slug>[a-zA-Z0-9\-\_]+)/version/(?P<script_version>[A-Za-z\.0-9]+)$', views.WooeyScriptView.as_view(), name='wooey_script'),
url(r'^scripts/(?P<slug>[a-zA-Z0-9\-\_]+)/version/(?P<script_version>[A-Za-z\.0-9]+)/iteration/(?P<script_iteration>\d+)$', views.WooeyScriptView.as_view(), name='wooey_script'),
url(r'^scripts/(?P<slug>[a-zA-Z0-9\-\_]+)/jobs/(?P<job_id>[a-zA-Z0-9\-]+)$', views.WooeyScriptView.as_view(), name='wooey_script_clone'),
url(r'^scripts/(?P<slug>[a-zA-Z0-9\-\_]+)/$', views.WooeyScriptJSON.as_view(), name='wooey_script_json'),
url(r'^scripts/search/json$', views.WooeyScriptSearchJSON.as_view(), name='wooey_search_script_json'),
url(r'^scripts/search/jsonhtml$', views.WooeyScriptSearchJSONHTML.as_view(), name='wooey_search_script_jsonhtml'),
url(r'^profile/$', views.WooeyProfileView.as_view(), name='profile_home'),
url(r'^profile/(?P<username>[a-zA-Z0-9\-]+)$', views.WooeyProfileView.as_view(), name='profile'),
url(r'^$', views.WooeyHomeView.as_view(), name='wooey_home'),
url(r'^$', views.WooeyHomeView.as_view(), name='wooey_job_launcher'),
url('^{}'.format(wooey_settings.WOOEY_LOGIN_URL.lstrip('/')), views.wooey_login, name='wooey_login'),
url('^{}'.format(wooey_settings.WOOEY_REGISTER_URL.lstrip('/')), views.WooeyRegister.as_view(), name='wooey_register'),
url(r'^favorite/toggle$', views.toggle_favorite, name='toggle_favorite'),
url(r'^scrapbook$', views.WooeyScrapbookView.as_view(), name='scrapbook'),
]
urlpatterns = [
url('^', include(wooey_patterns, namespace='wooey')),
url('^', include('django.contrib.auth.urls')),
]
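# Hedged example of resolving one of the named routes above; the slug is
# illustrative and configured Django settings are assumed:
#   >>> from django.core.urlresolvers import reverse
#   >>> reverse('wooey:wooey_script', kwargs={'slug': 'my-script'})
#   '/scripts/my-script/'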
| alexkolar/Wooey | wooey/urls.py | Python | bsd-3-clause | 3,225 |
# -*- coding:utf-8 -*-
#--
# Copyright (c) 2012-2014 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
#--
from nagare import log
import pkg_resources
from ..assetsmanager import AssetsManager
class DummyAssetsManager(AssetsManager):
def __init__(self):
super(DummyAssetsManager, self).__init__('', None)
    def save(self, data, file_id=None, metadata=None):
        # None instead of a mutable default argument for the metadata dict
        log.debug("Save Image")
        log.debug("%s" % (metadata or {}))
        return 'mock_id'
def load(self, file_id):
log.debug("Load Image")
package = pkg_resources.Requirement.parse('kansha')
fname = pkg_resources.resource_filename(package, 'kansha/services/dummyassetsmanager/tie.jpg')
        with open(fname, 'rb') as f:  # binary mode: the asset is a JPEG
data = f.read()
return data, {}
def update_metadata(self, file_id, metadata):
pass
def get_metadata(self, file_id):
pass
| bcroq/kansha | kansha/services/dummyassetsmanager/dummyassetsmanager.py | Python | bsd-3-clause | 1,033 |
from __future__ import absolute_import
import logging
from rest_framework import serializers
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from uuid import uuid4
from sentry.api.base import Endpoint, SessionAuthentication
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.serializers import serialize
from sentry.api.serializers.rest_framework import ListField
from sentry.models import ApiApplication, ApiApplicationStatus
from sentry.tasks.deletion import delete_api_application
delete_logger = logging.getLogger("sentry.deletions.api")
class ApiApplicationSerializer(serializers.Serializer):
name = serializers.CharField(max_length=64)
redirectUris = ListField(child=serializers.URLField(max_length=255), required=False)
allowedOrigins = ListField(
# TODO(dcramer): make this validate origins
child=serializers.CharField(max_length=255),
required=False,
)
homepageUrl = serializers.URLField(
max_length=255, required=False, allow_null=True, allow_blank=True
)
termsUrl = serializers.URLField(
max_length=255, required=False, allow_null=True, allow_blank=True
)
privacyUrl = serializers.URLField(
max_length=255, required=False, allow_null=True, allow_blank=True
)
class ApiApplicationDetailsEndpoint(Endpoint):
authentication_classes = (SessionAuthentication,)
permission_classes = (IsAuthenticated,)
def get(self, request, app_id):
try:
instance = ApiApplication.objects.get(
owner=request.user, client_id=app_id, status=ApiApplicationStatus.active
)
except ApiApplication.DoesNotExist:
raise ResourceDoesNotExist
return Response(serialize(instance, request.user))
def put(self, request, app_id):
try:
instance = ApiApplication.objects.get(
owner=request.user, client_id=app_id, status=ApiApplicationStatus.active
)
except ApiApplication.DoesNotExist:
raise ResourceDoesNotExist
serializer = ApiApplicationSerializer(data=request.data, partial=True)
if serializer.is_valid():
result = serializer.validated_data
kwargs = {}
if "name" in result:
kwargs["name"] = result["name"]
if "allowedOrigins" in result:
kwargs["allowed_origins"] = "\n".join(result["allowedOrigins"])
if "redirectUris" in result:
kwargs["redirect_uris"] = "\n".join(result["redirectUris"])
if "homepageUrl" in result:
kwargs["homepage_url"] = result["homepageUrl"]
if "privacyUrl" in result:
kwargs["privacy_url"] = result["privacyUrl"]
if "termsUrl" in result:
kwargs["terms_url"] = result["termsUrl"]
if kwargs:
instance.update(**kwargs)
return Response(serialize(instance, request.user), status=200)
return Response(serializer.errors, status=400)
def delete(self, request, app_id):
try:
instance = ApiApplication.objects.get(
owner=request.user, client_id=app_id, status=ApiApplicationStatus.active
)
except ApiApplication.DoesNotExist:
raise ResourceDoesNotExist
updated = ApiApplication.objects.filter(id=instance.id).update(
status=ApiApplicationStatus.pending_deletion
)
if updated:
transaction_id = uuid4().hex
delete_api_application.apply_async(
kwargs={"object_id": instance.id, "transaction_id": transaction_id}, countdown=3600
)
delete_logger.info(
"object.delete.queued",
extra={
"object_id": instance.id,
"transaction_id": transaction_id,
"model": type(instance).__name__,
},
)
return Response(status=204)
| mvaled/sentry | src/sentry/api/endpoints/api_application_details.py | Python | bsd-3-clause | 4,091 |
import warnings
import numpy as np
from numpy.testing import (assert_almost_equal, assert_equal, assert_allclose,
assert_, suppress_warnings)
from pytest import raises as assert_raises
from scipy.signal import (ss2tf, tf2ss, lsim2, impulse2, step2, lti,
dlti, bode, freqresp, lsim, impulse, step,
abcd_normalize, place_poles,
TransferFunction, StateSpace, ZerosPolesGain)
from scipy.signal.filter_design import BadCoefficients
import scipy.linalg as linalg
from scipy.sparse.sputils import matrix
def _assert_poles_close(P1, P2, rtol=1e-8, atol=1e-8):
"""
Check each pole in P1 is close to a pole in P2 with a 1e-8
relative tolerance or 1e-8 absolute tolerance (useful for zero poles).
These tolerances are very strict but the systems tested are known to
accept these poles so we should not be far from what is requested.
"""
P2 = P2.copy()
for p1 in P1:
found = False
for p2_idx in range(P2.shape[0]):
if np.allclose([np.real(p1), np.imag(p1)],
[np.real(P2[p2_idx]), np.imag(P2[p2_idx])],
rtol, atol):
found = True
                # np.delete returns a copy; re-assign so the matched pole
                # is actually removed and cannot be matched twice
                P2 = np.delete(P2, p2_idx)
break
if not found:
raise ValueError("Can't find pole " + str(p1) + " in " + str(P2))
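# Standalone illustration (toy double-integrator system) of
# scipy.signal.place_poles, the routine exercised by TestPlacePoles below:
if __name__ == '__main__':
    A_demo = np.array([[0., 1.], [0., 0.]])
    B_demo = np.array([[0.], [1.]])
    fsf_demo = place_poles(A_demo, B_demo, [-1., -2.])
    closed = A_demo - B_demo.dot(fsf_demo.gain_matrix)
    _assert_poles_close(np.linalg.eigvals(closed), np.array([-1., -2.]))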
class TestPlacePoles(object):
def _check(self, A, B, P, **kwargs):
"""
Perform the most common tests on the poles computed by place_poles
and return the Bunch object for further specific tests
"""
fsf = place_poles(A, B, P, **kwargs)
expected, _ = np.linalg.eig(A - np.dot(B, fsf.gain_matrix))
_assert_poles_close(expected,fsf.requested_poles)
_assert_poles_close(expected,fsf.computed_poles)
_assert_poles_close(P,fsf.requested_poles)
return fsf
def test_real(self):
# Test real pole placement using KNV and YT0 algorithm and example 1 in
# section 4 of the reference publication (see place_poles docstring)
A = np.array([1.380, -0.2077, 6.715, -5.676, -0.5814, -4.290, 0,
0.6750, 1.067, 4.273, -6.654, 5.893, 0.0480, 4.273,
1.343, -2.104]).reshape(4, 4)
B = np.array([0, 5.679, 1.136, 1.136, 0, 0, -3.146,0]).reshape(4, 2)
P = np.array([-0.2, -0.5, -5.0566, -8.6659])
# Check that both KNV and YT compute correct K matrix
self._check(A, B, P, method='KNV0')
self._check(A, B, P, method='YT')
# Try to reach the specific case in _YT_real where two singular
# values are almost equal. This is to improve code coverage but I
# have no way to be sure this code is really reached
# on some architectures this can lead to a RuntimeWarning invalid
# value in divide (see gh-7590), so suppress it for now
with np.errstate(invalid='ignore'):
self._check(A, B, (2,2,3,3))
def test_complex(self):
# Test complex pole placement on a linearized car model, taken from L.
# Jaulin, Automatique pour la robotique, Cours et Exercices, iSTE
# editions p 184/185
A = np.array([0,7,0,0,0,0,0,7/3.,0,0,0,0,0,0,0,0]).reshape(4,4)
B = np.array([0,0,0,0,1,0,0,1]).reshape(4,2)
# Test complex poles on YT
P = np.array([-3, -1, -2-1j, -2+1j])
self._check(A, B, P)
# Try to reach the specific case in _YT_complex where two singular
# values are almost equal. This is to improve code coverage but I
# have no way to be sure this code is really reached
P = [0-1e-6j,0+1e-6j,-10,10]
self._check(A, B, P, maxiter=1000)
# Try to reach the specific case in _YT_complex where the rank two
# update yields two null vectors. This test was found via Monte Carlo.
A = np.array(
[-2148,-2902, -2267, -598, -1722, -1829, -165, -283, -2546,
-167, -754, -2285, -543, -1700, -584, -2978, -925, -1300,
-1583, -984, -386, -2650, -764, -897, -517, -1598, 2, -1709,
-291, -338, -153, -1804, -1106, -1168, -867, -2297]
).reshape(6,6)
B = np.array(
[-108, -374, -524, -1285, -1232, -161, -1204, -672, -637,
-15, -483, -23, -931, -780, -1245, -1129, -1290, -1502,
-952, -1374, -62, -964, -930, -939, -792, -756, -1437,
-491, -1543, -686]
).reshape(6,5)
P = [-25.-29.j, -25.+29.j, 31.-42.j, 31.+42.j, 33.-41.j, 33.+41.j]
self._check(A, B, P)
# Use a lot of poles to go through all cases for update_order
# in _YT_loop
big_A = np.ones((11,11))-np.eye(11)
big_B = np.ones((11,10))-np.diag([1]*10,1)[:,1:]
big_A[:6,:6] = A
big_B[:6,:5] = B
P = [-10,-20,-30,40,50,60,70,-20-5j,-20+5j,5+3j,5-3j]
self._check(big_A, big_B, P)
        # check with only complex poles and only real poles
P = [-10,-20,-30,-40,-50,-60,-70,-80,-90,-100]
self._check(big_A[:-1,:-1], big_B[:-1,:-1], P)
P = [-10+10j,-20+20j,-30+30j,-40+40j,-50+50j,
-10-10j,-20-20j,-30-30j,-40-40j,-50-50j]
self._check(big_A[:-1,:-1], big_B[:-1,:-1], P)
        # a 5x5 array is needed to ensure YT handles the case where there
        # is only one real pole and several complex ones properly
A = np.array([0,7,0,0,0,0,0,7/3.,0,0,0,0,0,0,0,0,
0,0,0,5,0,0,0,0,9]).reshape(5,5)
B = np.array([0,0,0,0,1,0,0,1,2,3]).reshape(5,2)
P = np.array([-2, -3+1j, -3-1j, -1+1j, -1-1j])
place_poles(A, B, P)
# same test with an odd number of real poles > 1
# this is another specific case of YT
P = np.array([-2, -3, -4, -1+1j, -1-1j])
self._check(A, B, P)
def test_tricky_B(self):
        # check that we correctly handle B matrices with a single column and
        # with n columns (with n such that shape(A) == (n, n))
A = np.array([1.380, -0.2077, 6.715, -5.676, -0.5814, -4.290, 0,
0.6750, 1.067, 4.273, -6.654, 5.893, 0.0480, 4.273,
1.343, -2.104]).reshape(4, 4)
B = np.array([0, 5.679, 1.136, 1.136, 0, 0, -3.146, 0, 1, 2, 3, 4,
5, 6, 7, 8]).reshape(4, 4)
        # Neither KNV nor YT is called here; it's a specific case with
        # only one unique solution
P = np.array([-0.2, -0.5, -5.0566, -8.6659])
fsf = self._check(A, B, P)
        # rtol and nb_iter should be set to np.nan as the identity matrix
        # can be used as the transfer matrix
assert_equal(fsf.rtol, np.nan)
assert_equal(fsf.nb_iter, np.nan)
# check with complex poles too as they trigger a specific case in
# the specific case :-)
P = np.array((-2+1j,-2-1j,-3,-2))
fsf = self._check(A, B, P)
assert_equal(fsf.rtol, np.nan)
assert_equal(fsf.nb_iter, np.nan)
        # now test with a B matrix with only one column (no optimisation)
B = B[:,0].reshape(4,1)
P = np.array((-2+1j,-2-1j,-3,-2))
fsf = self._check(A, B, P)
        # we can't optimize anything; check that they are set to 0 as expected
assert_equal(fsf.rtol, 0)
assert_equal(fsf.nb_iter, 0)
def test_errors(self):
# Test input mistakes from user
A = np.array([0,7,0,0,0,0,0,7/3.,0,0,0,0,0,0,0,0]).reshape(4,4)
B = np.array([0,0,0,0,1,0,0,1]).reshape(4,2)
        # should fail as the method keyword is invalid
assert_raises(ValueError, place_poles, A, B, (-2.1,-2.2,-2.3,-2.4),
method="foo")
        # should fail as the poles are not a 1D array
assert_raises(ValueError, place_poles, A, B,
np.array((-2.1,-2.2,-2.3,-2.4)).reshape(4,1))
        # should fail as A is not a 2D array
assert_raises(ValueError, place_poles, A[:,:,np.newaxis], B,
(-2.1,-2.2,-2.3,-2.4))
        # should fail as B is not a 2D array
assert_raises(ValueError, place_poles, A, B[:,:,np.newaxis],
(-2.1,-2.2,-2.3,-2.4))
        # should fail as there are too many poles
assert_raises(ValueError, place_poles, A, B, (-2.1,-2.2,-2.3,-2.4,-3))
        # should fail as there are not enough poles
assert_raises(ValueError, place_poles, A, B, (-2.1,-2.2,-2.3))
        # should fail as rtol is greater than 1
assert_raises(ValueError, place_poles, A, B, (-2.1,-2.2,-2.3,-2.4),
rtol=42)
        # should fail as maxiter is smaller than 1
assert_raises(ValueError, place_poles, A, B, (-2.1,-2.2,-2.3,-2.4),
maxiter=-42)
        # should fail as a pole has a multiplicity higher than rank(B) == 2
assert_raises(ValueError, place_poles, A, B, (-2,-2,-2,-2))
        # uncontrollable system
assert_raises(ValueError, place_poles, np.ones((4,4)),
np.ones((4,2)), (1,2,3,4))
# Should not raise ValueError as the poles can be placed but should
        # raise a warning as convergence is not reached
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
fsf = place_poles(A, B, (-1,-2,-3,-4), rtol=1e-16, maxiter=42)
assert_(len(w) == 1)
assert_(issubclass(w[-1].category, UserWarning))
assert_("Convergence was not reached after maxiter iterations"
in str(w[-1].message))
assert_equal(fsf.nb_iter, 42)
        # should fail as a complex pole misses its conjugate
assert_raises(ValueError, place_poles, A, B, (-2+1j,-2-1j,-2+3j,-2))
# should fail as A is not square
assert_raises(ValueError, place_poles, A[:,:3], B, (-2,-3,-4,-5))
        # should fail as B does not have the same number of rows as A
assert_raises(ValueError, place_poles, A, B[:3,:], (-2,-3,-4,-5))
# should fail as KNV0 does not support complex poles
assert_raises(ValueError, place_poles, A, B,
(-2+1j,-2-1j,-2+3j,-2-3j), method="KNV0")
class TestSS2TF:
def check_matrix_shapes(self, p, q, r):
ss2tf(np.zeros((p, p)),
np.zeros((p, q)),
np.zeros((r, p)),
np.zeros((r, q)), 0)
def test_shapes(self):
# Each tuple holds:
# number of states, number of inputs, number of outputs
for p, q, r in [(3, 3, 3), (1, 3, 3), (1, 1, 1)]:
self.check_matrix_shapes(p, q, r)
def test_basic(self):
# Test a round trip through tf2ss and ss2tf.
b = np.array([1.0, 3.0, 5.0])
a = np.array([1.0, 2.0, 3.0])
A, B, C, D = tf2ss(b, a)
assert_allclose(A, [[-2, -3], [1, 0]], rtol=1e-13)
assert_allclose(B, [[1], [0]], rtol=1e-13)
assert_allclose(C, [[1, 2]], rtol=1e-13)
assert_allclose(D, [[1]], rtol=1e-14)
bb, aa = ss2tf(A, B, C, D)
assert_allclose(bb[0], b, rtol=1e-13)
assert_allclose(aa, a, rtol=1e-13)
def test_zero_order_round_trip(self):
# See gh-5760
tf = (2, 1)
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[0]], rtol=1e-13)
assert_allclose(B, [[0]], rtol=1e-13)
assert_allclose(C, [[0]], rtol=1e-13)
assert_allclose(D, [[2]], rtol=1e-13)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[2, 0]], rtol=1e-13)
assert_allclose(den, [1, 0], rtol=1e-13)
tf = ([[5], [2]], 1)
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[0]], rtol=1e-13)
assert_allclose(B, [[0]], rtol=1e-13)
assert_allclose(C, [[0], [0]], rtol=1e-13)
assert_allclose(D, [[5], [2]], rtol=1e-13)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[5, 0], [2, 0]], rtol=1e-13)
assert_allclose(den, [1, 0], rtol=1e-13)
def test_simo_round_trip(self):
# See gh-5753
tf = ([[1, 2], [1, 1]], [1, 2])
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[-2]], rtol=1e-13)
assert_allclose(B, [[1]], rtol=1e-13)
assert_allclose(C, [[0], [-1]], rtol=1e-13)
assert_allclose(D, [[1], [1]], rtol=1e-13)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[1, 2], [1, 1]], rtol=1e-13)
assert_allclose(den, [1, 2], rtol=1e-13)
tf = ([[1, 0, 1], [1, 1, 1]], [1, 1, 1])
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[-1, -1], [1, 0]], rtol=1e-13)
assert_allclose(B, [[1], [0]], rtol=1e-13)
assert_allclose(C, [[-1, 0], [0, 0]], rtol=1e-13)
assert_allclose(D, [[1], [1]], rtol=1e-13)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[1, 0, 1], [1, 1, 1]], rtol=1e-13)
assert_allclose(den, [1, 1, 1], rtol=1e-13)
tf = ([[1, 2, 3], [1, 2, 3]], [1, 2, 3, 4])
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[-2, -3, -4], [1, 0, 0], [0, 1, 0]], rtol=1e-13)
assert_allclose(B, [[1], [0], [0]], rtol=1e-13)
assert_allclose(C, [[1, 2, 3], [1, 2, 3]], rtol=1e-13)
assert_allclose(D, [[0], [0]], rtol=1e-13)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[0, 1, 2, 3], [0, 1, 2, 3]], rtol=1e-13)
assert_allclose(den, [1, 2, 3, 4], rtol=1e-13)
tf = (np.array([1, [2, 3]], dtype=object), [1, 6])
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[-6]], rtol=1e-31)
assert_allclose(B, [[1]], rtol=1e-31)
assert_allclose(C, [[1], [-9]], rtol=1e-31)
assert_allclose(D, [[0], [2]], rtol=1e-31)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[0, 1], [2, 3]], rtol=1e-13)
assert_allclose(den, [1, 6], rtol=1e-13)
tf = (np.array([[1, -3], [1, 2, 3]], dtype=object), [1, 6, 5])
A, B, C, D = tf2ss(*tf)
assert_allclose(A, [[-6, -5], [1, 0]], rtol=1e-13)
assert_allclose(B, [[1], [0]], rtol=1e-13)
assert_allclose(C, [[1, -3], [-4, -2]], rtol=1e-13)
assert_allclose(D, [[0], [1]], rtol=1e-13)
num, den = ss2tf(A, B, C, D)
assert_allclose(num, [[0, 1, -3], [1, 2, 3]], rtol=1e-13)
assert_allclose(den, [1, 6, 5], rtol=1e-13)
def test_multioutput(self):
# Regression test for gh-2669.
# 4 states
A = np.array([[-1.0, 0.0, 1.0, 0.0],
[-1.0, 0.0, 2.0, 0.0],
[-4.0, 0.0, 3.0, 0.0],
[-8.0, 8.0, 0.0, 4.0]])
# 1 input
B = np.array([[0.3],
[0.0],
[7.0],
[0.0]])
# 3 outputs
C = np.array([[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0],
[8.0, 8.0, 0.0, 0.0]])
D = np.array([[0.0],
[0.0],
[1.0]])
# Get the transfer functions for all the outputs in one call.
b_all, a = ss2tf(A, B, C, D)
# Get the transfer functions for each output separately.
b0, a0 = ss2tf(A, B, C[0], D[0])
b1, a1 = ss2tf(A, B, C[1], D[1])
b2, a2 = ss2tf(A, B, C[2], D[2])
# Check that we got the same results.
assert_allclose(a0, a, rtol=1e-13)
assert_allclose(a1, a, rtol=1e-13)
assert_allclose(a2, a, rtol=1e-13)
assert_allclose(b_all, np.vstack((b0, b1, b2)), rtol=1e-13, atol=1e-14)
class TestLsim(object):
def lti_nowarn(self, *args):
with suppress_warnings() as sup:
sup.filter(BadCoefficients)
system = lti(*args)
return system
def test_first_order(self):
# y' = -y
# exact solution is y(t) = exp(-t)
system = self.lti_nowarn(-1.,1.,1.,0.)
t = np.linspace(0,5)
u = np.zeros_like(t)
tout, y, x = lsim(system, u, t, X0=[1.0])
expected_x = np.exp(-tout)
assert_almost_equal(x, expected_x)
assert_almost_equal(y, expected_x)
def test_integrator(self):
# integrator: y' = u
system = self.lti_nowarn(0., 1., 1., 0.)
t = np.linspace(0,5)
u = t
tout, y, x = lsim(system, u, t)
expected_x = 0.5 * tout**2
assert_almost_equal(x, expected_x)
assert_almost_equal(y, expected_x)
def test_double_integrator(self):
# double integrator: y'' = 2u
A = matrix("0. 1.; 0. 0.")
B = matrix("0.; 1.")
C = matrix("2. 0.")
system = self.lti_nowarn(A, B, C, 0.)
t = np.linspace(0,5)
u = np.ones_like(t)
tout, y, x = lsim(system, u, t)
expected_x = np.transpose(np.array([0.5 * tout**2, tout]))
expected_y = tout**2
assert_almost_equal(x, expected_x)
assert_almost_equal(y, expected_y)
def test_jordan_block(self):
# Non-diagonalizable A matrix
# x1' + x1 = x2
# x2' + x2 = u
# y = x1
# Exact solution with u = 0 is y(t) = t exp(-t)
A = matrix("-1. 1.; 0. -1.")
B = matrix("0.; 1.")
C = matrix("1. 0.")
system = self.lti_nowarn(A, B, C, 0.)
t = np.linspace(0,5)
u = np.zeros_like(t)
tout, y, x = lsim(system, u, t, X0=[0.0, 1.0])
expected_y = tout * np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_miso(self):
# A system with two state variables, two inputs, and one output.
A = np.array([[-1.0, 0.0], [0.0, -2.0]])
B = np.array([[1.0, 0.0], [0.0, 1.0]])
C = np.array([1.0, 0.0])
D = np.zeros((1,2))
system = self.lti_nowarn(A, B, C, D)
t = np.linspace(0, 5.0, 101)
u = np.zeros_like(t)
tout, y, x = lsim(system, u, t, X0=[1.0, 1.0])
expected_y = np.exp(-tout)
expected_x0 = np.exp(-tout)
expected_x1 = np.exp(-2.0*tout)
assert_almost_equal(y, expected_y)
assert_almost_equal(x[:,0], expected_x0)
assert_almost_equal(x[:,1], expected_x1)
def test_nonzero_initial_time(self):
system = self.lti_nowarn(-1.,1.,1.,0.)
t = np.linspace(1,2)
u = np.zeros_like(t)
tout, y, x = lsim(system, u, t, X0=[1.0])
expected_y = np.exp(-tout)
assert_almost_equal(y, expected_y)
class Test_lsim2(object):
def test_01(self):
t = np.linspace(0,10,1001)
u = np.zeros_like(t)
# First order system: x'(t) + x(t) = u(t), x(0) = 1.
# Exact solution is x(t) = exp(-t).
system = ([1.0],[1.0,1.0])
tout, y, x = lsim2(system, u, t, X0=[1.0])
expected_x = np.exp(-tout)
assert_almost_equal(x[:,0], expected_x)
def test_02(self):
t = np.array([0.0, 1.0, 1.0, 3.0])
u = np.array([0.0, 0.0, 1.0, 1.0])
# Simple integrator: x'(t) = u(t)
system = ([1.0],[1.0,0.0])
tout, y, x = lsim2(system, u, t, X0=[1.0])
expected_x = np.maximum(1.0, tout)
assert_almost_equal(x[:,0], expected_x)
def test_03(self):
t = np.array([0.0, 1.0, 1.0, 1.1, 1.1, 2.0])
u = np.array([0.0, 0.0, 1.0, 1.0, 0.0, 0.0])
# Simple integrator: x'(t) = u(t)
system = ([1.0],[1.0, 0.0])
tout, y, x = lsim2(system, u, t, hmax=0.01)
expected_x = np.array([0.0, 0.0, 0.0, 0.1, 0.1, 0.1])
assert_almost_equal(x[:,0], expected_x)
def test_04(self):
t = np.linspace(0, 10, 1001)
u = np.zeros_like(t)
        # Second order system with a repeated root: x''(t) + 2*x'(t) + x(t) = 0.
        # With initial conditions x(0)=1.0 and x'(0)=0.0, the exact solution
# is (1-t)*exp(-t).
system = ([1.0], [1.0, 2.0, 1.0])
tout, y, x = lsim2(system, u, t, X0=[1.0, 0.0])
expected_x = (1.0 - tout) * np.exp(-tout)
assert_almost_equal(x[:,0], expected_x)
def test_05(self):
# The call to lsim2 triggers a "BadCoefficients" warning from
# scipy.signal.filter_design, but the test passes. I think the warning
# is related to the incomplete handling of multi-input systems in
# scipy.signal.
# A system with two state variables, two inputs, and one output.
A = np.array([[-1.0, 0.0], [0.0, -2.0]])
B = np.array([[1.0, 0.0], [0.0, 1.0]])
C = np.array([1.0, 0.0])
D = np.zeros((1, 2))
t = np.linspace(0, 10.0, 101)
with suppress_warnings() as sup:
sup.filter(BadCoefficients)
tout, y, x = lsim2((A,B,C,D), T=t, X0=[1.0, 1.0])
expected_y = np.exp(-tout)
expected_x0 = np.exp(-tout)
expected_x1 = np.exp(-2.0 * tout)
assert_almost_equal(y, expected_y)
assert_almost_equal(x[:,0], expected_x0)
assert_almost_equal(x[:,1], expected_x1)
def test_06(self):
# Test use of the default values of the arguments `T` and `U`.
        # Second order system with a repeated root: x''(t) + 2*x'(t) + x(t) = 0.
        # With initial conditions x(0)=1.0 and x'(0)=0.0, the exact solution
# is (1-t)*exp(-t).
system = ([1.0], [1.0, 2.0, 1.0])
tout, y, x = lsim2(system, X0=[1.0, 0.0])
expected_x = (1.0 - tout) * np.exp(-tout)
assert_almost_equal(x[:,0], expected_x)
class _TestImpulseFuncs(object):
# Common tests for impulse/impulse2 (= self.func)
def test_01(self):
# First order system: x'(t) + x(t) = u(t)
# Exact impulse response is x(t) = exp(-t).
system = ([1.0], [1.0,1.0])
tout, y = self.func(system)
expected_y = np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_02(self):
# Specify the desired time values for the output.
# First order system: x'(t) + x(t) = u(t)
# Exact impulse response is x(t) = exp(-t).
system = ([1.0], [1.0,1.0])
n = 21
t = np.linspace(0, 2.0, n)
tout, y = self.func(system, T=t)
assert_equal(tout.shape, (n,))
assert_almost_equal(tout, t)
expected_y = np.exp(-t)
assert_almost_equal(y, expected_y)
def test_03(self):
# Specify an initial condition as a scalar.
# First order system: x'(t) + x(t) = u(t), x(0)=3.0
# Exact impulse response is x(t) = 4*exp(-t).
system = ([1.0], [1.0,1.0])
tout, y = self.func(system, X0=3.0)
expected_y = 4.0 * np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_04(self):
# Specify an initial condition as a list.
# First order system: x'(t) + x(t) = u(t), x(0)=3.0
# Exact impulse response is x(t) = 4*exp(-t).
system = ([1.0], [1.0,1.0])
tout, y = self.func(system, X0=[3.0])
expected_y = 4.0 * np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_05(self):
# Simple integrator: x'(t) = u(t)
system = ([1.0], [1.0,0.0])
tout, y = self.func(system)
expected_y = np.ones_like(tout)
assert_almost_equal(y, expected_y)
def test_06(self):
# Second order system with a repeated root:
        # x''(t) + 2*x'(t) + x(t) = u(t)
# The exact impulse response is t*exp(-t).
system = ([1.0], [1.0, 2.0, 1.0])
tout, y = self.func(system)
expected_y = tout * np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_array_like(self):
# Test that function can accept sequences, scalars.
system = ([1.0], [1.0, 2.0, 1.0])
# TODO: add meaningful test where X0 is a list
tout, y = self.func(system, X0=[3], T=[5, 6])
tout, y = self.func(system, X0=[3], T=[5])
def test_array_like2(self):
system = ([1.0], [1.0, 2.0, 1.0])
tout, y = self.func(system, X0=3, T=5)
class TestImpulse2(_TestImpulseFuncs):
def setup_method(self):
self.func = impulse2
class TestImpulse(_TestImpulseFuncs):
def setup_method(self):
self.func = impulse
class _TestStepFuncs(object):
def test_01(self):
# First order system: x'(t) + x(t) = u(t)
# Exact step response is x(t) = 1 - exp(-t).
system = ([1.0], [1.0,1.0])
tout, y = self.func(system)
expected_y = 1.0 - np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_02(self):
# Specify the desired time values for the output.
# First order system: x'(t) + x(t) = u(t)
# Exact step response is x(t) = 1 - exp(-t).
system = ([1.0], [1.0,1.0])
n = 21
t = np.linspace(0, 2.0, n)
tout, y = self.func(system, T=t)
assert_equal(tout.shape, (n,))
assert_almost_equal(tout, t)
expected_y = 1 - np.exp(-t)
assert_almost_equal(y, expected_y)
def test_03(self):
# Specify an initial condition as a scalar.
# First order system: x'(t) + x(t) = u(t), x(0)=3.0
# Exact step response is x(t) = 1 + 2*exp(-t).
system = ([1.0], [1.0,1.0])
tout, y = self.func(system, X0=3.0)
expected_y = 1 + 2.0*np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_04(self):
# Specify an initial condition as a list.
# First order system: x'(t) + x(t) = u(t), x(0)=3.0
# Exact step response is x(t) = 1 + 2*exp(-t).
system = ([1.0], [1.0,1.0])
tout, y = self.func(system, X0=[3.0])
expected_y = 1 + 2.0*np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_05(self):
# Simple integrator: x'(t) = u(t)
# Exact step response is x(t) = t.
system = ([1.0],[1.0,0.0])
tout, y = self.func(system)
expected_y = tout
assert_almost_equal(y, expected_y)
def test_06(self):
# Second order system with a repeated root:
        # x''(t) + 2*x'(t) + x(t) = u(t)
# The exact step response is 1 - (1 + t)*exp(-t).
system = ([1.0], [1.0, 2.0, 1.0])
tout, y = self.func(system)
expected_y = 1 - (1 + tout) * np.exp(-tout)
assert_almost_equal(y, expected_y)
def test_array_like(self):
# Test that function can accept sequences, scalars.
system = ([1.0], [1.0, 2.0, 1.0])
# TODO: add meaningful test where X0 is a list
tout, y = self.func(system, T=[5, 6])
class TestStep2(_TestStepFuncs):
def setup_method(self):
self.func = step2
def test_05(self):
        # This test is almost the same as the one it overrides in the base
        # class. The only difference is the tolerances passed to step2:
        # the default tolerances are not accurate enough for this test.
# Simple integrator: x'(t) = u(t)
# Exact step response is x(t) = t.
system = ([1.0], [1.0,0.0])
tout, y = self.func(system, atol=1e-10, rtol=1e-8)
expected_y = tout
assert_almost_equal(y, expected_y)
class TestStep(_TestStepFuncs):
def setup_method(self):
self.func = step
def test_complex_input(self):
# Test that complex input doesn't raise an error.
# `step` doesn't seem to have been designed for complex input, but this
# works and may be used, so add regression test. See gh-2654.
step(([], [-1], 1+0j))
class TestLti(object):
def test_lti_instantiation(self):
# Test that lti can be instantiated with sequences, scalars.
# See PR-225.
# TransferFunction
s = lti([1], [-1])
assert_(isinstance(s, TransferFunction))
assert_(isinstance(s, lti))
assert_(not isinstance(s, dlti))
assert_(s.dt is None)
# ZerosPolesGain
s = lti(np.array([]), np.array([-1]), 1)
assert_(isinstance(s, ZerosPolesGain))
assert_(isinstance(s, lti))
assert_(not isinstance(s, dlti))
assert_(s.dt is None)
# StateSpace
s = lti([], [-1], 1)
s = lti([1], [-1], 1, 3)
assert_(isinstance(s, StateSpace))
assert_(isinstance(s, lti))
assert_(not isinstance(s, dlti))
assert_(s.dt is None)
class TestStateSpace(object):
def test_initialization(self):
# Check that all initializations work
StateSpace(1, 1, 1, 1)
StateSpace([1], [2], [3], [4])
StateSpace(np.array([[1, 2], [3, 4]]), np.array([[1], [2]]),
np.array([[1, 0]]), np.array([[0]]))
def test_conversion(self):
# Check the conversion functions
s = StateSpace(1, 2, 3, 4)
assert_(isinstance(s.to_ss(), StateSpace))
assert_(isinstance(s.to_tf(), TransferFunction))
assert_(isinstance(s.to_zpk(), ZerosPolesGain))
# Make sure copies work
assert_(StateSpace(s) is not s)
assert_(s.to_ss() is not s)
def test_properties(self):
# Test setters/getters for cross class properties.
# This implicitly tests to_tf() and to_zpk()
# Getters
s = StateSpace(1, 1, 1, 1)
assert_equal(s.poles, [1])
assert_equal(s.zeros, [0])
assert_(s.dt is None)
def test_operators(self):
# Test +/-/* operators on systems
class BadType(object):
pass
s1 = StateSpace(np.array([[-0.5, 0.7], [0.3, -0.8]]),
np.array([[1], [0]]),
np.array([[1, 0]]),
np.array([[0]]),
)
s2 = StateSpace(np.array([[-0.2, -0.1], [0.4, -0.1]]),
np.array([[1], [0]]),
np.array([[1, 0]]),
np.array([[0]])
)
s_discrete = s1.to_discrete(0.1)
s2_discrete = s2.to_discrete(0.2)
# Impulse response
t = np.linspace(0, 1, 100)
u = np.zeros_like(t)
u[0] = 1
# Test multiplication
for typ in (int, float, complex, np.float32, np.complex128, np.array):
assert_allclose(lsim(typ(2) * s1, U=u, T=t)[1],
typ(2) * lsim(s1, U=u, T=t)[1])
assert_allclose(lsim(s1 * typ(2), U=u, T=t)[1],
lsim(s1, U=u, T=t)[1] * typ(2))
assert_allclose(lsim(s1 / typ(2), U=u, T=t)[1],
lsim(s1, U=u, T=t)[1] / typ(2))
with assert_raises(TypeError):
typ(2) / s1
assert_allclose(lsim(s1 * 2, U=u, T=t)[1],
lsim(s1, U=2 * u, T=t)[1])
assert_allclose(lsim(s1 * s2, U=u, T=t)[1],
lsim(s1, U=lsim(s2, U=u, T=t)[1], T=t)[1],
atol=1e-5)
with assert_raises(TypeError):
s1 / s1
with assert_raises(TypeError):
s1 * s_discrete
with assert_raises(TypeError):
# Check different discretization constants
s_discrete * s2_discrete
with assert_raises(TypeError):
s1 * BadType()
with assert_raises(TypeError):
BadType() * s1
with assert_raises(TypeError):
s1 / BadType()
with assert_raises(TypeError):
BadType() / s1
# Test addition
assert_allclose(lsim(s1 + 2, U=u, T=t)[1],
2 * u + lsim(s1, U=u, T=t)[1])
# Check for dimension mismatch
with assert_raises(ValueError):
s1 + np.array([1, 2])
with assert_raises(ValueError):
np.array([1, 2]) + s1
with assert_raises(TypeError):
s1 + s_discrete
with assert_raises(ValueError):
s1 / np.array([[1, 2], [3, 4]])
with assert_raises(TypeError):
# Check different discretization constants
s_discrete + s2_discrete
with assert_raises(TypeError):
s1 + BadType()
with assert_raises(TypeError):
BadType() + s1
assert_allclose(lsim(s1 + s2, U=u, T=t)[1],
lsim(s1, U=u, T=t)[1] + lsim(s2, U=u, T=t)[1])
# Test subtraction
assert_allclose(lsim(s1 - 2, U=u, T=t)[1],
-2 * u + lsim(s1, U=u, T=t)[1])
assert_allclose(lsim(2 - s1, U=u, T=t)[1],
2 * u + lsim(-s1, U=u, T=t)[1])
assert_allclose(lsim(s1 - s2, U=u, T=t)[1],
lsim(s1, U=u, T=t)[1] - lsim(s2, U=u, T=t)[1])
with assert_raises(TypeError):
s1 - BadType()
with assert_raises(TypeError):
BadType() - s1
class TestTransferFunction(object):
def test_initialization(self):
# Check that all initializations work
TransferFunction(1, 1)
TransferFunction([1], [2])
TransferFunction(np.array([1]), np.array([2]))
def test_conversion(self):
# Check the conversion functions
s = TransferFunction([1, 0], [1, -1])
assert_(isinstance(s.to_ss(), StateSpace))
assert_(isinstance(s.to_tf(), TransferFunction))
assert_(isinstance(s.to_zpk(), ZerosPolesGain))
# Make sure copies work
assert_(TransferFunction(s) is not s)
assert_(s.to_tf() is not s)
def test_properties(self):
# Test setters/getters for cross class properties.
# This implicitly tests to_ss() and to_zpk()
# Getters
s = TransferFunction([1, 0], [1, -1])
assert_equal(s.poles, [1])
assert_equal(s.zeros, [0])
class TestZerosPolesGain(object):
def test_initialization(self):
# Check that all initializations work
ZerosPolesGain(1, 1, 1)
ZerosPolesGain([1], [2], 1)
ZerosPolesGain(np.array([1]), np.array([2]), 1)
def test_conversion(self):
        # Check the conversion functions
s = ZerosPolesGain(1, 2, 3)
assert_(isinstance(s.to_ss(), StateSpace))
assert_(isinstance(s.to_tf(), TransferFunction))
assert_(isinstance(s.to_zpk(), ZerosPolesGain))
# Make sure copies work
assert_(ZerosPolesGain(s) is not s)
assert_(s.to_zpk() is not s)
class Test_abcd_normalize(object):
def setup_method(self):
self.A = np.array([[1.0, 2.0], [3.0, 4.0]])
self.B = np.array([[-1.0], [5.0]])
self.C = np.array([[4.0, 5.0]])
self.D = np.array([[2.5]])
def test_no_matrix_fails(self):
assert_raises(ValueError, abcd_normalize)
def test_A_nosquare_fails(self):
assert_raises(ValueError, abcd_normalize, [1, -1],
self.B, self.C, self.D)
def test_AB_mismatch_fails(self):
assert_raises(ValueError, abcd_normalize, self.A, [-1, 5],
self.C, self.D)
def test_AC_mismatch_fails(self):
assert_raises(ValueError, abcd_normalize, self.A, self.B,
[[4.0], [5.0]], self.D)
def test_CD_mismatch_fails(self):
assert_raises(ValueError, abcd_normalize, self.A, self.B,
self.C, [2.5, 0])
def test_BD_mismatch_fails(self):
assert_raises(ValueError, abcd_normalize, self.A, [-1, 5],
self.C, self.D)
def test_normalized_matrices_unchanged(self):
A, B, C, D = abcd_normalize(self.A, self.B, self.C, self.D)
assert_equal(A, self.A)
assert_equal(B, self.B)
assert_equal(C, self.C)
assert_equal(D, self.D)
def test_shapes(self):
A, B, C, D = abcd_normalize(self.A, self.B, [1, 0], 0)
assert_equal(A.shape[0], A.shape[1])
assert_equal(A.shape[0], B.shape[0])
assert_equal(A.shape[0], C.shape[1])
assert_equal(C.shape[0], D.shape[0])
assert_equal(B.shape[1], D.shape[1])
def test_zero_dimension_is_not_none1(self):
B_ = np.zeros((2, 0))
D_ = np.zeros((0, 0))
A, B, C, D = abcd_normalize(A=self.A, B=B_, D=D_)
assert_equal(A, self.A)
assert_equal(B, B_)
assert_equal(D, D_)
assert_equal(C.shape[0], D_.shape[0])
assert_equal(C.shape[1], self.A.shape[0])
def test_zero_dimension_is_not_none2(self):
B_ = np.zeros((2, 0))
C_ = np.zeros((0, 2))
A, B, C, D = abcd_normalize(A=self.A, B=B_, C=C_)
assert_equal(A, self.A)
assert_equal(B, B_)
assert_equal(C, C_)
assert_equal(D.shape[0], C_.shape[0])
assert_equal(D.shape[1], B_.shape[1])
def test_missing_A(self):
A, B, C, D = abcd_normalize(B=self.B, C=self.C, D=self.D)
assert_equal(A.shape[0], A.shape[1])
assert_equal(A.shape[0], B.shape[0])
assert_equal(A.shape, (self.B.shape[0], self.B.shape[0]))
def test_missing_B(self):
A, B, C, D = abcd_normalize(A=self.A, C=self.C, D=self.D)
assert_equal(B.shape[0], A.shape[0])
assert_equal(B.shape[1], D.shape[1])
assert_equal(B.shape, (self.A.shape[0], self.D.shape[1]))
def test_missing_C(self):
A, B, C, D = abcd_normalize(A=self.A, B=self.B, D=self.D)
assert_equal(C.shape[0], D.shape[0])
assert_equal(C.shape[1], A.shape[0])
assert_equal(C.shape, (self.D.shape[0], self.A.shape[0]))
def test_missing_D(self):
A, B, C, D = abcd_normalize(A=self.A, B=self.B, C=self.C)
assert_equal(D.shape[0], C.shape[0])
assert_equal(D.shape[1], B.shape[1])
assert_equal(D.shape, (self.C.shape[0], self.B.shape[1]))
def test_missing_AB(self):
A, B, C, D = abcd_normalize(C=self.C, D=self.D)
assert_equal(A.shape[0], A.shape[1])
assert_equal(A.shape[0], B.shape[0])
assert_equal(B.shape[1], D.shape[1])
assert_equal(A.shape, (self.C.shape[1], self.C.shape[1]))
assert_equal(B.shape, (self.C.shape[1], self.D.shape[1]))
def test_missing_AC(self):
A, B, C, D = abcd_normalize(B=self.B, D=self.D)
assert_equal(A.shape[0], A.shape[1])
assert_equal(A.shape[0], B.shape[0])
assert_equal(C.shape[0], D.shape[0])
assert_equal(C.shape[1], A.shape[0])
assert_equal(A.shape, (self.B.shape[0], self.B.shape[0]))
assert_equal(C.shape, (self.D.shape[0], self.B.shape[0]))
def test_missing_AD(self):
A, B, C, D = abcd_normalize(B=self.B, C=self.C)
assert_equal(A.shape[0], A.shape[1])
assert_equal(A.shape[0], B.shape[0])
assert_equal(D.shape[0], C.shape[0])
assert_equal(D.shape[1], B.shape[1])
assert_equal(A.shape, (self.B.shape[0], self.B.shape[0]))
assert_equal(D.shape, (self.C.shape[0], self.B.shape[1]))
def test_missing_BC(self):
A, B, C, D = abcd_normalize(A=self.A, D=self.D)
assert_equal(B.shape[0], A.shape[0])
assert_equal(B.shape[1], D.shape[1])
assert_equal(C.shape[0], D.shape[0])
assert_equal(C.shape[1], A.shape[0])
assert_equal(B.shape, (self.A.shape[0], self.D.shape[1]))
assert_equal(C.shape, (self.D.shape[0], self.A.shape[0]))
def test_missing_ABC_fails(self):
assert_raises(ValueError, abcd_normalize, D=self.D)
def test_missing_BD_fails(self):
assert_raises(ValueError, abcd_normalize, A=self.A, C=self.C)
def test_missing_CD_fails(self):
assert_raises(ValueError, abcd_normalize, A=self.A, B=self.B)
class Test_bode(object):
def test_01(self):
# Test bode() magnitude calculation (manual sanity check).
# 1st order low-pass filter: H(s) = 1 / (s + 1),
# cutoff: 1 rad/s, slope: -20 dB/decade
# H(s=0.1) ~= 0 dB
# H(s=1) ~= -3 dB
# H(s=10) ~= -20 dB
# H(s=100) ~= -40 dB
system = lti([1], [1, 1])
w = [0.1, 1, 10, 100]
w, mag, phase = bode(system, w=w)
expected_mag = [0, -3, -20, -40]
assert_almost_equal(mag, expected_mag, decimal=1)
def test_02(self):
# Test bode() phase calculation (manual sanity check).
# 1st order low-pass filter: H(s) = 1 / (s + 1),
# angle(H(s=0.1)) ~= -5.7 deg
# angle(H(s=1)) ~= -45 deg
# angle(H(s=10)) ~= -84.3 deg
system = lti([1], [1, 1])
w = [0.1, 1, 10]
w, mag, phase = bode(system, w=w)
expected_phase = [-5.7, -45, -84.3]
assert_almost_equal(phase, expected_phase, decimal=1)
def test_03(self):
# Test bode() magnitude calculation.
# 1st order low-pass filter: H(s) = 1 / (s + 1)
system = lti([1], [1, 1])
w = [0.1, 1, 10, 100]
w, mag, phase = bode(system, w=w)
jw = w * 1j
y = np.polyval(system.num, jw) / np.polyval(system.den, jw)
expected_mag = 20.0 * np.log10(abs(y))
assert_almost_equal(mag, expected_mag)
def test_04(self):
# Test bode() phase calculation.
# 1st order low-pass filter: H(s) = 1 / (s + 1)
system = lti([1], [1, 1])
w = [0.1, 1, 10, 100]
w, mag, phase = bode(system, w=w)
jw = w * 1j
y = np.polyval(system.num, jw) / np.polyval(system.den, jw)
expected_phase = np.arctan2(y.imag, y.real) * 180.0 / np.pi
assert_almost_equal(phase, expected_phase)
def test_05(self):
# Test that bode() finds a reasonable frequency range.
# 1st order low-pass filter: H(s) = 1 / (s + 1)
system = lti([1], [1, 1])
n = 10
# Expected range is from 0.01 to 10.
expected_w = np.logspace(-2, 1, n)
w, mag, phase = bode(system, n=n)
assert_almost_equal(w, expected_w)
def test_06(self):
# Test that bode() doesn't fail on a system with a pole at 0.
# integrator, pole at zero: H(s) = 1 / s
system = lti([1], [1, 0])
w, mag, phase = bode(system, n=2)
        assert_equal(w[0], 0.01)  # a failure would give not-a-number
def test_07(self):
# bode() should not fail on a system with pure imaginary poles.
# The test passes if bode doesn't raise an exception.
system = lti([1], [1, 0, 100])
w, mag, phase = bode(system, n=2)
def test_08(self):
        # Test that bode() returns a continuous phase; see gh-2331.
system = lti([], [-10, -30, -40, -60, -70], 1)
w, mag, phase = system.bode(w=np.logspace(-3, 40, 100))
assert_almost_equal(min(phase), -450, decimal=15)
def test_from_state_space(self):
# Ensure that bode works with a system that was created from the
# state space representation matrices A, B, C, D. In this case,
# system.num will be a 2-D array with shape (1, n+1), where (n,n)
# is the shape of A.
# A Butterworth lowpass filter is used, so we know the exact
# frequency response.
a = np.array([1.0, 2.0, 2.0, 1.0])
A = linalg.companion(a).T
B = np.array([[0.0], [0.0], [1.0]])
C = np.array([[1.0, 0.0, 0.0]])
D = np.array([[0.0]])
with suppress_warnings() as sup:
sup.filter(BadCoefficients)
system = lti(A, B, C, D)
w, mag, phase = bode(system, n=100)
expected_magnitude = 20 * np.log10(np.sqrt(1.0 / (1.0 + w**6)))
assert_almost_equal(mag, expected_magnitude)
class Test_freqresp(object):
def test_output_manual(self):
# Test freqresp() output calculation (manual sanity check).
# 1st order low-pass filter: H(s) = 1 / (s + 1),
# re(H(s=0.1)) ~= 0.99
# re(H(s=1)) ~= 0.5
# re(H(s=10)) ~= 0.0099
system = lti([1], [1, 1])
w = [0.1, 1, 10]
w, H = freqresp(system, w=w)
expected_re = [0.99, 0.5, 0.0099]
expected_im = [-0.099, -0.5, -0.099]
assert_almost_equal(H.real, expected_re, decimal=1)
assert_almost_equal(H.imag, expected_im, decimal=1)
def test_output(self):
# Test freqresp() output calculation.
# 1st order low-pass filter: H(s) = 1 / (s + 1)
system = lti([1], [1, 1])
w = [0.1, 1, 10, 100]
w, H = freqresp(system, w=w)
s = w * 1j
expected = np.polyval(system.num, s) / np.polyval(system.den, s)
assert_almost_equal(H.real, expected.real)
assert_almost_equal(H.imag, expected.imag)
def test_freq_range(self):
# Test that freqresp() finds a reasonable frequency range.
# 1st order low-pass filter: H(s) = 1 / (s + 1)
# Expected range is from 0.01 to 10.
system = lti([1], [1, 1])
n = 10
expected_w = np.logspace(-2, 1, n)
w, H = freqresp(system, n=n)
assert_almost_equal(w, expected_w)
def test_pole_zero(self):
# Test that freqresp() doesn't fail on a system with a pole at 0.
# integrator, pole at zero: H(s) = 1 / s
system = lti([1], [1, 0])
w, H = freqresp(system, n=2)
        assert_equal(w[0], 0.01)  # a failure would give not-a-number
def test_from_state_space(self):
# Ensure that freqresp works with a system that was created from the
# state space representation matrices A, B, C, D. In this case,
# system.num will be a 2-D array with shape (1, n+1), where (n,n) is
# the shape of A.
# A Butterworth lowpass filter is used, so we know the exact
# frequency response.
a = np.array([1.0, 2.0, 2.0, 1.0])
A = linalg.companion(a).T
B = np.array([[0.0],[0.0],[1.0]])
C = np.array([[1.0, 0.0, 0.0]])
D = np.array([[0.0]])
with suppress_warnings() as sup:
sup.filter(BadCoefficients)
system = lti(A, B, C, D)
w, H = freqresp(system, n=100)
s = w * 1j
expected = (1.0 / (1.0 + 2*s + 2*s**2 + s**3))
assert_almost_equal(H.real, expected.real)
assert_almost_equal(H.imag, expected.imag)
def test_from_zpk(self):
        # 4th order low-pass filter: H(s) = 1 / (s + 1)**4
system = lti([],[-1]*4,[1])
w = [0.1, 1, 10, 100]
w, H = freqresp(system, w=w)
s = w * 1j
expected = 1 / (s + 1)**4
assert_almost_equal(H.real, expected.real)
assert_almost_equal(H.imag, expected.imag)
| aeklant/scipy | scipy/signal/tests/test_ltisys.py | Python | bsd-3-clause | 46,195 |
from django.contrib.admin.views.decorators import staff_member_required
from django.shortcuts import get_object_or_404
from pdfdocument.utils import pdf_response
import plata
import plata.reporting.product
import plata.reporting.order
@staff_member_required
def product_xls(request):
"""
    Returns an XLSX workbook containing product information
"""
return plata.reporting.product.product_xls().to_response('products.xlsx')
@staff_member_required
def invoice_pdf(request, order_id):
"""
Returns the invoice PDF
"""
order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
pdf, response = pdf_response('invoice-%09d' % order.id)
plata.reporting.order.invoice_pdf(pdf, order)
return response
@staff_member_required
def packing_slip_pdf(request, order_id):
"""
Returns the packing slip PDF
"""
order = get_object_or_404(plata.shop_instance().order_model, pk=order_id)
pdf, response = pdf_response('packing-slip-%09d' % order.id)
plata.reporting.order.packing_slip_pdf(pdf, order)
return response
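# Hedged wiring sketch -- the URL patterns below are illustrative and not
# part of this module; only the three view callables above are real:
#   from django.conf.urls import url
#   urlpatterns = [
#       url(r'^reporting/product_xls/$', product_xls),
#       url(r'^reporting/(?P<order_id>\d+)/invoice/$', invoice_pdf),
#       url(r'^reporting/(?P<order_id>\d+)/packing-slip/$', packing_slip_pdf),
#   ]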
| ixc/plata | plata/reporting/views.py | Python | bsd-3-clause | 1,080 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-29 12:16
from __future__ import unicode_literals
from django.contrib.postgres.operations import BtreeGinExtension
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('product', '0037_auto_20171124_0847'),
]
operations = [
BtreeGinExtension()
]
| UITools/saleor | saleor/product/migrations/0038_auto_20171129_0616.py | Python | bsd-3-clause | 383 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Afrikaans language.
.. seealso:: http://en.wikipedia.org/wiki/Afrikaans_language
"""
import re
from translate.lang import common
articlere = re.compile(r"'n\b")
class af(common.Common):
"""This class represents Afrikaans."""
validdoublewords = [u"u"]
punctuation = u"".join([common.Common.commonpunc, common.Common.quotes,
common.Common.miscpunc])
sentenceend = u".!?…"
sentencere = re.compile(r"""
(?s) # make . also match newlines
.*? # anything, but match non-greedy
        [%s]   # the punctuation for sentence ending
        \s+    # the spacing after the punctuation
(?='n\s[A-Z]|[^'a-z\d]|'[^n])
# lookahead that next part starts with caps or 'n followed by caps
""" % sentenceend, re.VERBOSE
)
specialchars = u"ëïêôûáéíóúý"
def capsstart(cls, text):
"""Modify this for the indefinite article ('n)."""
match = articlere.search(text, 0, 20)
if match:
            # construct a list of non-apostrophe punctuation:
nonapos = u"".join(cls.punctuation.split(u"'"))
stripped = text.lstrip().lstrip(nonapos)
match = articlere.match(stripped)
if match:
return common.Common.capsstart(stripped[match.end():])
return common.Common.capsstart(text)
capsstart = classmethod(capsstart)
cyr2lat = {
u"А": "A", u"а": "a",
u"Б": "B", u"б": "b",
u"В": "W", u"в": "w", # Different if at the end of a syllable see rule 2.
u"Г": "G", u"г": "g", # see rule 3 and 4
u"Д": "D", u"д": "d",
u"ДЖ": "Dj", u"дж": "dj",
u"Е": "Je", u"е": "je", # Sometimes e need to check when/why see rule 5.
u"Ё": "Jo", u"ё": "jo", # see rule 6
u"ЕЙ": "Ei", u"ей": "ei",
u"Ж": "Zj", u"ж": "zj",
u"З": "Z", u"з": "z",
u"И": "I", u"и": "i",
u"Й": "J", u"й": "j", # see rule 9 and 10
u"К": "K", u"к": "k", # see note 11
u"Л": "L", u"л": "l",
u"М": "M", u"м": "m",
u"Н": "N", u"н": "n",
u"О": "O", u"о": "o",
u"П": "P", u"п": "p",
u"Р": "R", u"р": "r",
u"С": "S", u"с": "s", # see note 12
u"Т": "T", u"т": "t",
u"У": "Oe", u"у": "oe",
u"Ф": "F", u"ф": "f",
u"Х": "Ch", u"х": "ch", # see rule 12
u"Ц": "Ts", u"ц": "ts",
u"Ч": "Tj", u"ч": "tj",
u"Ш": "Sj", u"ш": "sj",
u"Щ": "Sjtsj", u"щ": "sjtsj",
u"Ы": "I", u"ы": "i", # see note 13
u"Ъ": "", u"ъ": "", # See note 14
u"Ь": "", u"ь": "", # this letter is not in the AWS we assume it is left out as in the previous letter
u"Э": "E", u"э": "e",
u"Ю": "Joe", u"ю": "joe",
u"Я": "Ja", u"я": "ja",
}
"""Mapping of Cyrillic to Latin letters for transliteration in Afrikaans"""
cyr_vowels = u"аеёиоуыэюя"
def transliterate_cyrillic(text):
"""Convert Cyrillic text to Latin according to the AWS transliteration rules."""
trans = u""
for i in text:
trans += cyr2lat.get(i, i)
return trans
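# Hedged example of the transliteration above (derived solely from the
# cyr2lat mapping; not part of the original module):
#   transliterate_cyrillic(u"Москва") == u"Moskwa"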
| staranjeet/fjord | vendor/packages/translate-toolkit/translate/lang/af.py | Python | bsd-3-clause | 3,846 |
from unittest import TestCase
from chatterbot.corpus import Corpus
import os
class CorpusUtilsTestCase(TestCase):
def setUp(self):
self.corpus = Corpus()
def test_get_file_path(self):
"""
        Test that a dotted path is properly converted to a file path.
"""
path = self.corpus.get_file_path('chatterbot.corpus.english')
self.assertIn(
os.path.join('chatterbot_corpus', 'data', 'english'),
path
)
def test_read_english_corpus(self):
corpus_path = os.path.join(
self.corpus.data_directory,
'english', 'conversations.corpus.json'
)
data = self.corpus.read_corpus(corpus_path)
self.assertIn('conversations', data)
def test_list_english_corpus_files(self):
data_files = self.corpus.list_corpus_files('chatterbot.corpus.english')
self.assertIn('.json', data_files[0])
def test_load_corpus(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.english.greetings')
self.assertEqual(len(corpus), 1)
self.assertIn(['Hi', 'Hello'], corpus[0])
class CorpusLoadingTestCase(TestCase):
def setUp(self):
self.corpus = Corpus()
def test_load_corpus_chinese(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.chinese')
self.assertTrue(len(corpus))
def test_load_corpus_english(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.english')
self.assertTrue(len(corpus))
def test_load_corpus_french(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.french')
self.assertTrue(len(corpus))
def test_load_corpus_german(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.german')
self.assertTrue(len(corpus))
def test_load_corpus_hindi(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.hindi')
self.assertTrue(len(corpus))
def test_load_corpus_indonesia(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.indonesia')
self.assertTrue(len(corpus))
def test_load_corpus_italian(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.italian')
self.assertTrue(len(corpus))
def test_load_corpus_marathi(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.marathi')
self.assertTrue(len(corpus))
def test_load_corpus_portuguese(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.portuguese')
self.assertTrue(len(corpus))
def test_load_corpus_russian(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.russian')
self.assertTrue(len(corpus))
def test_load_corpus_spanish(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.spanish')
self.assertTrue(len(corpus))
def test_load_corpus_telugu(self):
corpus = self.corpus.load_corpus('chatterbot.corpus.telugu')
self.assertTrue(len(corpus))
| davizucon/ChatterBot | tests/corpus_tests/test_corpus.py | Python | bsd-3-clause | 3,023 |
import sys
import os
import glob
import shutil
import datetime
assert 'pymel' not in sys.modules or 'PYMEL_INCLUDE_EXAMPLES' in os.environ, "to generate docs PYMEL_INCLUDE_EXAMPLES env var must be set before pymel is imported"
# remember, the processed command examples are not version specific. you must
# run cmdcache.fixCodeExamples() to bring processed examples in from the raw
# version-specific example caches
os.environ['PYMEL_INCLUDE_EXAMPLES'] = 'True'
pymel_root = os.path.dirname(os.path.dirname(sys.modules[__name__].__file__))
docsdir = os.path.join(pymel_root, 'docs')
stubdir = os.path.join(pymel_root, 'extras', 'completion', 'py')
useStubs = False
if useStubs:
sys.path.insert(0, stubdir)
import pymel
print pymel.__file__
else:
import pymel
# make sure dynamic modules are fully loaded
from pymel.core.uitypes import *
from pymel.core.nodetypes import *
version = pymel.__version__.rsplit('.',1)[0]
SOURCE = 'source'
BUILD_ROOT = 'build'
BUILD = os.path.join(BUILD_ROOT, version)
sourcedir = os.path.join(docsdir, SOURCE)
gendir = os.path.join(sourcedir, 'generated')
buildrootdir = os.path.join(docsdir, BUILD_ROOT)
builddir = os.path.join(docsdir, BUILD)
from pymel.internal.cmdcache import fixCodeExamples
def generate(clean=True):
"delete build and generated directories and generate a top-level documentation source file for each module."
print "generating %s - %s" % (docsdir, datetime.datetime.now())
from sphinx.ext.autosummary.generate import main as sphinx_autogen
if clean:
clean_build()
clean_generated()
os.chdir(sourcedir)
sphinx_autogen( [''] + '--templates ../templates modules.rst'.split() )
sphinx_autogen( [''] + '--templates ../templates'.split() + glob.glob('generated/pymel.*.rst') )
print "...done generating %s - %s" % (docsdir, datetime.datetime.now())
def clean_build():
"delete existing build directory"
if os.path.exists(buildrootdir):
print "removing %s - %s" % (buildrootdir, datetime.datetime.now())
shutil.rmtree(buildrootdir)
def clean_generated():
"delete existing generated directory"
if os.path.exists(gendir):
print "removing %s - %s" % (gendir, datetime.datetime.now())
shutil.rmtree(gendir)
def find_dot():
if os.name == 'posix':
dot_bin = 'dot'
else:
dot_bin = 'dot.exe'
for p in os.environ['PATH'].split(os.pathsep):
d = os.path.join(p, dot_bin)
if os.path.exists(d):
return d
    raise TypeError('cannot find graphviz dot executable in the path (%s)' % os.environ['PATH'])
def copy_changelog():
changelog = os.path.join(pymel_root, 'CHANGELOG.rst')
whatsnew = os.path.join(pymel_root, 'docs', 'source', 'whats_new.rst')
shutil.copy2(changelog, whatsnew)
def build(clean=True, **kwargs):
from sphinx import main as sphinx_build
print "building %s - %s" % (docsdir, datetime.datetime.now())
if not os.path.isdir(gendir):
generate()
os.chdir( docsdir )
if clean:
clean_build()
copy_changelog()
#mkdir -p build/html build/doctrees
#import pymel.internal.cmdcache as cmdcache
#cmdcache.fixCodeExamples()
opts = ['']
opts += '-b html -d build/doctrees'.split()
# set some defaults
if not kwargs.get('graphviz_dot', None):
kwargs['graphviz_dot'] = find_dot()
for key, value in kwargs.iteritems():
opts.append('-D')
opts.append( key.strip() + '=' + value.strip() )
opts.append('-P')
opts.append(SOURCE)
opts.append(BUILD)
sphinx_build(opts)
print "...done building %s - %s" % (docsdir, datetime.datetime.now())
| shrtcww/pymel | maintenance/docs.py | Python | bsd-3-clause | 3,694 |
# -*- coding: utf-8 -*-
"""
Classes that process (and maybe abort) responses based on
various conditions. They should be used with
:class:`splash.network_manager.SplashQNetworkAccessManager`.
"""
from __future__ import absolute_import
from PyQt5.QtNetwork import QNetworkRequest
from splash.qtutils import request_repr
from twisted.python import log
import fnmatch
class ContentTypeMiddleware(object):
"""
Response middleware, aborts responses depending on the content type.
A response will be aborted (and the underlying connection closed) after
receiving the response headers if the content type of the response is not
in the whitelist or it's in the blacklist. Both lists support wildcards.
"""
def __init__(self, verbosity=0):
self.verbosity = verbosity
@staticmethod
def contains(mime_set, mime):
"""
>>> ContentTypeMiddleware.contains({'*/*'}, 'any/thing')
True
>>> ContentTypeMiddleware.contains(set(), 'any/thing')
False
>>> ContentTypeMiddleware.contains({'text/css', 'image/*'}, 'image/png')
True
>>> ContentTypeMiddleware.contains({'*'}, 'any-thing')
True
"""
for pattern in mime_set:
if fnmatch.fnmatch(mime, pattern):
return True
return False
@staticmethod
def clean_mime(mime):
"""
Remove attributes from a mime string:
>>> ContentTypeMiddleware.clean_mime(' text/html; charset=utf-8\t ')
'text/html'
"""
separator = mime.find(';')
if separator > 0:
mime = mime[:separator]
return mime.strip()
def process(self, reply, render_options):
content_type = reply.header(QNetworkRequest.ContentTypeHeader)
if content_type is None:
return
mimetype = self.clean_mime(content_type)
allowed = render_options.get_allowed_content_types()
forbidden = render_options.get_forbidden_content_types()
whitelist = set(map(ContentTypeMiddleware.clean_mime, allowed))
blacklist = set(map(ContentTypeMiddleware.clean_mime, forbidden))
if self.contains(blacklist, mimetype) or not self.contains(whitelist, mimetype):
if self.verbosity >= 2:
request_str = request_repr(reply, reply.operation())
msg = "Dropping %s because of Content Type" % request_str
log.msg(msg, system='response_middleware')
reply.abort()
| pawelmhm/splash | splash/response_middleware.py | Python | bsd-3-clause | 2,510 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
class KeyVaultClientConfiguration(Configuration):
"""Configuration for KeyVaultClient.
Note that all parameters used to create this instance are saved as instance
attributes.
"""
def __init__(
self,
**kwargs # type: Any
):
# type: (...) -> None
super(KeyVaultClientConfiguration, self).__init__(**kwargs)
kwargs.setdefault('sdk_moniker', 'azure-keyvault/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
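# Hedged usage sketch (not part of the generated client; the kwarg names
# mirror those read in _configure above, and the retry settings shown are
# illustrative):
#   config = KeyVaultClientConfiguration(
#       retry_policy=policies.RetryPolicy(retry_total=3))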
| Azure/azure-sdk-for-python | sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/_configuration.py | Python | mit | 2,179 |
#
# THIS IS WORK IN PROGRESS
#
# The Python Imaging Library.
# $Id$
#
# FlashPix support for PIL
#
# History:
# 97-01-25 fl Created (reads uncompressed RGB images only)
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1997.
#
# See the README file for information on usage and redistribution.
#
from __future__ import print_function
from . import Image, ImageFile
from ._binary import i32le as i32, i8
import olefile
__version__ = "0.1"
# we map from colour field tuples to (mode, rawmode) descriptors
MODES = {
# opacity
    (0x00007ffe,): ("A", "L"),
# monochrome
(0x00010000,): ("L", "L"),
(0x00018000, 0x00017ffe): ("RGBA", "LA"),
# photo YCC
(0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"),
(0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"),
# standard RGB (NIFRGB)
(0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"),
(0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"),
}
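# Hedged lookup example (the key is taken verbatim from the table above):
#   MODES[(0x00030000, 0x00030001, 0x00030002)] == ("RGB", "RGB")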
#
# --------------------------------------------------------------------
def _accept(prefix):
return prefix[:8] == olefile.MAGIC
##
# Image plugin for the FlashPix images.
class FpxImageFile(ImageFile.ImageFile):
format = "FPX"
format_description = "FlashPix"
def _open(self):
#
# read the OLE directory and see if this is a likely
# to be a FlashPix file
try:
self.ole = olefile.OleFileIO(self.fp)
except IOError:
raise SyntaxError("not an FPX file; invalid OLE file")
if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B":
raise SyntaxError("not an FPX file; bad root CLSID")
self._open_index(1)
def _open_index(self, index=1):
#
# get the Image Contents Property Set
prop = self.ole.getproperties([
"Data Object Store %06d" % index,
"\005Image Contents"
])
# size (highest resolution)
self._size = prop[0x1000002], prop[0x1000003]
size = max(self.size)
i = 1
while size > 64:
size = size / 2
i += 1
self.maxid = i - 1
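        # A hedged reading of the loop above: maxid indexes the coarsest
        # pyramid level; FlashPix stores successively halved subimages until
        # the largest side fits within a single 64-pixel tile.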
# mode. instead of using a single field for this, flashpix
# requires you to specify the mode for each channel in each
# resolution subimage, and leaves it to the decoder to make
# sure that they all match. for now, we'll cheat and assume
# that this is always the case.
id = self.maxid << 16
s = prop[0x2000002 | id]
colors = []
for i in range(i32(s, 4)):
# note: for now, we ignore the "uncalibrated" flag
colors.append(i32(s, 8+i*4) & 0x7fffffff)
self.mode, self.rawmode = MODES[tuple(colors)]
# load JPEG tables, if any
self.jpeg = {}
for i in range(256):
id = 0x3000001 | (i << 16)
if id in prop:
self.jpeg[i] = prop[id]
self._open_subimage(1, self.maxid)
def _open_subimage(self, index=1, subimage=0):
#
# setup tile descriptors for a given subimage
stream = [
"Data Object Store %06d" % index,
"Resolution %04d" % subimage,
"Subimage 0000 Header"
]
fp = self.ole.openstream(stream)
# skip prefix
fp.read(28)
# header stream
s = fp.read(36)
size = i32(s, 4), i32(s, 8)
# tilecount = i32(s, 12)
tilesize = i32(s, 16), i32(s, 20)
# channels = i32(s, 24)
offset = i32(s, 28)
length = i32(s, 32)
if size != self.size:
raise IOError("subimage mismatch")
# get tile descriptors
fp.seek(28 + offset)
s = fp.read(i32(s, 12) * length)
x = y = 0
xsize, ysize = size
xtile, ytile = tilesize
self.tile = []
for i in range(0, len(s), length):
compression = i32(s, i+8)
if compression == 0:
self.tile.append(("raw", (x, y, x+xtile, y+ytile),
i32(s, i) + 28, (self.rawmode)))
elif compression == 1:
# FIXME: the fill decoder is not implemented
self.tile.append(("fill", (x, y, x+xtile, y+ytile),
i32(s, i) + 28, (self.rawmode, s[12:16])))
elif compression == 2:
internal_color_conversion = i8(s[14])
jpeg_tables = i8(s[15])
rawmode = self.rawmode
if internal_color_conversion:
# The image is stored as usual (usually YCbCr).
if rawmode == "RGBA":
# For "RGBA", data is stored as YCbCrA based on
# negative RGB. The following trick works around
# this problem :
jpegmode, rawmode = "YCbCrK", "CMYK"
else:
jpegmode = None # let the decoder decide
else:
# The image is stored as defined by rawmode
jpegmode = rawmode
self.tile.append(("jpeg", (x, y, x+xtile, y+ytile),
i32(s, i) + 28, (rawmode, jpegmode)))
# FIXME: jpeg tables are tile dependent; the prefix
# data must be placed in the tile descriptor itself!
if jpeg_tables:
self.tile_prefix = self.jpeg[jpeg_tables]
else:
raise IOError("unknown/invalid compression")
x = x + xtile
if x >= xsize:
x, y = 0, y + ytile
if y >= ysize:
break # isn't really required
self.stream = stream
self.fp = None
def load(self):
if not self.fp:
self.fp = self.ole.openstream(self.stream[:2] +
["Subimage 0000 Data"])
return ImageFile.ImageFile.load(self)
#
# --------------------------------------------------------------------
Image.register_open(FpxImageFile.format, FpxImageFile, _accept)
Image.register_extension(FpxImageFile.format, ".fpx")
| ryfeus/lambda-packs | pytorch/source/PIL/FpxImagePlugin.py | Python | mit | 6,282 |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Check DNSSEC trust chain.
# Todo: verify expiration dates
#
# Based on
# http://backreference.org/2010/11/17/dnssec-verification-with-dig/
# https://github.com/rthalley/dnspython/blob/master/tests/test_dnssec.py
import dns
import dns.name
import dns.query
import dns.dnssec
import dns.message
import dns.resolver
import dns.rdatatype
import dns.rdtypes.ANY.NS
import dns.rdtypes.ANY.CNAME
import dns.rdtypes.ANY.DLV
import dns.rdtypes.ANY.DNSKEY
import dns.rdtypes.ANY.DS
import dns.rdtypes.ANY.NSEC
import dns.rdtypes.ANY.NSEC3
import dns.rdtypes.ANY.NSEC3PARAM
import dns.rdtypes.ANY.RRSIG
import dns.rdtypes.ANY.SOA
import dns.rdtypes.ANY.TXT
import dns.rdtypes.IN.A
import dns.rdtypes.IN.AAAA
from .logging import get_logger
_logger = get_logger(__name__)
# hard-coded trust anchors (root KSKs)
trust_anchors = [
# KSK-2017:
dns.rrset.from_text('.', 1 , 'IN', 'DNSKEY', '257 3 8 AwEAAaz/tAm8yTn4Mfeh5eyI96WSVexTBAvkMgJzkKTOiW1vkIbzxeF3+/4RgWOq7HrxRixHlFlExOLAJr5emLvN7SWXgnLh4+B5xQlNVz8Og8kvArMtNROxVQuCaSnIDdD5LKyWbRd2n9WGe2R8PzgCmr3EgVLrjyBxWezF0jLHwVN8efS3rCj/EWgvIWgb9tarpVUDK/b58Da+sqqls3eNbuv7pr+eoZG+SrDK6nWeL3c6H5Apxz7LjVc1uTIdsIXxuOLYA4/ilBmSVIzuDWfdRUfhHdY6+cn8HFRm+2hM8AnXGXws9555KrUB5qihylGa8subX2Nn6UwNR1AkUTV74bU='),
# KSK-2010:
dns.rrset.from_text('.', 15202, 'IN', 'DNSKEY', '257 3 8 AwEAAagAIKlVZrpC6Ia7gEzahOR+9W29euxhJhVVLOyQbSEW0O8gcCjF FVQUTf6v58fLjwBd0YI0EzrAcQqBGCzh/RStIoO8g0NfnfL2MTJRkxoX bfDaUeVPQuYEhg37NZWAJQ9VnMVDxP/VHL496M/QZxkjf5/Efucp2gaD X6RS6CXpoY68LsvPVjR0ZSwzz1apAzvN9dlzEheX7ICJBBtuA6G3LQpz W5hOA2hzCTMjJPJ8LbqF6dsV6DoBQzgul0sGIcGOYl7OyQdXfZ57relS Qageu+ipAdTTJ25AsRTAoub8ONGcLmqrAmRLKBP1dfwhYB4N7knNnulq QxA+Uk1ihz0='),
]
def _check_query(ns, sub, _type, keys):
q = dns.message.make_query(sub, _type, want_dnssec=True)
response = dns.query.tcp(q, ns, timeout=5)
assert response.rcode() == 0, 'No answer'
answer = response.answer
assert len(answer) != 0, ('No DNS record found', sub, _type)
assert len(answer) != 1, ('No DNSSEC record found', sub, _type)
if answer[0].rdtype == dns.rdatatype.RRSIG:
rrsig, rrset = answer
elif answer[1].rdtype == dns.rdatatype.RRSIG:
rrset, rrsig = answer
else:
raise Exception('No signature set in record')
if keys is None:
keys = {dns.name.from_text(sub):rrset}
dns.dnssec.validate(rrset, rrsig, keys)
return rrset
def _get_and_validate(ns, url, _type):
# get trusted root key
root_rrset = None
for dnskey_rr in trust_anchors:
try:
# Check if there is a valid signature for the root dnskey
root_rrset = _check_query(ns, '', dns.rdatatype.DNSKEY, {dns.name.root: dnskey_rr})
break
except dns.dnssec.ValidationFailure:
# It's OK as long as one key validates
continue
if not root_rrset:
raise dns.dnssec.ValidationFailure('None of the trust anchors found in DNS')
keys = {dns.name.root: root_rrset}
# top-down verification
parts = url.split('.')
for i in range(len(parts), 0, -1):
sub = '.'.join(parts[i-1:])
name = dns.name.from_text(sub)
# If server is authoritative, don't fetch DNSKEY
query = dns.message.make_query(sub, dns.rdatatype.NS)
response = dns.query.udp(query, ns, 3)
assert response.rcode() == dns.rcode.NOERROR, "query error"
rrset = response.authority[0] if len(response.authority) > 0 else response.answer[0]
rr = rrset[0]
if rr.rdtype == dns.rdatatype.SOA:
continue
# get DNSKEY (self-signed)
rrset = _check_query(ns, sub, dns.rdatatype.DNSKEY, None)
# get DS (signed by parent)
ds_rrset = _check_query(ns, sub, dns.rdatatype.DS, keys)
# verify that a signed DS validates DNSKEY
        # Accept the zone's keys if any (DS, DNSKEY) pair matches; the
        # for/else arms only run when a loop finishes without 'break'.
        for ds in ds_rrset:
            for dnskey in rrset:
                htype = 'SHA256' if ds.digest_type == 2 else 'SHA1'
                good_ds = dns.dnssec.make_ds(name, dnskey, htype)
                if ds == good_ds:
                    break  # this DS matches this DNSKEY
            else:
                continue  # no DNSKEY matched this DS; try the next one
            break  # found a matching (DS, DNSKEY) pair
        else:
            raise Exception("DS does not match DNSKEY")
# set key for next iteration
keys = {name: rrset}
# get TXT record (signed by zone)
rrset = _check_query(ns, url, _type, keys)
return rrset
def query(url, rtype):
# 8.8.8.8 is Google's public DNS server
nameservers = ['8.8.8.8']
ns = nameservers[0]
try:
out = _get_and_validate(ns, url, rtype)
validated = True
except Exception as e:
_logger.info(f"DNSSEC error: {repr(e)}")
out = dns.resolver.resolve(url, rtype)
validated = False
return out, validated
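# Hedged usage sketch ('electrum.org' and the TXT record type are
# illustrative assumptions, and the call needs network access to the
# resolver hard-coded above):
#
#   rrset, validated = query('electrum.org', dns.rdatatype.TXT)
#   for rr in rrset:
#       print(rr.to_text(), 'DNSSEC validated:', validated)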
| wakiyamap/electrum-mona | electrum_mona/dnssec.py | Python | mit | 5,922 |
#!/usr/bin/env python
class PivotFilter(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
"""
Attributes:
swaggerTypes (dict): The key is attribute name and the value is attribute type.
attributeMap (dict): The key is attribute name and the value is json key in definition.
"""
self.swaggerTypes = {
'AutoFilter': 'AutoFilter',
'EvaluationOrder': 'int',
'FieldIndex': 'int',
'FilterType': 'str',
'MeasureFldIndex': 'int',
'MemberPropertyFieldIndex': 'int',
'Name': 'str',
'Value1': 'str',
'Value2': 'str'
}
        self.attributeMap = {
            'AutoFilter': 'AutoFilter',
            'EvaluationOrder': 'EvaluationOrder',
            'FieldIndex': 'FieldIndex',
            'FilterType': 'FilterType',
            'MeasureFldIndex': 'MeasureFldIndex',
            'MemberPropertyFieldIndex': 'MemberPropertyFieldIndex',
            'Name': 'Name',
            'Value1': 'Value1',
            'Value2': 'Value2'
        }
self.AutoFilter = None # AutoFilter
self.EvaluationOrder = None # int
self.FieldIndex = None # int
self.FilterType = None # str
self.MeasureFldIndex = None # int
self.MemberPropertyFieldIndex = None # int
self.Name = None # str
self.Value1 = None # str
self.Value2 = None # str
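# Hedged usage sketch (the field values below are illustrative assumptions,
# not part of the generated model):
#
#   f = PivotFilter()
#   f.FieldIndex = 0
#   f.FilterType = 'Count'
#   f.Value1 = '10'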
| aspose-cells/Aspose.Cells-for-Cloud | SDKs/Aspose.Cells-Cloud-SDK-for-Python/asposecellscloud/models/PivotFilter.py | Python | mit | 1,456 |
Experiment(description='No with centred periodic',
data_dir='../data/tsdlr/',
max_depth=8,
random_order=False,
k=1,
debug=False,
local_computation=False,
n_rand=9,
sd=4,
max_jobs=600,
verbose=False,
make_predictions=False,
skip_complete=True,
results_dir='../results/2013-09-07/',
iters=250,
base_kernels='StepTanh,CenPer,Cos,Lin,SE,Const,MT5,IMT3Lin',
zero_mean=True,
random_seed=1,
period_heuristic=5,
subset=True,
subset_size=250,
full_iters=0,
bundle_size=5)
| jamesrobertlloyd/gpss-research | experiments/2013-09-07.py | Python | mit | 710 |
import pprint
import test.test_support
import unittest
import test.test_set
try:
uni = unicode
except NameError:
def uni(x):
return x
# list, tuple and dict subclasses that do or don't overwrite __repr__
class list2(list):
pass
class list3(list):
def __repr__(self):
return list.__repr__(self)
class tuple2(tuple):
pass
class tuple3(tuple):
def __repr__(self):
return tuple.__repr__(self)
class dict2(dict):
pass
class dict3(dict):
def __repr__(self):
return dict.__repr__(self)
class QueryTestCase(unittest.TestCase):
def setUp(self):
self.a = range(100)
self.b = range(200)
self.a[-12] = self.b
def test_basic(self):
# Verify .isrecursive() and .isreadable() w/o recursion
pp = pprint.PrettyPrinter()
for safe in (2, 2.0, 2j, "abc", [3], (2,2), {3: 3}, uni("yaddayadda"),
self.a, self.b):
# module-level convenience functions
self.assertFalse(pprint.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pprint.isreadable(safe),
"expected isreadable for %r" % (safe,))
# PrettyPrinter methods
self.assertFalse(pp.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pp.isreadable(safe),
"expected isreadable for %r" % (safe,))
def test_knotted(self):
# Verify .isrecursive() and .isreadable() w/ recursion
# Tie a knot.
self.b[67] = self.a
# Messy dict.
self.d = {}
self.d[0] = self.d[1] = self.d[2] = self.d
pp = pprint.PrettyPrinter()
for icky in self.a, self.b, self.d, (self.d, self.d):
self.assertTrue(pprint.isrecursive(icky), "expected isrecursive")
self.assertFalse(pprint.isreadable(icky), "expected not isreadable")
self.assertTrue(pp.isrecursive(icky), "expected isrecursive")
self.assertFalse(pp.isreadable(icky), "expected not isreadable")
# Break the cycles.
self.d.clear()
del self.a[:]
del self.b[:]
for safe in self.a, self.b, self.d, (self.d, self.d):
# module-level convenience functions
self.assertFalse(pprint.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pprint.isreadable(safe),
"expected isreadable for %r" % (safe,))
# PrettyPrinter methods
self.assertFalse(pp.isrecursive(safe),
"expected not isrecursive for %r" % (safe,))
self.assertTrue(pp.isreadable(safe),
"expected isreadable for %r" % (safe,))
def test_unreadable(self):
# Not recursive but not readable anyway
pp = pprint.PrettyPrinter()
for unreadable in type(3), pprint, pprint.isrecursive:
# module-level convenience functions
self.assertFalse(pprint.isrecursive(unreadable),
"expected not isrecursive for %r" % (unreadable,))
self.assertFalse(pprint.isreadable(unreadable),
"expected not isreadable for %r" % (unreadable,))
# PrettyPrinter methods
self.assertFalse(pp.isrecursive(unreadable),
"expected not isrecursive for %r" % (unreadable,))
self.assertFalse(pp.isreadable(unreadable),
"expected not isreadable for %r" % (unreadable,))
def test_same_as_repr(self):
# Simple objects, small containers and classes that overwrite __repr__
# For those the result should be the same as repr().
# Ahem. The docs don't say anything about that -- this appears to
# be testing an implementation quirk. Starting in Python 2.5, it's
# not true for dicts: pprint always sorts dicts by key now; before,
# it sorted a dict display if and only if the display required
# multiple lines. For that reason, dicts with more than one element
# aren't tested here.
for simple in (0, 0L, 0+0j, 0.0, "", uni(""),
(), tuple2(), tuple3(),
[], list2(), list3(),
{}, dict2(), dict3(),
self.assertTrue, pprint,
-6, -6L, -6-6j, -1.5, "x", uni("x"), (3,), [3], {3: 6},
(1,2), [3,4], {5: 6},
tuple2((1,2)), tuple3((1,2)), tuple3(range(100)),
[3,4], list2([3,4]), list3([3,4]), list3(range(100)),
dict2({5: 6}), dict3({5: 6}),
range(10, -11, -1)
):
native = repr(simple)
for function in "pformat", "saferepr":
f = getattr(pprint, function)
got = f(simple)
self.assertEqual(native, got,
"expected %s got %s from pprint.%s" %
(native, got, function))
def test_basic_line_wrap(self):
# verify basic line-wrapping operation
o = {'RPM_cal': 0,
'RPM_cal2': 48059,
'Speed_cal': 0,
'controldesk_runtime_us': 0,
'main_code_runtime_us': 0,
'read_io_runtime_us': 0,
'write_io_runtime_us': 43690}
exp = """\
{'RPM_cal': 0,
'RPM_cal2': 48059,
'Speed_cal': 0,
'controldesk_runtime_us': 0,
'main_code_runtime_us': 0,
'read_io_runtime_us': 0,
'write_io_runtime_us': 43690}"""
for type in [dict, dict2]:
self.assertEqual(pprint.pformat(type(o)), exp)
o = range(100)
exp = '[%s]' % ',\n '.join(map(str, o))
for type in [list, list2]:
self.assertEqual(pprint.pformat(type(o)), exp)
o = tuple(range(100))
exp = '(%s)' % ',\n '.join(map(str, o))
for type in [tuple, tuple2]:
self.assertEqual(pprint.pformat(type(o)), exp)
# indent parameter
o = range(100)
exp = '[ %s]' % ',\n '.join(map(str, o))
for type in [list, list2]:
self.assertEqual(pprint.pformat(type(o), indent=4), exp)
def test_nested_indentations(self):
o1 = list(range(10))
o2 = dict(first=1, second=2, third=3)
o = [o1, o2]
expected = """\
[ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
{ 'first': 1,
'second': 2,
'third': 3}]"""
self.assertEqual(pprint.pformat(o, indent=4, width=42), expected)
def test_sorted_dict(self):
# Starting in Python 2.5, pprint sorts dict displays by key regardless
# of how small the dictionary may be.
# Before the change, on 32-bit Windows pformat() gave order
# 'a', 'c', 'b' here, so this test failed.
d = {'a': 1, 'b': 1, 'c': 1}
self.assertEqual(pprint.pformat(d), "{'a': 1, 'b': 1, 'c': 1}")
self.assertEqual(pprint.pformat([d, d]),
"[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]")
# The next one is kind of goofy. The sorted order depends on the
# alphabetic order of type names: "int" < "str" < "tuple". Before
# Python 2.5, this was in the test_same_as_repr() test. It's worth
# keeping around for now because it's one of few tests of pprint
# against a crazy mix of types.
self.assertEqual(pprint.pformat({"xy\tab\n": (3,), 5: [[]], (): {}}),
r"{5: [[]], 'xy\tab\n': (3,), (): {}}")
def test_subclassing(self):
o = {'names with spaces': 'should be presented using repr()',
'others.should.not.be': 'like.this'}
exp = """\
{'names with spaces': 'should be presented using repr()',
others.should.not.be: like.this}"""
self.assertEqual(DottedPrettyPrinter().pformat(o), exp)
def test_set_reprs(self):
self.assertEqual(pprint.pformat(set()), 'set()')
self.assertEqual(pprint.pformat(set(range(3))), 'set([0, 1, 2])')
self.assertEqual(pprint.pformat(frozenset()), 'frozenset()')
self.assertEqual(pprint.pformat(frozenset(range(3))), 'frozenset([0, 1, 2])')
cube_repr_tgt = """\
{frozenset([]): frozenset([frozenset([2]), frozenset([0]), frozenset([1])]),
frozenset([0]): frozenset([frozenset(),
frozenset([0, 2]),
frozenset([0, 1])]),
frozenset([1]): frozenset([frozenset(),
frozenset([1, 2]),
frozenset([0, 1])]),
frozenset([2]): frozenset([frozenset(),
frozenset([1, 2]),
frozenset([0, 2])]),
frozenset([1, 2]): frozenset([frozenset([2]),
frozenset([1]),
frozenset([0, 1, 2])]),
frozenset([0, 2]): frozenset([frozenset([2]),
frozenset([0]),
frozenset([0, 1, 2])]),
frozenset([0, 1]): frozenset([frozenset([0]),
frozenset([1]),
frozenset([0, 1, 2])]),
frozenset([0, 1, 2]): frozenset([frozenset([1, 2]),
frozenset([0, 2]),
frozenset([0, 1])])}"""
cube = test.test_set.cube(3)
# XXX issues of dictionary order, and for the case below,
# order of items in the frozenset([...]) representation.
# Whether we get precisely cube_repr_tgt or not is open
# to implementation-dependent choices (this test probably
# fails horribly in CPython if we tweak the dict order too).
got = pprint.pformat(cube)
if test.test_support.check_impl_detail(cpython=True):
self.assertEqual(got, cube_repr_tgt)
else:
self.assertEqual(eval(got), cube)
cubo_repr_tgt = """\
{frozenset([frozenset([0, 2]), frozenset([0])]): frozenset([frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset([2]),
frozenset([0,
2])])]),
frozenset([frozenset([0, 1]), frozenset([1])]): frozenset([frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset([1]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([1])])]),
frozenset([frozenset([1, 2]), frozenset([1])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([1])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([1, 2]), frozenset([2])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([1]),
frozenset([1,
2])]),
frozenset([frozenset([2]),
frozenset([0,
2])]),
frozenset([frozenset(),
frozenset([2])])]),
frozenset([frozenset([]), frozenset([0])]): frozenset([frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset(),
frozenset([1])]),
frozenset([frozenset(),
frozenset([2])])]),
frozenset([frozenset([]), frozenset([1])]): frozenset([frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset([1]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([2])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([2]), frozenset([])]): frozenset([frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset(),
frozenset([1])]),
frozenset([frozenset([2]),
frozenset([0,
2])])]),
frozenset([frozenset([0, 1, 2]), frozenset([0, 1])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
1])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([0]), frozenset([0, 1])]): frozenset([frozenset([frozenset(),
frozenset([0])]),
frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset([1]),
frozenset([0,
1])])]),
frozenset([frozenset([2]), frozenset([0, 2])]): frozenset([frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset(),
frozenset([2])])]),
frozenset([frozenset([0, 1, 2]), frozenset([0, 2])]): frozenset([frozenset([frozenset([1,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0]),
frozenset([0,
2])]),
frozenset([frozenset([2]),
frozenset([0,
2])])]),
frozenset([frozenset([1, 2]), frozenset([0, 1, 2])]): frozenset([frozenset([frozenset([0,
2]),
frozenset([0,
1,
2])]),
frozenset([frozenset([0,
1]),
frozenset([0,
1,
2])]),
frozenset([frozenset([2]),
frozenset([1,
2])]),
frozenset([frozenset([1]),
frozenset([1,
2])])])}"""
cubo = test.test_set.linegraph(cube)
got = pprint.pformat(cubo)
if test.test_support.check_impl_detail(cpython=True):
self.assertEqual(got, cubo_repr_tgt)
else:
self.assertEqual(eval(got), cubo)
def test_depth(self):
nested_tuple = (1, (2, (3, (4, (5, 6)))))
nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}}
nested_list = [1, [2, [3, [4, [5, [6, []]]]]]]
self.assertEqual(pprint.pformat(nested_tuple), repr(nested_tuple))
self.assertEqual(pprint.pformat(nested_dict), repr(nested_dict))
self.assertEqual(pprint.pformat(nested_list), repr(nested_list))
lv1_tuple = '(1, (...))'
lv1_dict = '{1: {...}}'
lv1_list = '[1, [...]]'
self.assertEqual(pprint.pformat(nested_tuple, depth=1), lv1_tuple)
self.assertEqual(pprint.pformat(nested_dict, depth=1), lv1_dict)
self.assertEqual(pprint.pformat(nested_list, depth=1), lv1_list)
class DottedPrettyPrinter(pprint.PrettyPrinter):
def format(self, object, context, maxlevels, level):
if isinstance(object, str):
if ' ' in object:
return repr(object), 1, 0
else:
return object, 0, 0
else:
return pprint.PrettyPrinter.format(
self, object, context, maxlevels, level)
def test_main():
test.test_support.run_unittest(QueryTestCase)
if __name__ == "__main__":
test_main()
| bussiere/pypyjs | website/demo/home/rfk/repos/pypy/lib-python/2.7/test/test_pprint.py | Python | mit | 25,311 |
# -*- coding: utf-8 -*-
"""IPython Test Suite Runner.
This module provides a main entry point to a user script to test IPython
itself from the command line. There are two ways of running this script:
1. With the syntax `iptest all`. This runs our entire test suite by
calling this script (with different arguments) recursively. This
causes modules and package to be tested in different processes, using nose
or trial where appropriate.
2. With the regular nose syntax, like `iptest -vvs IPython`. In this form
the script simply calls nose, but with special command line flags and
plugins loaded.
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import glob
from io import BytesIO
import os
import os.path as path
import sys
from threading import Thread, Lock, Event
import warnings
import nose.plugins.builtin
from nose.plugins.xunit import Xunit
from nose import SkipTest
from nose.core import TestProgram
from nose.plugins import Plugin
from nose.util import safe_str
from IPython.utils.process import is_cmd_found
from IPython.utils.py3compat import bytes_to_str
from IPython.utils.importstring import import_item
from IPython.testing.plugin.ipdoctest import IPythonDoctest
from IPython.external.decorators import KnownFailure, knownfailureif
pjoin = path.join
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Warnings control
#-----------------------------------------------------------------------------
# Twisted generates annoying warnings with Python 2.6, as will other code
# that imports 'sets' as of today
warnings.filterwarnings('ignore', 'the sets module is deprecated',
DeprecationWarning )
# This one also comes from Twisted
warnings.filterwarnings('ignore', 'the sha module is deprecated',
DeprecationWarning)
# Wx on Fedora11 spits these out
warnings.filterwarnings('ignore', 'wxPython/wxWidgets release number mismatch',
UserWarning)
# ------------------------------------------------------------------------------
# Monkeypatch Xunit to count known failures as skipped.
# ------------------------------------------------------------------------------
def monkeypatch_xunit():
try:
knownfailureif(True)(lambda: None)()
except Exception as e:
KnownFailureTest = type(e)
def addError(self, test, err, capt=None):
if issubclass(err[0], KnownFailureTest):
err = (SkipTest,) + err[1:]
return self.orig_addError(test, err, capt)
Xunit.orig_addError = Xunit.addError
Xunit.addError = addError
#-----------------------------------------------------------------------------
# Check which dependencies are installed and greater than minimum version.
#-----------------------------------------------------------------------------
def extract_version(mod):
return mod.__version__
def test_for(item, min_version=None, callback=extract_version):
"""Test to see if item is importable, and optionally check against a minimum
version.
If min_version is given, the default behavior is to check against the
`__version__` attribute of the item, but specifying `callback` allows you to
extract the value you are interested in. e.g::
In [1]: import sys
In [2]: from IPython.testing.iptest import test_for
In [3]: test_for('sys', (2,6), callback=lambda sys: sys.version_info)
Out[3]: True
"""
try:
check = import_item(item)
except (ImportError, RuntimeError):
# GTK reports Runtime error if it can't be initialized even if it's
# importable.
return False
else:
if min_version:
if callback:
# extra processing step to get version to compare
check = callback(check)
return check >= min_version
else:
return True
# Global dict where we can store information on what we have and what we don't
# have available at test run time
have = {}
have['curses'] = test_for('_curses')
have['matplotlib'] = test_for('matplotlib')
have['numpy'] = test_for('numpy')
have['pexpect'] = test_for('IPython.external.pexpect')
have['pymongo'] = test_for('pymongo')
have['pygments'] = test_for('pygments')
have['qt'] = test_for('IPython.external.qt')
have['sqlite3'] = test_for('sqlite3')
have['tornado'] = test_for('tornado.version_info', (4,0), callback=None)
have['jinja2'] = test_for('jinja2')
have['mistune'] = test_for('mistune')
have['requests'] = test_for('requests')
have['sphinx'] = test_for('sphinx')
have['jsonschema'] = test_for('jsonschema')
have['terminado'] = test_for('terminado')
have['casperjs'] = is_cmd_found('casperjs')
have['phantomjs'] = is_cmd_found('phantomjs')
have['slimerjs'] = is_cmd_found('slimerjs')
min_zmq = (13,)
have['zmq'] = test_for('zmq.pyzmq_version_info', min_zmq, callback=lambda x: x())
#-----------------------------------------------------------------------------
# Test suite definitions
#-----------------------------------------------------------------------------
test_group_names = ['parallel', 'kernel', 'kernel.inprocess', 'config', 'core',
'extensions', 'lib', 'terminal', 'testing', 'utils',
'nbformat', 'qt', 'html', 'nbconvert'
]
class TestSection(object):
def __init__(self, name, includes):
self.name = name
self.includes = includes
self.excludes = []
self.dependencies = []
self.enabled = True
def exclude(self, module):
if not module.startswith('IPython'):
module = self.includes[0] + "." + module
self.excludes.append(module.replace('.', os.sep))
def requires(self, *packages):
self.dependencies.extend(packages)
@property
def will_run(self):
return self.enabled and all(have[p] for p in self.dependencies)
# Name -> (include, exclude, dependencies_met)
test_sections = {n:TestSection(n, ['IPython.%s' % n]) for n in test_group_names}
# Exclusions and dependencies
# ---------------------------
# core:
sec = test_sections['core']
if not have['sqlite3']:
sec.exclude('tests.test_history')
sec.exclude('history')
if not have['matplotlib']:
    sec.exclude('pylabtools')
sec.exclude('tests.test_pylabtools')
# lib:
sec = test_sections['lib']
if not have['zmq']:
sec.exclude('kernel')
# We do this unconditionally, so that the test suite doesn't import
# gtk, changing the default encoding and masking some unicode bugs.
sec.exclude('inputhookgtk')
# We also do this unconditionally, because wx can interfere with Unix signals.
# There are currently no tests for it anyway.
sec.exclude('inputhookwx')
# Testing inputhook will need a lot of thought, to figure out
# how to have tests that don't lock up with the gui event
# loops in the picture
sec.exclude('inputhook')
# testing:
sec = test_sections['testing']
# These have to be skipped on win32 because they use echo, rm, cd, etc.
# See ticket https://github.com/ipython/ipython/issues/87
if sys.platform == 'win32':
sec.exclude('plugin.test_exampleip')
sec.exclude('plugin.dtexample')
# terminal:
if (not have['pexpect']) or (not have['zmq']):
test_sections['terminal'].exclude('console')
# parallel
sec = test_sections['parallel']
sec.requires('zmq')
if not have['pymongo']:
sec.exclude('controller.mongodb')
sec.exclude('tests.test_mongodb')
# kernel:
sec = test_sections['kernel']
sec.requires('zmq')
# The in-process kernel tests are done in a separate section
sec.exclude('inprocess')
# importing gtk sets the default encoding, which we want to avoid
sec.exclude('zmq.gui.gtkembed')
sec.exclude('zmq.gui.gtk3embed')
if not have['matplotlib']:
sec.exclude('zmq.pylab')
# kernel.inprocess:
test_sections['kernel.inprocess'].requires('zmq')
# extensions:
sec = test_sections['extensions']
# This is deprecated in favour of rpy2
sec.exclude('rmagic')
# autoreload does some strange stuff, so move it to its own test section
sec.exclude('autoreload')
sec.exclude('tests.test_autoreload')
test_sections['autoreload'] = TestSection('autoreload',
['IPython.extensions.autoreload', 'IPython.extensions.tests.test_autoreload'])
test_group_names.append('autoreload')
# qt:
test_sections['qt'].requires('zmq', 'qt', 'pygments')
# html:
sec = test_sections['html']
sec.requires('zmq', 'tornado', 'requests', 'sqlite3', 'jsonschema')
# The notebook 'static' directory contains JS, css and other
# files for web serving. Occasionally projects may put a .py
# file in there (MathJax ships a conf.py), so we might as
# well play it safe and skip the whole thing.
sec.exclude('static')
sec.exclude('tasks')
if not have['jinja2']:
sec.exclude('notebookapp')
if not have['pygments'] or not have['jinja2']:
sec.exclude('nbconvert')
if not have['terminado']:
sec.exclude('terminal')
# config:
# Config files aren't really importable stand-alone
test_sections['config'].exclude('profile')
# nbconvert:
sec = test_sections['nbconvert']
sec.requires('pygments', 'jinja2', 'jsonschema', 'mistune')
# Exclude nbconvert directories containing config files used to test.
# Executing the config files with iptest would cause an exception.
sec.exclude('tests.files')
sec.exclude('exporters.tests.files')
if not have['tornado']:
sec.exclude('nbconvert.post_processors.serve')
sec.exclude('nbconvert.post_processors.tests.test_serve')
# nbformat:
test_sections['nbformat'].requires('jsonschema')
#-----------------------------------------------------------------------------
# Functions and classes
#-----------------------------------------------------------------------------
def check_exclusions_exist():
from IPython.utils.path import get_ipython_package_dir
from IPython.utils.warn import warn
parent = os.path.dirname(get_ipython_package_dir())
    for sec in test_sections.values():
        for pattern in sec.excludes:
fullpath = pjoin(parent, pattern)
if not os.path.exists(fullpath) and not glob.glob(fullpath + '.*'):
warn("Excluding nonexistent file: %r" % pattern)
class ExclusionPlugin(Plugin):
"""A nose plugin to effect our exclusions of files and directories.
"""
name = 'exclusions'
score = 3000 # Should come before any other plugins
def __init__(self, exclude_patterns=None):
"""
Parameters
----------
exclude_patterns : sequence of strings, optional
Filenames containing these patterns (as raw strings, not as regular
expressions) are excluded from the tests.
"""
self.exclude_patterns = exclude_patterns or []
super(ExclusionPlugin, self).__init__()
def options(self, parser, env=os.environ):
Plugin.options(self, parser, env)
def configure(self, options, config):
Plugin.configure(self, options, config)
# Override nose trying to disable plugin.
self.enabled = True
def wantFile(self, filename):
"""Return whether the given filename should be scanned for tests.
"""
if any(pat in filename for pat in self.exclude_patterns):
return False
return None
def wantDirectory(self, directory):
"""Return whether the given directory should be scanned for tests.
"""
if any(pat in directory for pat in self.exclude_patterns):
return False
return None
class StreamCapturer(Thread):
daemon = True # Don't hang if main thread crashes
started = False
def __init__(self, echo=False):
super(StreamCapturer, self).__init__()
self.echo = echo
self.streams = []
self.buffer = BytesIO()
self.readfd, self.writefd = os.pipe()
self.buffer_lock = Lock()
self.stop = Event()
def run(self):
self.started = True
while not self.stop.is_set():
chunk = os.read(self.readfd, 1024)
with self.buffer_lock:
self.buffer.write(chunk)
if self.echo:
sys.stdout.write(bytes_to_str(chunk))
os.close(self.readfd)
os.close(self.writefd)
def reset_buffer(self):
with self.buffer_lock:
self.buffer.truncate(0)
self.buffer.seek(0)
def get_buffer(self):
with self.buffer_lock:
return self.buffer.getvalue()
def ensure_started(self):
if not self.started:
self.start()
def halt(self):
"""Safely stop the thread."""
if not self.started:
return
self.stop.set()
os.write(self.writefd, b'\0') # Ensure we're not locked in a read()
self.join()
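# Hedged usage sketch (standalone, outside the nose plugin machinery;
# purely illustrative):
#
#   cap = StreamCapturer(echo=False)
#   cap.ensure_started()
#   os.write(cap.writefd, b'output from some subprocess\n')
#   print(cap.get_buffer())
#   cap.halt()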
class SubprocessStreamCapturePlugin(Plugin):
name='subprocstreams'
def __init__(self):
Plugin.__init__(self)
self.stream_capturer = StreamCapturer()
self.destination = os.environ.get('IPTEST_SUBPROC_STREAMS', 'capture')
# This is ugly, but distant parts of the test machinery need to be able
# to redirect streams, so we make the object globally accessible.
nose.iptest_stdstreams_fileno = self.get_write_fileno
def get_write_fileno(self):
if self.destination == 'capture':
self.stream_capturer.ensure_started()
return self.stream_capturer.writefd
elif self.destination == 'discard':
return os.open(os.devnull, os.O_WRONLY)
else:
return sys.__stdout__.fileno()
def configure(self, options, config):
Plugin.configure(self, options, config)
# Override nose trying to disable plugin.
if self.destination == 'capture':
self.enabled = True
def startTest(self, test):
# Reset log capture
self.stream_capturer.reset_buffer()
def formatFailure(self, test, err):
# Show output
ec, ev, tb = err
captured = self.stream_capturer.get_buffer().decode('utf-8', 'replace')
if captured.strip():
ev = safe_str(ev)
out = [ev, '>> begin captured subprocess output <<',
captured,
'>> end captured subprocess output <<']
return ec, '\n'.join(out), tb
return err
formatError = formatFailure
def finalize(self, result):
self.stream_capturer.halt()
def run_iptest():
"""Run the IPython test suite using nose.
This function is called when this script is **not** called with the form
`iptest all`. It simply calls nose with appropriate command line flags
and accepts all of the standard nose arguments.
"""
# Apply our monkeypatch to Xunit
if '--with-xunit' in sys.argv and not hasattr(Xunit, 'orig_addError'):
monkeypatch_xunit()
warnings.filterwarnings('ignore',
'This will be removed soon. Use IPython.testing.util instead')
arg1 = sys.argv[1]
if arg1 in test_sections:
section = test_sections[arg1]
sys.argv[1:2] = section.includes
elif arg1.startswith('IPython.') and arg1[8:] in test_sections:
section = test_sections[arg1[8:]]
sys.argv[1:2] = section.includes
else:
section = TestSection(arg1, includes=[arg1])
argv = sys.argv + [ '--detailed-errors', # extra info in tracebacks
'--with-ipdoctest',
'--ipdoctest-tests','--ipdoctest-extension=txt',
# We add --exe because of setuptools' imbecility (it
# blindly does chmod +x on ALL files). Nose does the
# right thing and it tries to avoid executables,
# setuptools unfortunately forces our hand here. This
# has been discussed on the distutils list and the
# setuptools devs refuse to fix this problem!
'--exe',
]
if '-a' not in argv and '-A' not in argv:
argv = argv + ['-a', '!crash']
if nose.__version__ >= '0.11':
# I don't fully understand why we need this one, but depending on what
# directory the test suite is run from, if we don't give it, 0 tests
# get run. Specifically, if the test suite is run from the source dir
# with an argument (like 'iptest.py IPython.core', 0 tests are run,
# even if the same call done in this directory works fine). It appears
# that if the requested package is in the current dir, nose bails early
# by default. Since it's otherwise harmless, leave it in by default
# for nose >= 0.11, though unfortunately nose 0.10 doesn't support it.
argv.append('--traverse-namespace')
# use our plugin for doctesting. It will remove the standard doctest plugin
# if it finds it enabled
plugins = [ExclusionPlugin(section.excludes), IPythonDoctest(), KnownFailure(),
SubprocessStreamCapturePlugin() ]
# Use working directory set by parent process (see iptestcontroller)
if 'IPTEST_WORKING_DIR' in os.environ:
os.chdir(os.environ['IPTEST_WORKING_DIR'])
# We need a global ipython running in this process, but the special
# in-process group spawns its own IPython kernels, so for *that* group we
# must avoid also opening the global one (otherwise there's a conflict of
# singletons). Ultimately the solution to this problem is to refactor our
# assumptions about what needs to be a singleton and what doesn't (app
# objects should, individual shells shouldn't). But for now, this
# workaround allows the test suite for the inprocess module to complete.
if 'kernel.inprocess' not in section.name:
from IPython.testing import globalipapp
globalipapp.start_ipython()
# Now nose can run
TestProgram(argv=argv, addplugins=plugins)
if __name__ == '__main__':
run_iptest()
| wolfram74/numerical_methods_iserles_notes | venv/lib/python2.7/site-packages/IPython/testing/iptest.py | Python | mit | 18,302 |
import sys
if __name__ == "__main__":
if len(sys.argv) < 2:
print "need input file"
sys.exit(1)
fin = open(sys.argv[1], "r")
lines = fin.readlines()
fin.close()
fout = open("bootloader.h", "w")
fout.write("/* File automatically generated by hex2header.py */\n\n")
fout.write("const unsigned int bootloader_data[] = {");
mem = {}
eAddr = 0
addr = 0
for line in lines:
line = line.strip()
if line[0] != ":":
continue
        # Intel HEX record fields: ':' ll aaaa tt <data> cc
        # (ll = byte count at [1:3], aaaa = address at [3:7],
        #  tt = record type at [7:9]).
        lType = int(line[7:9], 16)
        lAddr = int(line[3:7], 16)
        dLen = int(line[1:3], 16)
        if lType == 2:
            # type 2: extended segment address record (upper address bits)
            eAddr = int(line[9:13], 16) << 8
            continue
        if lType == 4:
            # type 4: extended linear address record (upper 16 address bits)
            eAddr = int(line[9:13], 16) << 16
            continue
        if lType == 1:
            # type 1: end-of-file record
            break
        if lType == 0:
            # type 0: data record
idx = 0
data = line[9:-2]
dataLen = len(data)
#print "data = ", data
for idx in range(dataLen / 4):
word = int(data[idx*4:(idx*4)+2], 16)
word |= int(data[(idx*4)+2:(idx*4)+4], 16) << 8;
addr = (lAddr + eAddr + idx*2) >> 1
#print hex(addr), "=", hex(word)
mem[addr] = word
output = []
for addr in range(0x800, 0xfff):
if mem.has_key(addr):
output.append(mem[addr])
else:
output.append(0xffff)
    # Strip trailing 0xffff (erased-flash) words: reverse the list, drop one
    # leading blank per 0xffff seen (the loop iterates over the original
    # list object, while 'output = output[1:]' rebinds the name), then
    # restore the original order.
    output.reverse()
    idx = 0
    for word in output:
        if word != 0xffff:
            break
        output = output[1:]
    output.reverse()
    # Pad to a multiple of 8 words so every generated row is full.
    left = len(output) % 8
    if left != 0:
        output += [0xffff] * (8-left)
while (idx < len(output)):
fout.write("\n ")
for i in range(8):
fout.write("0x%04x, " % output[idx])
idx += 1
fout.write("\n};\n");
fout.close()
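# Illustrative shape of the generated bootloader.h (the values are made up):
#
#   /* File automatically generated by hex2header.py */
#
#   const unsigned int bootloader_data[] = {
#       0x1234, 0xabcd, 0xffff, 0x0042, 0x00ff, 0x1000, 0x2000, 0x3000,
#   };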
| diydrones/alceosd | firmware/bootloader_updater.X/hex2header.py | Python | gpl-2.0 | 1,513 |
#!/usr/bin/python2.4
# Copyright 2008 Google Inc.
# Author : Anoop Chandran <[email protected]>
#
# openduckbill is a simple backup application. It offers support for
# transferring data to a local backup directory, NFS. It also provides
# file system monitoring of directories marked for backup. Please read
# the README file for more details.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Helper class, does command execution and returns value.
This class has the method RunCommandPopen which executes commands passed to
it and returns the status.
"""
import os
import subprocess
import sys
class CommandHelper:
"""Run command and return status, either using Popen or call
"""
def __init__(self, log_handle=''):
"""Initialise logging state
Logging enabled in debug mode.
Args:
log_handle: Object - a handle to the logging subsystem.
"""
self.logmsg = log_handle
if self.logmsg.debug:
self.stdout_debug = None
self.stderr_debug = None
else:
self.stdout_debug = 1
self.stderr_debug = 1
def RunCommandPopen(self, runcmd):
"""Uses subprocess.Popen to run the command.
Also prints the command output if being run in debug mode.
Args:
runcmd: List - path to executable and its arguments.
Retuns:
runretval: Integer - exit value of the command, after execution.
"""
    stdout_val = self.stdout_debug
    stderr_val = self.stderr_debug
if stdout_val:
stdout_l = file(os.devnull, 'w')
else:
stdout_l=subprocess.PIPE
if stderr_val:
stderr_l = file(os.devnull, 'w')
else:
stderr_l=subprocess.STDOUT
try:
run_proc = subprocess.Popen(runcmd, bufsize=0,
executable=None, stdin=None,
stdout=stdout_l, stderr=stderr_l)
if self.logmsg.debug:
output = run_proc.stdout
while 1:
line = output.readline()
if not line:
break
line = line.rstrip()
self.logmsg.logger.debug("Command output: %s" % line)
run_proc.wait()
runretval = run_proc.returncode
except OSError, e:
self.logmsg.logger.error('%s', e)
runretval = 1
except KeyboardInterrupt, e:
self.logmsg.logger.error('User interrupt')
sys.exit(1)
    # Close the devnull handles if we opened them.
    if stdout_val:
      stdout_l.close()
    if stderr_val:
      stderr_l.close()
return runretval
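# Hedged usage sketch (assumes a log handle shaped like the one openduckbill
# passes in, i.e. an object with .debug and .logger attributes; the rsync
# command line is an illustrative assumption):
#
#   cmd = CommandHelper(log_handle=logmsg)
#   status = cmd.RunCommandPopen(['/usr/bin/rsync', '-a', src, dest])
#   if status:
#       logmsg.logger.error('backup command failed')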
| xtao/openduckbill | src/helper.py | Python | gpl-2.0 | 3,110 |
#!/usr/bin/python
# pep8.py - Check Python source code formatting, according to PEP 8
# Copyright (C) 2006 Johann C. Rocholl <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Check Python source code formatting, according to PEP 8:
http://www.python.org/dev/peps/pep-0008/
For usage and a list of options, try this:
$ python pep8.py -h
This program and its regression test suite live here:
http://svn.browsershots.org/trunk/devtools/pep8/
http://trac.browsershots.org/browser/trunk/devtools/pep8/
Groups of errors and warnings:
E errors
W warnings
100 indentation
200 whitespace
300 blank lines
400 imports
500 line length
600 deprecation
700 statements
You can add checks to this program by writing plugins. Each plugin is
a simple function that is called for each line of source code, either
physical or logical.
Physical line:
- Raw line of text from the input file.
Logical line:
- Multi-line statements converted to a single line.
- Stripped left and right.
- Contents of strings replaced with 'xxx' of same length.
- Comments removed.
The check function requests physical or logical lines by the name of
the first argument:
def maximum_line_length(physical_line)
def extraneous_whitespace(logical_line)
def blank_lines(logical_line, blank_lines, indent_level, line_number)
The last example above demonstrates how check plugins can request
additional information with extra arguments. All attributes of the
Checker object are available. Some examples:
lines: a list of the raw lines from the input file
tokens: the tokens that contribute to this logical line
line_number: line number in the input file
blank_lines: blank lines before this one
indent_char: first indentation character in this file (' ' or '\t')
indent_level: indentation (with tabs expanded to multiples of 8)
previous_indent_level: indentation on previous line
previous_logical: previous logical line
The docstring of each check function shall be the relevant part of
text from PEP 8. It is printed if the user enables --show-pep8.
"""
from __future__ import print_function

import os
import sys
import re
import time
import inspect
import tokenize
from optparse import OptionParser
from keyword import iskeyword
from fnmatch import fnmatch
from six import iteritems
__version__ = '0.2.0'
__revision__ = '$Rev$'
default_exclude = '.svn,CVS,*.pyc,*.pyo'
indent_match = re.compile(r'([ \t]*)').match
raise_comma_match = re.compile(r'raise\s+\w+\s*(,)').match
operators = """
+ - * / % ^ & | = < > >> <<
+= -= *= /= %= ^= &= |= == <= >= >>= <<=
!= <> :
in is or not and
""".split()
options = None
args = None
##############################################################################
# Plugins (check functions) for physical lines
##############################################################################
def tabs_or_spaces(physical_line, indent_char):
"""
Never mix tabs and spaces.
The most popular way of indenting Python is with spaces only. The
second-most popular way is with tabs only. Code indented with a mixture
of tabs and spaces should be converted to using spaces exclusively. When
invoking the Python command line interpreter with the -t option, it issues
warnings about code that illegally mixes tabs and spaces. When using -tt
these warnings become errors. These options are highly recommended!
"""
indent = indent_match(physical_line).group(1)
for offset, char in enumerate(indent):
if char != indent_char:
return offset, "E101 indentation contains mixed spaces and tabs"
def tabs_obsolete(physical_line):
"""
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
"""
indent = indent_match(physical_line).group(1)
if indent.count('\t'):
return indent.index('\t'), "W191 indentation contains tabs"
def trailing_whitespace(physical_line):
"""
JCR: Trailing whitespace is superfluous.
"""
physical_line = physical_line.rstrip('\n') # chr(10), newline
physical_line = physical_line.rstrip('\r') # chr(13), carriage return
physical_line = physical_line.rstrip('\x0c') # chr(12), form feed, ^L
stripped = physical_line.rstrip()
if physical_line != stripped:
return len(stripped), "W291 trailing whitespace"
def trailing_blank_lines(physical_line, lines, line_number):
"""
JCR: Trailing blank lines are superfluous.
"""
if physical_line.strip() == '' and line_number == len(lines):
return 0, "W391 blank line at end of file"
def missing_newline(physical_line):
"""
JCR: The last line should have a newline.
"""
if physical_line.rstrip() == physical_line:
return len(physical_line), "W292 no newline at end of file"
def maximum_line_length(physical_line):
"""
Limit all lines to a maximum of 79 characters.
There are still many devices around that are limited to 80 character
lines; plus, limiting windows to 80 characters makes it possible to have
several windows side-by-side. The default wrapping on such devices looks
ugly. Therefore, please limit all lines to a maximum of 79 characters.
For flowing long blocks of text (docstrings or comments), limiting the
length to 72 characters is recommended.
"""
length = len(physical_line.rstrip())
if length > 79:
return 79, "E501 line too long (%d characters)" % length
##############################################################################
# Plugins (check functions) for logical lines
##############################################################################
def blank_lines(logical_line, blank_lines, indent_level, line_number,
previous_logical):
"""
Separate top-level function and class definitions with two blank lines.
Method definitions inside a class are separated by a single blank line.
Extra blank lines may be used (sparingly) to separate groups of related
functions. Blank lines may be omitted between a bunch of related
one-liners (e.g. a set of dummy implementations).
Use blank lines in functions, sparingly, to indicate logical sections.
"""
if line_number == 1:
return # Don't expect blank lines before the first line
if previous_logical.startswith('@'):
return # Don't expect blank lines after function decorator
if (logical_line.startswith('def ') or
logical_line.startswith('class ') or
logical_line.startswith('@')):
if indent_level > 0 and blank_lines != 1:
return 0, "E301 expected 1 blank line, found %d" % blank_lines
if indent_level == 0 and blank_lines != 2:
return 0, "E302 expected 2 blank lines, found %d" % blank_lines
if blank_lines > 2:
return 0, "E303 too many blank lines (%d)" % blank_lines
def extraneous_whitespace(logical_line):
"""
Avoid extraneous whitespace in the following situations:
- Immediately inside parentheses, brackets or braces.
- Immediately before a comma, semicolon, or colon.
"""
line = logical_line
for char in '([{':
found = line.find(char + ' ')
if found > -1:
return found + 1, "E201 whitespace after '%s'" % char
for char in '}])':
found = line.find(' ' + char)
if found > -1 and line[found - 1] != ',':
return found, "E202 whitespace before '%s'" % char
for char in ',;:':
found = line.find(' ' + char)
if found > -1:
return found, "E203 whitespace before '%s'" % char
def missing_whitespace(logical_line):
"""
JCR: Each comma, semicolon or colon should be followed by whitespace.
"""
line = logical_line
for index in range(len(line) - 1):
char = line[index]
if char in ',;:' and line[index + 1] != ' ':
before = line[:index]
if char == ':' and before.count('[') > before.count(']'):
continue # Slice syntax, no space required
return index, "E231 missing whitespace after '%s'" % char
def indentation(logical_line, previous_logical, indent_char,
indent_level, previous_indent_level):
"""
Use 4 spaces per indentation level.
For really old code that you don't want to mess up, you can continue to
use 8-space tabs.
"""
if indent_char == ' ' and indent_level % 4:
return 0, "E111 indentation is not a multiple of four"
indent_expect = previous_logical.endswith(':')
if indent_expect and indent_level <= previous_indent_level:
return 0, "E112 expected an indented block"
if indent_level > previous_indent_level and not indent_expect:
return 0, "E113 unexpected indentation"
def whitespace_before_parameters(logical_line, tokens):
"""
Avoid extraneous whitespace in the following situations:
- Immediately before the open parenthesis that starts the argument
list of a function call.
- Immediately before the open parenthesis that starts an indexing or
slicing.
"""
prev_type = tokens[0][0]
prev_text = tokens[0][1]
prev_end = tokens[0][3]
for index in range(1, len(tokens)):
token_type, text, start, end, line = tokens[index]
if (token_type == tokenize.OP and
text in '([' and
start != prev_end and
prev_type == tokenize.NAME and
(index < 2 or tokens[index - 2][1] != 'class') and
(not iskeyword(prev_text))):
return prev_end, "E211 whitespace before '%s'" % text
prev_type = token_type
prev_text = text
prev_end = end
def whitespace_around_operator(logical_line):
"""
Avoid extraneous whitespace in the following situations:
- More than one space around an assignment (or other) operator to
align it with another.
"""
line = logical_line
for operator in operators:
found = line.find(' ' + operator)
if found > -1:
return found, "E221 multiple spaces before operator"
found = line.find(operator + ' ')
if found > -1:
return found, "E222 multiple spaces after operator"
found = line.find('\t' + operator)
if found > -1:
return found, "E223 tab before operator"
found = line.find(operator + '\t')
if found > -1:
return found, "E224 tab after operator"
def whitespace_around_comma(logical_line):
"""
Avoid extraneous whitespace in the following situations:
- More than one space around an assignment (or other) operator to
align it with another.
JCR: This should also be applied around comma etc.
"""
line = logical_line
for separator in ',;:':
found = line.find(separator + ' ')
if found > -1:
return found + 1, "E241 multiple spaces after '%s'" % separator
found = line.find(separator + '\t')
if found > -1:
return found + 1, "E242 tab after '%s'" % separator
def imports_on_separate_lines(logical_line):
"""
Imports should usually be on separate lines.
"""
line = logical_line
if line.startswith('import '):
found = line.find(',')
if found > -1:
return found, "E401 multiple imports on one line"
def compound_statements(logical_line):
"""
Compound statements (multiple statements on the same line) are
generally discouraged.
"""
line = logical_line
found = line.find(':')
if -1 < found < len(line) - 1:
before = line[:found]
if (before.count('{') <= before.count('}') and # {'a': 1} (dict)
before.count('[') <= before.count(']') and # [1:2] (slice)
not re.search(r'\blambda\b', before)): # lambda x: x
return found, "E701 multiple statements on one line (colon)"
found = line.find(';')
if -1 < found:
return found, "E702 multiple statements on one line (semicolon)"
def python_3000_has_key(logical_line):
"""
The {}.has_key() method will be removed in the future version of
Python. Use the 'in' operation instead, like:
d = {"a": 1, "b": 2}
if "b" in d:
print d["b"]
"""
pos = logical_line.find('.has_key(')
if pos > -1:
return pos, "W601 .has_key() is deprecated, use 'in'"
def python_3000_raise_comma(logical_line):
"""
When raising an exception, use "raise ValueError('message')"
instead of the older form "raise ValueError, 'message'".
The paren-using form is preferred because when the exception arguments
are long or include string formatting, you don't need to use line
continuation characters thanks to the containing parentheses. The older
form will be removed in Python 3000.
"""
match = raise_comma_match(logical_line)
if match:
return match.start(1), "W602 deprecated form of raising exception"
##############################################################################
# Helper functions
##############################################################################
def expand_indent(line):
"""
Return the amount of indentation.
Tabs are expanded to the next multiple of 8.
>>> expand_indent(' ')
4
>>> expand_indent('\\t')
8
>>> expand_indent(' \\t')
8
>>> expand_indent(' \\t')
8
>>> expand_indent(' \\t')
16
"""
result = 0
for char in line:
if char == '\t':
result = result / 8 * 8 + 8
elif char == ' ':
result += 1
else:
break
return result
##############################################################################
# Framework to run all checks
##############################################################################
def message(text):
"""Print a message."""
# print >> sys.stderr, options.prog + ': ' + text
# print >> sys.stderr, text
print(text)
def find_checks(argument_name):
"""
Find all globally visible functions where the first argument name
starts with argument_name.
"""
checks = []
function_type = type(find_checks)
for name, function in iteritems(globals()):
if type(function) is function_type:
args = inspect.getargspec(function)[0]
if len(args) >= 1 and args[0].startswith(argument_name):
checks.append((name, function, args))
checks.sort()
return checks
def mute_string(text):
"""
Replace contents with 'xxx' to prevent syntax matching.
>>> mute_string('"abc"')
'"xxx"'
>>> mute_string("'''abc'''")
"'''xxx'''"
>>> mute_string("r'abc'")
"r'xxx'"
"""
start = 1
end = len(text) - 1
# String modifiers (e.g. u or r)
if text.endswith('"'):
start += text.index('"')
elif text.endswith("'"):
start += text.index("'")
# Triple quotes
if text.endswith('"""') or text.endswith("'''"):
start += 2
end -= 2
return text[:start] + 'x' * (end - start) + text[end:]
class Checker:
"""
Load a Python source file, tokenize it, check coding style.
"""
def __init__(self, filename):
self.filename = filename
        self.lines = open(filename).readlines()
self.physical_checks = find_checks('physical_line')
self.logical_checks = find_checks('logical_line')
options.counters['physical lines'] = \
options.counters.get('physical lines', 0) + len(self.lines)
def readline(self):
"""
Get the next line from the input buffer.
"""
self.line_number += 1
if self.line_number > len(self.lines):
return ''
return self.lines[self.line_number - 1]
def readline_check_physical(self):
"""
Check and return the next physical line. This method can be
used to feed tokenize.generate_tokens.
"""
line = self.readline()
if line:
self.check_physical(line)
return line
def run_check(self, check, argument_names):
"""
Run a check plugin.
"""
arguments = []
for name in argument_names:
arguments.append(getattr(self, name))
return check(*arguments)
def check_physical(self, line):
"""
Run all physical checks on a raw input line.
"""
self.physical_line = line
if self.indent_char is None and len(line) and line[0] in ' \t':
self.indent_char = line[0]
for name, check, argument_names in self.physical_checks:
result = self.run_check(check, argument_names)
if result is not None:
offset, text = result
self.report_error(self.line_number, offset, text, check)
def build_tokens_line(self):
"""
Build a logical line from tokens.
"""
self.mapping = []
logical = []
length = 0
previous = None
for token in self.tokens:
token_type, text = token[0:2]
if token_type in (tokenize.COMMENT, tokenize.NL,
tokenize.INDENT, tokenize.DEDENT,
tokenize.NEWLINE):
continue
if token_type == tokenize.STRING:
text = mute_string(text)
if previous:
end_line, end = previous[3]
start_line, start = token[2]
if end_line != start_line: # different row
if self.lines[end_line - 1][end - 1] not in '{[(':
logical.append(' ')
length += 1
elif end != start: # different column
fill = self.lines[end_line - 1][end:start]
logical.append(fill)
length += len(fill)
self.mapping.append((length, token))
logical.append(text)
length += len(text)
previous = token
self.logical_line = ''.join(logical)
assert self.logical_line.lstrip() == self.logical_line
assert self.logical_line.rstrip() == self.logical_line
def check_logical(self):
"""
Build a line from tokens and run all logical checks on it.
"""
options.counters['logical lines'] = \
options.counters.get('logical lines', 0) + 1
self.build_tokens_line()
first_line = self.lines[self.mapping[0][1][2][0] - 1]
indent = first_line[:self.mapping[0][1][2][1]]
self.previous_indent_level = self.indent_level
self.indent_level = expand_indent(indent)
if options.verbose >= 2:
print(self.logical_line[:80].rstrip())
for name, check, argument_names in self.logical_checks:
if options.verbose >= 3:
print(' ', name)
result = self.run_check(check, argument_names)
if result is not None:
offset, text = result
if type(offset) is tuple:
original_number, original_offset = offset
else:
for token_offset, token in self.mapping:
if offset >= token_offset:
original_number = token[2][0]
original_offset = (token[2][1]
+ offset - token_offset)
self.report_error(original_number, original_offset,
text, check)
self.previous_logical = self.logical_line
def check_all(self):
"""
Run all checks on the input file.
"""
self.file_errors = 0
self.line_number = 0
self.indent_char = None
self.indent_level = 0
self.previous_logical = ''
self.blank_lines = 0
self.tokens = []
parens = 0
for token in tokenize.generate_tokens(self.readline_check_physical):
# print tokenize.tok_name[token[0]], repr(token)
self.tokens.append(token)
token_type, text = token[0:2]
if token_type == tokenize.OP and text in '([{':
parens += 1
if token_type == tokenize.OP and text in '}])':
parens -= 1
if token_type == tokenize.NEWLINE and not parens:
self.check_logical()
self.blank_lines = 0
self.tokens = []
if token_type == tokenize.NL and not parens:
self.blank_lines += 1
self.tokens = []
if token_type == tokenize.COMMENT:
source_line = token[4]
token_start = token[2][1]
if source_line[:token_start].strip() == '':
self.blank_lines = 0
return self.file_errors
def report_error(self, line_number, offset, text, check):
"""
Report an error, according to options.
"""
if options.quiet == 1 and not self.file_errors:
message(self.filename)
self.file_errors += 1
code = text[:4]
options.counters[code] = options.counters.get(code, 0) + 1
options.messages[code] = text[5:]
if options.quiet:
return
if options.testsuite:
base = os.path.basename(self.filename)[:4]
if base == code:
return
if base[0] == 'E' and code[0] == 'W':
return
if ignore_code(code):
return
if options.counters[code] == 1 or options.repeat:
message("%s:%s:%d: %s" %
(self.filename, line_number, offset + 1, text))
if options.show_source:
line = self.lines[line_number - 1]
message(line.rstrip())
message(' ' * offset + '^')
if options.show_pep8:
message(check.__doc__.lstrip('\n').rstrip())
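# Hedged usage sketch (assumes process_options() has already populated the
# module-global 'options', as _main() does; 'myfile.py' is illustrative):
#
#   process_options(['myfile.py'])
#   errors = Checker('myfile.py').check_all()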
def input_file(filename):
"""
Run all checks on a Python source file.
"""
if excluded(filename) or not filename_match(filename):
return {}
if options.verbose:
message('checking ' + filename)
options.counters['files'] = options.counters.get('files', 0) + 1
errors = Checker(filename).check_all()
if options.testsuite and not errors:
message("%s: %s" % (filename, "no errors found"))
def input_dir(dirname):
"""
Check all Python source files in this directory and all subdirectories.
"""
dirname = dirname.rstrip('/')
if excluded(dirname):
return
for root, dirs, files in os.walk(dirname):
if options.verbose:
message('directory ' + root)
options.counters['directories'] = \
options.counters.get('directories', 0) + 1
dirs.sort()
for subdir in dirs:
if excluded(subdir):
dirs.remove(subdir)
files.sort()
for filename in files:
input_file(os.path.join(root, filename))
def excluded(filename):
"""
Check if options.exclude contains a pattern that matches filename.
"""
basename = os.path.basename(filename)
for pattern in options.exclude:
if fnmatch(basename, pattern):
# print basename, 'excluded because it matches', pattern
return True
def filename_match(filename):
"""
Check if options.filename contains a pattern that matches filename.
If options.filename is unspecified, this always returns True.
"""
if not options.filename:
return True
for pattern in options.filename:
if fnmatch(filename, pattern):
return True
def ignore_code(code):
"""
Check if options.ignore contains a prefix of the error code.
"""
for ignore in options.ignore:
if code.startswith(ignore):
return True
def get_error_statistics():
"""Get error statistics."""
return get_statistics("E")
def get_warning_statistics():
"""Get warning statistics."""
return get_statistics("W")
def get_statistics(prefix=''):
"""
Get statistics for message codes that start with the prefix.
prefix='' matches all errors and warnings
prefix='E' matches all errors
prefix='W' matches all warnings
prefix='E4' matches all errors that have to do with imports
"""
stats = []
keys = options.messages.keys()
keys.sort()
for key in keys:
if key.startswith(prefix):
stats.append('%-7s %s %s' %
(options.counters[key], key, options.messages[key]))
return stats
def print_statistics(prefix=''):
"""Print overall statistics (number of errors and warnings)."""
for line in get_statistics(prefix):
print(line)
def print_benchmark(elapsed):
"""
Print benchmark numbers.
"""
print('%-7.2f %s' % (elapsed, 'seconds elapsed'))
keys = ['directories', 'files',
'logical lines', 'physical lines']
for key in keys:
if key in options.counters:
print('%-7d %s per second (%d total)' % (
options.counters[key] / elapsed, key,
options.counters[key]))
def process_options(arglist=None):
"""
Process options passed either via arglist or via command line args.
"""
global options, args
usage = "%prog [options] input ..."
parser = OptionParser(usage)
parser.add_option('-v', '--verbose', default=0, action='count',
help="print status messages, or debug with -vv")
parser.add_option('-q', '--quiet', default=0, action='count',
help="report only file names, or nothing with -qq")
parser.add_option('--exclude', metavar='patterns', default=default_exclude,
help="skip matches (default %s)" % default_exclude)
parser.add_option('--filename', metavar='patterns',
help="only check matching files (e.g. *.py)")
parser.add_option('--ignore', metavar='errors', default='',
help="skip errors and warnings (e.g. E4,W)")
parser.add_option('--repeat', action='store_true',
help="show all occurrences of the same error")
parser.add_option('--show-source', action='store_true',
help="show source code for each error")
parser.add_option('--show-pep8', action='store_true',
help="show text of PEP 8 for each error")
parser.add_option('--statistics', action='store_true',
help="count errors and warnings")
parser.add_option('--benchmark', action='store_true',
help="measure processing speed")
parser.add_option('--testsuite', metavar='dir',
help="run regression tests from dir")
parser.add_option('--doctest', action='store_true',
help="run doctest on myself")
options, args = parser.parse_args(arglist)
if options.testsuite:
args.append(options.testsuite)
if len(args) == 0:
parser.error('input not specified')
options.prog = os.path.basename(sys.argv[0])
options.exclude = options.exclude.split(',')
for index in range(len(options.exclude)):
options.exclude[index] = options.exclude[index].rstrip('/')
if options.filename:
options.filename = options.filename.split(',')
if options.ignore:
options.ignore = options.ignore.split(',')
else:
options.ignore = []
options.counters = {}
options.messages = {}
return options, args
def _main():
"""
Parse options and run checks on Python source.
"""
options, args = process_options()
if options.doctest:
import doctest
return doctest.testmod()
start_time = time.time()
for path in args:
if os.path.isdir(path):
input_dir(path)
else:
input_file(path)
elapsed = time.time() - start_time
if options.statistics:
print_statistics()
if options.benchmark:
print_benchmark(elapsed)
if __name__ == '__main__':
_main()
| MSusik/invenio | scripts/pep8.py | Python | gpl-2.0 | 29,318 |
#!/usr/bin/env python3
import os
import sys
import subprocess
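# Build helper: copy a library (sys.argv[1]) from the active MSYS2 MinGW
# toolchain's bin directory to the destination given as sys.argv[2].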
if os.getenv("MSYSTEM") == "MINGW32":
mingw_dir = "/mingw32"
elif os.getenv("MSYSTEM") == "MINGW64":
mingw_dir = "/mingw64"
p = subprocess.Popen([
"sh", "-c",
"cp {}/bin/{} {}".format(mingw_dir, sys.argv[1], sys.argv[2])])
sys.exit(p.wait())
| cnvogelg/fs-uae | dist/windows/clib.py | Python | gpl-2.0 | 323 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ContactRole'
db.create_table('base_contactrole', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('resource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.ResourceBase'])),
('contact', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['people.Profile'])),
('role', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['people.Role'])),
))
db.send_create_signal('base', ['ContactRole'])
# Adding unique constraint on 'ContactRole', fields ['contact', 'resource', 'role']
db.create_unique('base_contactrole', ['contact_id', 'resource_id', 'role_id'])
# Adding model 'TopicCategory'
db.create_table('base_topiccategory', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=50)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=50, db_index=True)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('base', ['TopicCategory'])
# Adding model 'Thumbnail'
db.create_table('base_thumbnail', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('thumb_file', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('thumb_spec', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('version', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0, null=True)),
))
db.send_create_signal('base', ['Thumbnail'])
# Adding model 'ResourceBase'
db.create_table('base_resourcebase', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('uuid', self.gf('django.db.models.fields.CharField')(max_length=36)),
('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=255)),
('date', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('date_type', self.gf('django.db.models.fields.CharField')(default='publication', max_length=255)),
('edition', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('abstract', self.gf('django.db.models.fields.TextField')(blank=True)),
('purpose', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('maintenance_frequency', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('keywords_region', self.gf('django.db.models.fields.CharField')(default='USA', max_length=3)),
('constraints_use', self.gf('django.db.models.fields.CharField')(default='copyright', max_length=255)),
('constraints_other', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('spatial_representation_type', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('language', self.gf('django.db.models.fields.CharField')(default='eng', max_length=3)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.TopicCategory'], null=True, blank=True)),
('temporal_extent_start', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('temporal_extent_end', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('supplemental_information', self.gf('django.db.models.fields.TextField')(default=u'No information provided')),
('distribution_url', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('distribution_description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('data_quality_statement', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('bbox_x0', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=19, decimal_places=10, blank=True)),
('bbox_x1', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=19, decimal_places=10, blank=True)),
('bbox_y0', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=19, decimal_places=10, blank=True)),
('bbox_y1', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=19, decimal_places=10, blank=True)),
('srid', self.gf('django.db.models.fields.CharField')(default='EPSG:4326', max_length=255)),
('csw_typename', self.gf('django.db.models.fields.CharField')(default='gmd:MD_Metadata', max_length=32)),
('csw_schema', self.gf('django.db.models.fields.CharField')(default='http://www.isotc211.org/2005/gmd', max_length=64)),
('csw_mdsource', self.gf('django.db.models.fields.CharField')(default='local', max_length=256)),
('csw_insert_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, null=True, blank=True)),
('csw_type', self.gf('django.db.models.fields.CharField')(default='dataset', max_length=32)),
('csw_anytext', self.gf('django.db.models.fields.TextField')(null=True)),
('csw_wkt_geometry', self.gf('django.db.models.fields.TextField')(default='SRID=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))')),
('metadata_uploaded', self.gf('django.db.models.fields.BooleanField')(default=False)),
('metadata_xml', self.gf('django.db.models.fields.TextField')(default='<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>', null=True, blank=True)),
('thumbnail', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.Thumbnail'], null=True, blank=True)),
))
db.send_create_signal('base', ['ResourceBase'])
# Adding model 'Link'
db.create_table('base_link', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('resource', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.ResourceBase'])),
('extension', self.gf('django.db.models.fields.CharField')(max_length=255)),
('link_type', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('mime', self.gf('django.db.models.fields.CharField')(max_length=255)),
('url', self.gf('django.db.models.fields.TextField')(unique=True, max_length=1000)),
))
db.send_create_signal('base', ['Link'])
def backwards(self, orm):
# Removing unique constraint on 'ContactRole', fields ['contact', 'resource', 'role']
db.delete_unique('base_contactrole', ['contact_id', 'resource_id', 'role_id'])
# Deleting model 'ContactRole'
db.delete_table('base_contactrole')
# Deleting model 'TopicCategory'
db.delete_table('base_topiccategory')
# Deleting model 'Thumbnail'
db.delete_table('base_thumbnail')
# Deleting model 'ResourceBase'
db.delete_table('base_resourcebase')
# Deleting model 'Link'
db.delete_table('base_link')
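    # South's "frozen ORM": a snapshot of every model this migration touches,
    # so it can run without importing the project's current models.py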
models = {
'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': "orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 15, 4, 16, 51, 384488)'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 15, 4, 16, 51, 388268)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 15, 4, 16, 51, 388203)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'relationships': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_to'", 'symmetrical': 'False', 'through': "orm['relationships.Relationship']", 'to': "orm['auth.User']"}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'base.contactrole': {
'Meta': {'unique_together': "(('contact', 'resource', 'role'),)", 'object_name': 'ContactRole'},
'contact': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Profile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.ResourceBase']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Role']"})
},
'base.link': {
'Meta': {'object_name': 'Link'},
'extension': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mime': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.ResourceBase']"}),
'url': ('django.db.models.fields.TextField', [], {'unique': 'True', 'max_length': '1000'})
},
'base.resourcebase': {
'Meta': {'object_name': 'ResourceBase'},
'abstract': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bbox_x0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_x1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_y0': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'bbox_y1': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '10', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.TopicCategory']", 'null': 'True', 'blank': 'True'}),
'constraints_other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'constraints_use': ('django.db.models.fields.CharField', [], {'default': "'copyright'", 'max_length': '255'}),
'contacts': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['people.Profile']", 'through': "orm['base.ContactRole']", 'symmetrical': 'False'}),
'csw_anytext': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'csw_insert_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'csw_mdsource': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '256'}),
'csw_schema': ('django.db.models.fields.CharField', [], {'default': "'http://www.isotc211.org/2005/gmd'", 'max_length': '64'}),
'csw_type': ('django.db.models.fields.CharField', [], {'default': "'dataset'", 'max_length': '32'}),
'csw_typename': ('django.db.models.fields.CharField', [], {'default': "'gmd:MD_Metadata'", 'max_length': '32'}),
'csw_wkt_geometry': ('django.db.models.fields.TextField', [], {'default': "'SRID=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'"}),
'data_quality_statement': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_type': ('django.db.models.fields.CharField', [], {'default': "'publication'", 'max_length': '255'}),
'distribution_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'distribution_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'edition': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords_region': ('django.db.models.fields.CharField', [], {'default': "'USA'", 'max_length': '3'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'eng'", 'max_length': '3'}),
'maintenance_frequency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'metadata_uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'metadata_xml': ('django.db.models.fields.TextField', [], {'default': '\'<gmd:MD_Metadata xmlns:gmd="http://www.isotc211.org/2005/gmd"/>\'', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'purpose': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'spatial_representation_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'srid': ('django.db.models.fields.CharField', [], {'default': "'EPSG:4326'", 'max_length': '255'}),
'supplemental_information': ('django.db.models.fields.TextField', [], {'default': "u'No information provided'"}),
'temporal_extent_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'temporal_extent_start': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['base.Thumbnail']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uuid': ('django.db.models.fields.CharField', [], {'max_length': '36'})
},
'base.thumbnail': {
'Meta': {'object_name': 'Thumbnail'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'thumb_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'thumb_spec': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'})
},
'base.topiccategory': {
'Meta': {'ordering': "('name',)", 'object_name': 'TopicCategory'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'people.profile': {
'Meta': {'object_name': 'Profile'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'delivery': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'profile': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'profile'", 'unique': 'True', 'null': 'True', 'to': "orm['auth.User']"}),
'voice': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'people.role': {
'Meta': {'object_name': 'Role'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'relationships.relationship': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('from_user', 'to_user', 'status', 'site'),)", 'object_name': 'Relationship'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'from_users'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'related_name': "'relationships'", 'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['relationships.RelationshipStatus']"}),
'to_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'to_users'", 'to': "orm['auth.User']"}),
'weight': ('django.db.models.fields.FloatField', [], {'default': '1.0', 'null': 'True', 'blank': 'True'})
},
'relationships.relationshipstatus': {
'Meta': {'ordering': "('name',)", 'object_name': 'RelationshipStatus'},
'from_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symmetrical_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'to_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100', 'db_index': 'True'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['base']
| dwoods/gn-maps | geonode/base/migrations/0001_initial.py | Python | gpl-3.0 | 25,336 |
#! /usr/bin/env python
import sys
from aubio import source, pitch, freqtomidi
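# usage: demo_pitch.py <filename> [samplerate], e.g.
#   ./demo_pitch.py track.wav 22050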
if len(sys.argv) < 2:
print "Usage: %s <filename> [samplerate]" % sys.argv[0]
sys.exit(1)
filename = sys.argv[1]
downsample = 1
samplerate = 44100 / downsample
if len( sys.argv ) > 2: samplerate = int(sys.argv[2])
win_s = 4096 / downsample # fft size
hop_s = 512 / downsample # hop size
s = source(filename, samplerate, hop_s)
samplerate = s.samplerate
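# minimum YIN confidence: estimates below this are masked out in the plot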
tolerance = 0.8
pitch_o = pitch("yin", win_s, hop_s, samplerate)
pitch_o.set_unit("midi")
pitch_o.set_tolerance(tolerance)
pitches = []
confidences = []
# total number of frames read
total_frames = 0
while True:
samples, read = s()
pitch = pitch_o(samples)[0]
#pitch = int(round(pitch))
confidence = pitch_o.get_confidence()
#if confidence < 0.8: pitch = 0.
#print "%f %f %f" % (total_frames / float(samplerate), pitch, confidence)
pitches += [pitch]
confidences += [confidence]
total_frames += read
if read < hop_s: break
if 0: sys.exit(0)
#print pitches
from numpy import array, ma
import matplotlib.pyplot as plt
from demo_waveform_plot import get_waveform_plot, set_xlabels_sample2time
skip = 1
pitches = array(pitches[skip:])
confidences = array(confidences[skip:])
times = [t * hop_s for t in range(len(pitches))]
fig = plt.figure()
ax1 = fig.add_subplot(311)
ax1 = get_waveform_plot(filename, samplerate = samplerate, block_size = hop_s, ax = ax1)
plt.setp(ax1.get_xticklabels(), visible = False)
ax1.set_xlabel('')
def array_from_text_file(filename, dtype = 'float'):
import os.path
from numpy import array
filename = os.path.join(os.path.dirname(__file__), filename)
return array([line.split() for line in open(filename).readlines()],
dtype = dtype)
ax2 = fig.add_subplot(312, sharex = ax1)
import sys, os.path
ground_truth = os.path.splitext(filename)[0] + '.f0.Corrected'
if os.path.isfile(ground_truth):
ground_truth = array_from_text_file(ground_truth)
true_freqs = ground_truth[:,2]
true_freqs = ma.masked_where(true_freqs < 2, true_freqs)
true_times = float(samplerate) * ground_truth[:,0]
ax2.plot(true_times, true_freqs, 'r')
ax2.axis( ymin = 0.9 * true_freqs.min(), ymax = 1.1 * true_freqs.max() )
# plot raw pitches
ax2.plot(times, pitches, '.g')
# plot cleaned up pitches
cleaned_pitches = pitches
#cleaned_pitches = ma.masked_where(cleaned_pitches < 0, cleaned_pitches)
#cleaned_pitches = ma.masked_where(cleaned_pitches > 120, cleaned_pitches)
cleaned_pitches = ma.masked_where(confidences < tolerance, cleaned_pitches)
ax2.plot(times, cleaned_pitches, '.-')
#ax2.axis( ymin = 0.9 * cleaned_pitches.min(), ymax = 1.1 * cleaned_pitches.max() )
#ax2.axis( ymin = 55, ymax = 70 )
plt.setp(ax2.get_xticklabels(), visible = False)
ax2.set_ylabel('f0 (midi)')
# plot confidence
ax3 = fig.add_subplot(313, sharex = ax1)
# plot the confidence
ax3.plot(times, confidences)
# draw a line at tolerance
ax3.plot(times, [tolerance]*len(confidences))
ax3.axis( xmin = times[0], xmax = times[-1])
ax3.set_ylabel('confidence')
set_xlabels_sample2time(ax3, times[-1], samplerate)
plt.show()
#plt.savefig(os.path.basename(filename) + '.svg')
| madmouser1/aubio | python/demos/demo_pitch.py | Python | gpl-3.0 | 3,193 |
# Phatch - Photo Batch Processor
# Copyright (C) 2007-2008 www.stani.be
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/
#
# Phatch recommends SPE (http://pythonide.stani.be) for editing python files.
# Embedded icon is taken from www.openclipart.org (public domain)
# Follows PEP8
from core import models
from lib.reverse_translation import _t
#---PIL
def init():
global Image, imtools
from PIL import Image
from lib import imtools
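# Transpose the image with a PIL method name (e.g. FLIP_LEFT_RIGHT); when
# amount < 100 the result is blended with the original for a partial effect.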
def transpose(image, method, amount=100):
transposed = image.transpose(getattr(Image, method))
if amount < 100:
transposed = imtools.blend(image, transposed, amount / 100.0)
return transposed
#---Phatch
class Action(models.Action):
""""""
label = _t('Transpose')
author = 'Stani'
email = '[email protected]'
init = staticmethod(init)
pil = staticmethod(transpose)
version = '0.1'
tags = [_t('default'), _t('transform')]
__doc__ = _t('Flip or rotate 90 degrees')
def interface(self, fields):
fields[_t('Method')] = self.ImageTransposeField(
'Orientation')
fields[_t('Amount')] = self.SliderField(100, 1, 100)
def apply(self, photo, setting, cache):
#get info
info = photo.info
#dpi
method = self.get_field('Method', info)
        # special case: turn the image to its EXIF orientation
if method == 'ORIENTATION':
photo._exif_transposition_reverse = ()
info['orientation'] = 1
else:
amount = self.get_field('Amount', info)
layer = photo.get_layer()
layer.image = transpose(layer.image, method, amount)
return photo
icon = \
'x\xda\x01`\t\x9f\xf6\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x000\x00\
\x00\x000\x08\x06\x00\x00\x00W\x02\xf9\x87\x00\x00\x00\x04sBIT\x08\x08\x08\
\x08|\x08d\x88\x00\x00\t\x17IDATh\x81\xed\xd9{\x8c\x9c\xd5y\x06\xf0\xdf\x99o\
\xee3{\xb7Y_\xd6\xd8\xd8\xa6\xc4.\x94\xe2[q\xa0\x01\n\xa8n\xa4\xc4$\xa2T\t\
\xa9ZE\x15\x94^\x94J\xf9\xa3\x89Z\xb5RD\xa4\xa4Q/\x8a\x8aZH\xa2DjU\x13\xa5\
\x8dTTD\x1b \x89"\x07\x1a\xb0MK|\xc7\xb0x\xb1\x8d\xbd\xbe\xad\xed\xf5\xce^f\
\xe6\xeb\x1f\xdf7\xde\xf5\xb2\xbe\x82eU\xf2;:\xfaFG\xe7;\xf3<\xefy\xcf{\x9e\
\xf7L\x88\xe3\xd8\xffg\xcb\\m\x00\xef\xd7\xae\x11\xb8\xdav\x8d\xc0\xd5\xb6\
\x0f\x8c@\x08!\x84\xef\x85(\xbc\x1bz\xc2\xa9\xb0$\x8c\x86\x15\xeb\xeb\xf7\
\xaf\xfe\xa0\xe6?\x97e/\xf7\xc5 T5\xdd#\xb6J\xb0R\xc3J\x19\xbd\x08\xad1\x1d\
\xa7\xdb\x07C.\xf4\xa1\x1e_\xa1|}I\x04\x82P\xc4G\xc5>-\xf8\xa8\x8c\xd2L\xe3\
\xf2r\xe6\x98-;\x92\x9b\x85v\x9c\xc4\xc4\xfb\x87\xfb^\xbb(\x02A\xc8\xe3Q\xfc\
\x19z\'}\xccrK\xac\xf3\x11\xd7\x9b\xaf\xcf\\\x9d\xcd\x0ec\x13\x13\xf6\xd7\
\x06m;\xb2;\xc6,\xd4B\x08Wd\x15.H \x08\x0f\xe1k\xb8~\xf2\xa5\xc8}\xd6\xfa\
\x8c\x8f\xcb\xd5r\xbe\xfb\xf3\xe7\xfc\xd7\xde\x8d\xf6\xed?\xe8\xc4\x89S\x93/\
\x1f7\x81N\x1cD\xed\x83\x06\x9f`97\xf0\x02\xfe\x0e\xbf?\xb5\xff~\xb7\xfb|\
\xfc\xbb\xb6\x1c\xde\xe1+?\xf9\x86\xad\xdb\xde8\xdf\xfc\x01EW0\xdb\xcdH \x08\
\x1dx\x01\xab\xa6\xf6?\xe0\x1e\x9f\xac\xdd\xe7\xe1o\x7f\xc1\xd1\xc3C\x17\x9e\
=N\xa7\xbb\x82\xf6\x1e\x02A\xc8\xe2_M\x03\xff)\xeb\xdc3\xbc\xdacO>\xee\xf4\
\xf0EF\xc3\x15\x85\x9e\xd8L+\xf0\xb7\xb8oj\xc7\x1dn\xb5v\xe8\x16\x7f\xfc\x8f\
_566~\xfe\x19\x9b\x92h\x1f\xc5\x90Q\xd4\xb5\xd6\xe2\n\xd8Y\x04\x82\xb0\x04\
\x8fM\x1f\xf4[\xf5\xfb}\xf1\xa9\xbf\x9f\x19\xfc)\xec\x17;\xa6\xee\xa4\x9a\
\x13F\x8c9i\xdc~M\x9b1\x82\xc6\x95\x81?}\x05b\x7f*\x88\xa6v\x95\x15m\xdb\xf3\
\x96\xd3\'k\xe4\xce\x8c\xe30\xde2a\xa7~\xfblWw\x14\xc3)\xe0S\xe9\x88~\x1c\
\xc5\xc4\xe5\xa4\xd0^\xe1\x96X\xe6\xf7\xb2\xc2my\xe1\x95\xa0\xfe\xe7\xfd\xe2\
\xd1s\x13\x08>9}\x929\xbam\xdd\xb3\'\t\r)\xc4\x97\x9d\xb2\xc7v\x87\xbc\x86\
\xfdx\x17\xc7qZ\x12<\xa3\xe9\xf7!\x9cp\x89\x87\xd8RaiS\xf4\xe5\xaa\xdc\x839!\
\x93\x93\x91\x13~U\x9c_\x182\xe1\xa1\xa9\xce8C \x0c\x86\xeb\\\xa7g\xfad\x03\
\x0ei\x1cl&^?\x81\xe7\xf5\xdb\xea9\xbc\x89\xbd)\xf8\x96\xf7\xc7%1\xdfL\x9fu\
\x89\x8c\xb8\xa8\x10\xbaSh\x1b\x92\xfbRI\xe1\xb1\x9cL\xa1 \x92\x13\xc9\xc9\
\xc8\xca\xc8\x86\xcc\x83\x85\xc3\x85;B\x08/\xc5q\xdc<\x8b\x80\xa6\x1bg\x9a\
\xb4\xaea\xa82\x9c\xf8\xf2G\xb6\xdb\xe5\xdf\xb0U\x12\x1e\x83\x12\x99\xd0\xda\
\xacM\x93\x1b6\x86\x8b\x0b\x9d\x10~E\xf1\xb3\xb1\xf2\xe3\x1d\xa29\x05Yy\x91\
\x82lJ \x92\x15\x89d\xc4\x13\xe3Or|U\x08a,\x8e\xe3f\x88\xe3X\x08!\xe3\x9f\
\xfd\x82\x87\xed\x98i\xfa\xcc\xcf\x82\xe6\xef\xc4[\xed\xf24\xb6`\x0f\x8eH\
\xe2\xbd\x8e\xe6\xe5\xca\x84{t\xdc>!\xfezV\xb4\xba(+iyy\xd9\xb4\xe5\xe4R\xf8\
\x91H\xdc\x08\xf1\xe6\'FV\xff\xf8s[\x7f\x1e\xc7\xf1x\x8b@\x1e\xbdF\xecU\x9a!\
{7\xb1\xce\x13\x9e\xf7\x1f\xd8\x99\x82\xaf\xbd\x1f\xe0\x0f=\x14\xa2#O\xb7\
\x7f=\x93\x89\x1e-\xcaE%yE9EyE\x05\x05yy\xb9\x94J\xf2\xc9\x88dd\x1c\xec\xcf\
\xbc\xfa\xd7\x8b\x9f\xb9\x0f\xc3Ar\xdc\x94\xb1\xc4F\xff\xeb\x8es\xfc\xe2\x88\
c\xbe\xe9S>\xe7\xa7\xa8\xb5b\xf0R-\x84\x10\x10=\xbc\x7f\xc5\xf7F\xe6\r>\x90E\
YAIAIQIQQQ!]\x87|\x1aHUU\x19\x19e%\xcdS\x1d\x8dO\xb4\xff\xc5B\x0cfS\x02y7X\
\x9e\xdd\x10\x89\xee\xc8\x18\x9b)i\x94u\xfb#\xff\xe2\xb3\xd6\xc5\xd5x\xd3\
\xe5\x80O-B[c\xdb\xdc_\x7f\xa4\xf33\x0e4\xf6\xdb1\xb1]\xb1kX9\x94\x94\x94S\
\x1ae\xf9t=\x96[.;1\xcb\xc9\xc6)\x13\x1a\xfef\xf3\x86 \x11\x89\xc7\'\t\xcc\
\xf2\xe1\xfa\xf6\x86y\x9bz\x1cXuT}\xa6\xb3\'\xa3G\xd5\x8bA\xf8C|7\x16_RzL\
\xbd\x9f\xc7\xac\xa7\xff\xf3\xd9\xf0\xf4K\xcfZ\xbc\xac\xcb\'~\xe3\x17\xcd\n\
\xb3\x95U\x95T\x94T\x15U\x14\x94\xcd\xb7\xc0\xe9Z\xd1\x9aG\x1f\x10\xb7\xc5\
\x898\x1f\x03\x1d\xc8g%J1\xabfL\x89\x81\xc7\x07\xad\xfd\x87\xe5^\x9e\xbb\xfd\
\\8\xda\xf1O\xf8Z\x10\x9e\xc2\x93\xb1\xf8\xc0\xc5r@\xae\xef\x97\xab\xf3f\xdf\
U\xce\xae\xbc\xbd\xcf\x87\xae\x9b\xaf\xa2\xaa\xa2]Y\x9b\x8a\x0ee\x1dJ\xdau\
\x99\xad\xd4\xe8\xb1\xe6K\xf7\x8a\'\xe2$\xaf\xb5Z\xa2r\xa3 9_\xe7(\xfa5\xb7\
\xfb\x8e<mK\xca\xee\xfd\xca\n\xcf\xb6\xff\xb7\t\xf5\x0b\x81\xaa\xe3Y\xbc\x8e\
\xfe\x0e\x06\xe6)\x0c\xfc\x81\xeb\x07F\x1d\xcf\xffT\xbc\xac){sQvY).\xddTi\
\x94\x96\xb6\x87\xca\r\x95\xa8R\xacjS\xd5\xae\xa2]U\xa7\x8aNU\xdd*\xba\xf5\
\x98\xab\x14\xf7X\xf5\xed\xfb\xed\xd9\xdc\x9f\xa4\x8d\x1e\xad\x15h\xfa+\xeb\
\xb0%\x9b\xf2i\x18uRAS\x909u`\xc4\x0f\xbe\xb8\xc9\x83_\xb8\xdb\xce\x05{\xbd\
\xe6\xbc\x9a?\x8b\xf5X_AQ\xc6\xb8\xa6o\xd9\xd7\xa8\xca\x85\x8a|\xa6*\xab\xa4\
\xa8\x1aJ\xaa\xd9\x8a\x8a\x8a\x16\xf8\xaa\x0eU]\xaa\xbaTt\xe94\xc7<\x8b\x1d\
\xa9\x8f\xfa\xd8\xf7\x7f\xd3\x9e\x81\xfed\xdd\xc2\x14\xef7\'\xc5a&\xed\x1a\
\xc7\xb0!\xbbEI\xef\xc8\xb1Q\x1b\xfe\xf2\x05\x85\x8d9\x8f4\xd6\xebT=\xef2\
\x94P\x96Q\x96Q\x12)\x8a\xa2\xa2l\xa6$\xa7$?-\xcb\x94\xcel\xd4\xc2\x946W\x9f\
\xde\xb8\xcfWw>\xe5\x86/\xdfj\xd3\x8e\xd7\xce\xfe\x91Q\x89\x1ax\xd3q\x89<\
\x89[\xc2-\x8b\x8a\xe3\xc6,u\xb7\xac \x97\xf4\xee\xdbq\xd8\xae\xd7\x07\xfc\
\xf6\x82u\xee\xec\xb8U\x1cb\x87\x1c\x9b\xea\x049\x94\x84\x94@\xa4,\xab,\xa7$\
7%E\x16\x14\x15\x94\xce\x80Ozg\xbb\xce"\x8b,\xb4\xd8\xff\x9c\x1cp\xf77>\xed\
\xb9M/L\x82\x0e\x92\xf0\xd9\xe8m\xbb\xbdm\x97\xedv\xda`\xcc6\x1ci\x1ddE\xcc\
\xc5j\xcb|\xde\xcd\xd6(pV+R\x99_t\xd7\x9d+\xac]\xfaK\xc6+u?\xf1\x9aWl\x951\
\xa6"\xa3*R\x15\xa9\xc8\xa9\xc8\xa9*\xa8(\xa8\xa4\xb9\xa5\xa2\xac\xaaj\x91\
\x85nt\xa3\xf9\x16zg\xac\xe6\x99\xbd/\xfb\xe6\x0f\xbf\xef\xddC\x87\xde\xbb\
\xb4\xa7\xf1\xef\xb6\xd9\xedE\x89\xee:$\x911o\xe2p\x8b@$\xc9.7\xe2\xc3V\xf9\
\x13\x1f\xb2Pi\x1a\x89\xfc\xe4\xbc\x9d}m\xee]\xb5\xc6G\xfan\xd3\xddV\xaaG\
\xf9\x89l\xc6\xb8a\'\x8c\x186\xa6\xa6K\xd5l]ztj\xd7)\x8a\xcbq\xa3\x19\xc5\
\xfbF\x872\xcf\xecy\xd5\x86\x1f\xfd\xe0\xecK\x80\xe96\x8e\xe7\rx\xd5\xb7$\
\xfa\xeb\xdd4\x88Z*w\xb4E\xe0L~\xc6r\xac\xb5\xc2#n1_\xc7\x14\x02\xe7\xbb\xc3\
(P\xe8\xcc\xea\xed\xea1\xaf\xb3[_\xc7\xacf\xbd!\x1c8z,\xbcsh\xd0\xe0\xe01\
\x8d\xfa%\xd45\'\xb1\xc9Q\x1b=\xa1\xe9g\xd8%\x91\xecc\x92\xf8\xaf\x9f\x11sH\
\x04]\x92[\xe7`\x19VZd\xbd\xb5n\xb3@\xb8\xb2w\x0bS\xac\x8e74\xbd\xe2u\xfd\
\x9e\xc1\xab\xd8!\t\x9d\x9a\xb42ii\xb03>\x8d\xe3\xb8\x19B\x18\x93H\xe4d\xaa\
\xb7\x8d\x18\xf2\x8e\xe5\xeer\xb3Ns]\xd9B\xfd 6;l\xab\x17\xd5l\x91\x84\xcd\
\xee\x14Sm\xa6\xba"L\x17\x93\xe9~(\xa2[r\x99u\x13n2\xcb\x1a\xcb\xac\xb4X\x9b\
^\x89\xfc{\xbf\xd6*\x92\x06\xc5\xf6\x1a\xb6\xdb\x16\x87\xbd$\t\x977\xb0OR,\
\x8d\x9e\xab(z\x0f\x81\x94DF\xb2\'\xda\xd0\x8b\x05X\x8cE*n\xd2k\xb9\xc5\xfa\
\xccU\xd0\x9e\x8e*^\x04\xe0\x86dc\x8e\xe0\xb0X\xbfS\x0e\x180h\x871\xfdx\x1bo\
a@RS\x0fc\xfc|\xcawF\x02)\x89 \t\xb1b\nq\xb6$\xd5\xceK[\xaf\xb2>\xed\xfaT\
\xcc\xd6\xadS\xbb\xa2\x82HR\x0b\x06M\xb1\x11\r\xa7M\x186\xe6\xb4\x9a\t55\xc7\
\x1d\xf1\x86q\x07$\xb1}\xd0d\x8a<b\xb2\xcak\\\xa8\xde8\'\x81)D2)\x91\x02*\
\x12\x15\xd8-Q&\xdd\xe8\x92H\xdb\xaa$\xb0\xf2\xe9\xf8H\xe2\xf3\xba\xc4\xef\
\xadb\x7f$\x05xtJ\x1b\x92\xdcd\x8cH\xb2L\xfdb\xeb\x8d\x0b\x12\x98F$\x92\x1c\
\xbcy\x89z\xa8LiE\x93\xa7E\x94\xb6Vq?\x91\x92\x18O\x01\x8e\x98\xbc\x82\xa9\
\x99r\x19p\xa9\x85\xd2E\x13\x98B\xa4%\xadZ \xb3&=\x9e*\xa93-\x11\x8a\t\x91\
\xd6s\xea\x8dE\xab/\xbe\xdc\xd2\xf4\x92\t\xcc@\x86I\xbdh\x86\xe7\xa4\x82\xbf\
\xac\x1b\x8b\x0b`\xb8\xf6O\xfdU\xb6k\x04\xae\xb6]#p\xb5\xed\xff\x00\xffpD!\
\x93;\xfd \x00\x00\x00\x00IEND\xaeB`\x82j\x88\xbf\xb5'
| anish/phatch | phatch/actions/transpose.py | Python | gpl-3.0 | 9,462 |
# -*- coding: utf-8 -*-
# Copyright: Damien Elmes <[email protected]>
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import re, os, shutil, cgi
from anki.utils import checksum, call, namedtmp, tmpdir, isMac, stripHTML
from anki.hooks import addHook
from anki.lang import _
# if you modify these in an add-on, you must make sure to take tmp.tex as
# the input and produce tmp.png as the output
latexCmds = [
["latex", "-interaction=nonstopmode", "tmp.tex"],
["dvipng", "-D", "200", "-T", "tight", "tmp.dvi", "-o", "tmp.png"]
# ["dvipng", "-D", "600", "-T", "tight", "-bg", "Transparent", "tmp.dvi", "-o", "tmp.png"]
]
build = True # if off, use existing media but don't create new
regexps = {
"standard": re.compile(r"\[latex\](.+?)\[/latex\]", re.DOTALL | re.IGNORECASE),
"expression": re.compile(r"\[\$\](.+?)\[/\$\]", re.DOTALL | re.IGNORECASE),
"math": re.compile(r"\[\$\$\](.+?)\[/\$\$\]", re.DOTALL | re.IGNORECASE),
}
# add standard tex install location to osx
if isMac:
os.environ['PATH'] += ":/usr/texbin"
def stripLatex(text):
for match in regexps['standard'].finditer(text):
text = text.replace(match.group(), "")
for match in regexps['expression'].finditer(text):
text = text.replace(match.group(), "")
for match in regexps['math'].finditer(text):
text = text.replace(match.group(), "")
return text
def mungeQA(html, type, fields, model, data, col):
"Convert TEXT with embedded latex tags to image links."
for match in regexps['standard'].finditer(html):
html = html.replace(match.group(), _imgLink(col, match.group(1), model))
for match in regexps['expression'].finditer(html):
html = html.replace(match.group(), _imgLink(
col, "$" + match.group(1) + "$", model))
for match in regexps['math'].finditer(html):
html = html.replace(match.group(), _imgLink(
col,
"\\begin{displaymath}" + match.group(1) + "\\end{displaymath}", model))
return html
def _imgLink(col, latex, model):
"Return an img link for LATEX, creating if necesssary."
txt = _latexFromHtml(col, latex)
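    # derive the file name from a checksum of the LaTeX source, so the
    # same snippet is only rendered once and reused afterwards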
fname = "latex-%s.png" % checksum(txt.encode("utf8"))
link = '<img class=latex src="%s">' % fname
if os.path.exists(fname):
return link
elif not build:
return u"[latex]%s[/latex]" % latex
else:
err = _buildImg(col, txt, fname, model)
if err:
return err
else:
return link
def _latexFromHtml(col, latex):
"Convert entities and fix newlines."
latex = re.sub("<br( /)?>|<div>", "\n", latex)
latex = stripHTML(latex)
return latex
def _buildImg(col, latex, fname, model):
# add header/footer & convert to utf8
latex = (model["latexPre"] + "\n" +
latex + "\n" +
model["latexPost"])
latex = latex.encode("utf8")
# it's only really secure if run in a jail, but these are the most common
tmplatex = latex.replace("\\includegraphics", "")
for bad in ("\\write18", "\\readline", "\\input", "\\include",
"\\catcode", "\\openout", "\\write", "\\loop",
"\\def", "\\shipout"):
# don't mind if the sequence is only part of a command
bad_re = "\\" + bad + "[^a-zA-Z]"
if re.search(bad_re, tmplatex):
return _("""\
For security reasons, '%s' is not allowed on cards. You can still use \
it by placing the command in a different package, and importing that \
package in the LaTeX header instead.""") % bad
# write into a temp file
log = open(namedtmp("latex_log.txt"), "w")
texpath = namedtmp("tmp.tex")
texfile = file(texpath, "w")
texfile.write(latex)
texfile.close()
mdir = col.media.dir()
oldcwd = os.getcwd()
png = namedtmp("tmp.png")
try:
# generate png
os.chdir(tmpdir())
for latexCmd in latexCmds:
if call(latexCmd, stdout=log, stderr=log):
return _errMsg(latexCmd[0], texpath)
# add to media
shutil.copyfile(png, os.path.join(mdir, fname))
return
finally:
os.chdir(oldcwd)
def _errMsg(type, texpath):
msg = (_("Error executing %s.") % type) + "<br>"
msg += (_("Generated file: %s") % texpath) + "<br>"
try:
log = open(namedtmp("latex_log.txt", rm=False)).read()
if not log:
raise Exception()
msg += "<small><pre>" + cgi.escape(log) + "</pre></small>"
    except:
        msg += _("Have you installed latex and dvipng?")
return msg
# setup q/a filter
addHook("mungeQA", mungeQA)
| sunclx/anki | anki/latex.py | Python | agpl-3.0 | 4,659 |
# -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
from dateutil.relativedelta import relativedelta
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
@api.multi
@api.depends('order_id', 'order_id.date_order', 'delay')
def _compute_date_planned(self):
for line in self:
new_date = fields.Date.context_today(self)
if line.order_id and line.order_id.date_order:
new_date = fields.Datetime.from_string(
line.order_id.date_order).date()
if line.delay:
new_date = (new_date +
(relativedelta(days=line.delay)))
line.date_planned = new_date
date_planned = fields.Date(
'Date planned', compute='_compute_date_planned', store=True,
default=_compute_date_planned)
def _find_sale_lines_from_stock_information(
self, company, to_date, product, location, from_date=None):
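        """Return draft sale lines for ``product`` planned no later than
        ``to_date`` (and, if given, no earlier than ``from_date``) whose
        order warehouse stock location matches ``location``."""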
cond = [('company_id', '=', company.id),
('product_id', '=', product.id),
('date_planned', '<=', to_date),
('state', '=', 'draft')]
if from_date:
cond.append(('date_planned', '>=', from_date))
sale_lines = self.search(cond)
sale_lines = sale_lines.filtered(
lambda x: x.order_id.state not in ('cancel', 'except_picking',
'except_invoice', 'done',
'approved'))
sale_lines = sale_lines.filtered(
lambda x: x.order_id.warehouse_id.lot_stock_id.id == location.id)
return sale_lines
| alfredoavanzosc/odoo-addons | stock_information/models/sale_order_line.py | Python | agpl-3.0 | 1,803 |
# -*- coding: utf-8 -*-
import logging
import itertools
import math
import urllib
import httplib as http
from modularodm import Q
from modularodm.exceptions import NoResultsFound
from flask import request
from framework import utils
from framework import sentry
from framework.auth.core import User
from framework.flask import redirect # VOL-aware redirect
from framework.routing import proxy_url
from framework.exceptions import HTTPError
from framework.auth.forms import SignInForm
from framework.forms import utils as form_utils
from framework.auth.forms import RegistrationForm
from framework.auth.forms import ResetPasswordForm
from framework.auth.forms import ForgotPasswordForm
from framework.auth.decorators import must_be_logged_in
from website.models import Guid
from website.models import Node, Institution
from website.institutions.views import view_institution
from website.util import sanitize
from website.project import model
from website.util import permissions
from website.project import new_bookmark_collection
logger = logging.getLogger(__name__)
def _render_node(node, auth=None):
"""
:param node:
:return:
"""
perm = None
# NOTE: auth.user may be None if viewing public project while not
# logged in
if auth and auth.user and node.get_permissions(auth.user):
perm_list = node.get_permissions(auth.user)
perm = permissions.reduce_permissions(perm_list)
return {
'title': node.title,
'id': node._primary_key,
'url': node.url,
'api_url': node.api_url,
'primary': node.primary,
'date_modified': utils.iso8601format(node.date_modified),
'category': node.category,
'permissions': perm, # A string, e.g. 'admin', or None,
'archiving': node.archiving,
}
def _render_nodes(nodes, auth=None, show_path=False):
"""
:param nodes:
:return:
"""
ret = {
'nodes': [
_render_node(node, auth)
for node in nodes
],
'show_path': show_path
}
return ret
def index():
try:
        # TODO: make this way more robust
inst = Institution.find_one(Q('domains', 'eq', request.host.lower()))
inst_dict = view_institution(inst._id)
inst_dict.update({
'home': False,
'institution': True,
'redirect_url': '/institutions/{}/'.format(inst._id)
})
return inst_dict
except NoResultsFound:
pass
return {'home': True}
def find_bookmark_collection(user):
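    """Return the user's bookmark collection, creating it on first access."""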
bookmark_collection = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id))
if bookmark_collection.count() == 0:
new_bookmark_collection(user)
return bookmark_collection[0]
@must_be_logged_in
def dashboard(auth):
user = auth.user
dashboard_folder = find_bookmark_collection(user)
dashboard_id = dashboard_folder._id
return {'addons_enabled': user.get_addon_names(),
'dashboard_id': dashboard_id,
}
def validate_page_num(page, pages):
if page < 0 or (pages and page >= pages):
raise HTTPError(http.BAD_REQUEST, data=dict(
message_long='Invalid value for "page".'
))
def paginate(items, total, page, size):
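    """Return (page_items, page_count) for a zero-based ``page`` of ``size``
    items taken from ``items``."""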
pages = math.ceil(total / float(size))
validate_page_num(page, pages)
start = page * size
paginated_items = itertools.islice(items, start, start + size)
return paginated_items, pages
@must_be_logged_in
def watched_logs_get(**kwargs):
user = kwargs['auth'].user
try:
page = int(request.args.get('page', 0))
except ValueError:
raise HTTPError(http.BAD_REQUEST, data=dict(
message_long='Invalid value for "page".'
))
try:
size = int(request.args.get('size', 10))
except ValueError:
raise HTTPError(http.BAD_REQUEST, data=dict(
message_long='Invalid value for "size".'
))
total = sum(1 for x in user.get_recent_log_ids())
paginated_logs, pages = paginate(user.get_recent_log_ids(), total, page, size)
logs = (model.NodeLog.load(id) for id in paginated_logs)
return {
"logs": [serialize_log(log) for log in logs],
"total": total,
"pages": pages,
"page": page
}
def serialize_log(node_log, auth=None, anonymous=False):
'''Return a dictionary representation of the log.'''
return {
'id': str(node_log._primary_key),
'user': node_log.user.serialize()
if isinstance(node_log.user, User)
else {'fullname': node_log.foreign_user},
'contributors': [node_log._render_log_contributor(c) for c in node_log.params.get("contributors", [])],
'action': node_log.action,
'params': sanitize.unescape_entities(node_log.params),
'date': utils.iso8601format(node_log.date),
'node': node_log.original_node.serialize(auth) if node_log.original_node else None,
'anonymous': anonymous
}
def reproducibility():
return redirect('/ezcuj/wiki')
def registration_form():
return form_utils.jsonify(RegistrationForm(prefix='register'))
def signin_form():
return form_utils.jsonify(SignInForm())
def forgot_password_form():
return form_utils.jsonify(ForgotPasswordForm(prefix='forgot_password'))
def reset_password_form():
return form_utils.jsonify(ResetPasswordForm())
### GUID ###
def _build_guid_url(base, suffix=None):
url = '/'.join([
each.strip('/') for each in [base, suffix]
if each
])
if not isinstance(url, unicode):
url = url.decode('utf-8')
return u'/{0}/'.format(url)
def resolve_guid(guid, suffix=None):
"""Load GUID by primary key, look up the corresponding view function in the
routing table, and return the return value of the view function without
changing the URL.
:param str guid: GUID primary key
:param str suffix: Remainder of URL after the GUID
:return: Return value of proxied view function
"""
# Look up GUID
guid_object = Guid.load(guid)
if guid_object:
# verify that the object implements a GuidStoredObject-like interface. If a model
# was once GuidStoredObject-like but that relationship has changed, it's
# possible to have referents that are instances of classes that don't
# have a deep_url attribute or otherwise don't behave as
# expected.
if not hasattr(guid_object.referent, 'deep_url'):
sentry.log_message(
'Guid `{}` resolved to an object with no deep_url'.format(guid)
)
raise HTTPError(http.NOT_FOUND)
referent = guid_object.referent
if referent is None:
logger.error('Referent of GUID {0} not found'.format(guid))
raise HTTPError(http.NOT_FOUND)
if not referent.deep_url:
raise HTTPError(http.NOT_FOUND)
url = _build_guid_url(urllib.unquote(referent.deep_url), suffix)
return proxy_url(url)
# GUID not found; try lower-cased and redirect if exists
guid_object_lower = Guid.load(guid.lower())
if guid_object_lower:
return redirect(
_build_guid_url(guid.lower(), suffix)
)
# GUID not found
raise HTTPError(http.NOT_FOUND)
##### Redirects #####
# Redirect /about/ to OSF wiki page
# https://github.com/CenterForOpenScience/osf.io/issues/3862
# https://github.com/CenterForOpenScience/community/issues/294
def redirect_about(**kwargs):
return redirect('https://osf.io/4znzp/wiki/home/')
def redirect_howosfworks(**kwargs):
return redirect('/getting-started/')
def redirect_getting_started(**kwargs):
return redirect('http://help.osf.io/')
def redirect_to_home():
# Redirect to support page
return redirect('/')
| zachjanicki/osf.io | website/views.py | Python | apache-2.0 | 7,843 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.data.python.kernel_tests import dataset_serialization_test_base
from tensorflow.contrib.data.python.ops import dataset_ops
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class FilterDatasetTest(test.TestCase):
def testFilterDataset(self):
components = (
np.arange(7, dtype=np.int64),
np.array([[1, 2, 3]], dtype=np.int64) * np.arange(
7, dtype=np.int64)[:, np.newaxis],
np.array(37.0, dtype=np.float64) * np.arange(7)
)
count = array_ops.placeholder(dtypes.int64, shape=[])
modulus = array_ops.placeholder(dtypes.int64)
def _map_fn(x, y, z):
return math_ops.square(x), math_ops.square(y), math_ops.square(z)
iterator = (
dataset_ops.Dataset.from_tensor_slices(components).map(_map_fn)
.repeat(count)
.filter(lambda x, _y, _z: math_ops.equal(math_ops.mod(x, modulus), 0))
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
self.assertEqual([c.shape[1:] for c in components],
[t.shape for t in get_next])
with self.test_session() as sess:
# Test that we can dynamically feed a different modulus value for each
# iterator.
def do_test(count_val, modulus_val):
sess.run(init_op, feed_dict={count: count_val, modulus: modulus_val})
for _ in range(count_val):
for i in [x for x in range(7) if x**2 % modulus_val == 0]:
result = sess.run(get_next)
for component, result_component in zip(components, result):
self.assertAllEqual(component[i]**2, result_component)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
do_test(14, 2)
do_test(4, 18)
# Test an empty dataset.
do_test(0, 1)
def testFilterRange(self):
dataset = dataset_ops.Dataset.range(100).filter(
lambda x: math_ops.not_equal(math_ops.mod(x, 3), 2))
iterator = dataset.make_one_shot_iterator()
get_next = iterator.get_next()
with self.test_session() as sess:
self.assertEqual(0, sess.run(get_next))
self.assertEqual(1, sess.run(get_next))
self.assertEqual(3, sess.run(get_next))
def testFilterDict(self):
iterator = (dataset_ops.Dataset.range(10)
.map(lambda x: {"foo": x * 2, "bar": x ** 2})
.filter(lambda d: math_ops.equal(d["bar"] % 2, 0))
.map(lambda d: d["foo"] + d["bar"])
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(10):
if (i ** 2) % 2 == 0:
self.assertEqual(i * 2 + i ** 2, sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testUseStepContainerInFilter(self):
input_data = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int64)
# Define a predicate that returns true for the first element of
# the sequence and not the second, and uses `tf.map_fn()`.
def _predicate(xs):
squared_xs = functional_ops.map_fn(lambda x: x * x, xs)
summed = math_ops.reduce_sum(squared_xs)
return math_ops.equal(summed, 1 + 4 + 9)
iterator = (
dataset_ops.Dataset.from_tensor_slices([[1, 2, 3], [4, 5, 6]])
.filter(_predicate)
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
self.assertAllEqual(input_data[0], sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def assertSparseValuesEqual(self, a, b):
self.assertAllEqual(a.indices, b.indices)
self.assertAllEqual(a.values, b.values)
self.assertAllEqual(a.dense_shape, b.dense_shape)
def testSparse(self):
def _map_fn(i):
return sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0]]),
values=(i * np.array([1])),
dense_shape=np.array([1, 1])), i
def _filter_fn(_, i):
return math_ops.equal(i % 2, 0)
iterator = (
dataset_ops.Dataset.range(10).map(_map_fn).filter(_filter_fn).map(
lambda x, i: x).make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(5):
actual = sess.run(get_next)
self.assertTrue(isinstance(actual, sparse_tensor.SparseTensorValue))
self.assertSparseValuesEqual(actual, _map_fn(i * 2)[0])
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
class FilterDatasetSerializationTest(
dataset_serialization_test_base.DatasetSerializationTestBase):
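  # Each _build_* helper constructs the dataset from scratch so the
  # serialization test harness can rebuild the pipeline after a restore.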
def _build_filter_range_graph(self, div):
return dataset_ops.Dataset.range(100).filter(
lambda x: math_ops.not_equal(math_ops.mod(x, div), 2))
def testFilterCore(self):
div = 3
    # use != (value comparison); "is not" on ints only works here because of
    # CPython's small-integer caching
    num_outputs = np.sum([x % 3 != 2 for x in range(100)])
self.run_core_tests(lambda: self._build_filter_range_graph(div),
lambda: self._build_filter_range_graph(div * 2),
num_outputs)
def _build_filter_dict_graph(self):
return dataset_ops.Dataset.range(10).map(
lambda x: {"foo": x * 2, "bar": x ** 2}).filter(
lambda d: math_ops.equal(d["bar"] % 2, 0)).map(
lambda d: d["foo"] + d["bar"])
def testFilterDictCore(self):
num_outputs = np.sum([(x**2) % 2 == 0 for x in range(10)])
self.run_core_tests(self._build_filter_dict_graph, None, num_outputs)
def _build_sparse_filter(self):
def _map_fn(i):
return sparse_tensor.SparseTensor(
indices=[[0, 0]], values=(i * [1]), dense_shape=[1, 1]), i
def _filter_fn(_, i):
return math_ops.equal(i % 2, 0)
return dataset_ops.Dataset.range(10).map(_map_fn).filter(_filter_fn).map(
lambda x, i: x)
def testSparseCore(self):
num_outputs = 5
self.run_core_tests(self._build_sparse_filter, None, num_outputs)
if __name__ == "__main__":
test.main()
| rabipanda/tensorflow | tensorflow/contrib/data/python/kernel_tests/filter_dataset_op_test.py | Python | apache-2.0 | 7,371 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import the compiled C extension and re-export its public symbols at the
# package level.
import clownfish._clownfish
from clownfish._clownfish import *
| rectang/lucy-clownfish | runtime/python/src/clownfish/__init__.py | Python | apache-2.0 | 846 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Generic Node base class for all workers that run on hosts."""
import os
import random
import sys
from oslo.config import cfg
from nova import conductor
from nova import context
from nova import exception
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
from nova.openstack.common import service
from nova import servicegroup
from nova import utils
from nova import version
from nova import wsgi
LOG = logging.getLogger(__name__)
service_opts = [
cfg.IntOpt('report_interval',
default=10,
help='seconds between nodes reporting state to datastore'),
cfg.BoolOpt('periodic_enable',
default=True,
help='enable periodic tasks'),
cfg.IntOpt('periodic_fuzzy_delay',
default=60,
help='range of seconds to randomly delay when starting the'
' periodic task scheduler to reduce stampeding.'
' (Disable by setting to 0)'),
cfg.ListOpt('enabled_apis',
default=['ec2', 'osapi_compute', 'metadata'],
help='a list of APIs to enable by default'),
cfg.ListOpt('enabled_ssl_apis',
default=[],
help='a list of APIs with enabled SSL'),
cfg.StrOpt('ec2_listen',
default="0.0.0.0",
help='IP address for EC2 API to listen'),
cfg.IntOpt('ec2_listen_port',
default=8773,
help='port for ec2 api to listen'),
cfg.IntOpt('ec2_workers',
default=None,
help='Number of workers for EC2 API service'),
cfg.StrOpt('osapi_compute_listen',
default="0.0.0.0",
help='IP address for OpenStack API to listen'),
cfg.IntOpt('osapi_compute_listen_port',
default=8774,
               help='listen port for osapi compute'),
cfg.IntOpt('osapi_compute_workers',
default=None,
help='Number of workers for OpenStack API service'),
cfg.StrOpt('metadata_manager',
default='nova.api.manager.MetadataManager',
help='OpenStack metadata service manager'),
cfg.StrOpt('metadata_listen',
default="0.0.0.0",
help='IP address for metadata api to listen'),
cfg.IntOpt('metadata_listen_port',
default=8775,
help='port for metadata api to listen'),
cfg.IntOpt('metadata_workers',
default=None,
help='Number of workers for metadata service'),
cfg.StrOpt('compute_manager',
default='nova.compute.manager.ComputeManager',
help='full class name for the Manager for compute'),
cfg.StrOpt('console_manager',
default='nova.console.manager.ConsoleProxyManager',
help='full class name for the Manager for console proxy'),
cfg.StrOpt('cert_manager',
default='nova.cert.manager.CertManager',
help='full class name for the Manager for cert'),
cfg.StrOpt('network_manager',
default='nova.network.manager.VlanManager',
help='full class name for the Manager for network'),
cfg.StrOpt('scheduler_manager',
default='nova.scheduler.manager.SchedulerManager',
help='full class name for the Manager for scheduler'),
cfg.IntOpt('service_down_time',
default=60,
help='maximum time since last check-in for up service'),
]
CONF = cfg.CONF
CONF.register_opts(service_opts)
CONF.import_opt('host', 'nova.netconf')
class Service(service.Service):
"""Service object for binaries running on hosts.
A service takes a manager and enables rpc by listening to queues based
on topic. It also periodically runs tasks on the manager and reports
    its state to the database services table.
"""
def __init__(self, host, binary, topic, manager, report_interval=None,
periodic_enable=None, periodic_fuzzy_delay=None,
periodic_interval_max=None, db_allowed=True,
*args, **kwargs):
super(Service, self).__init__()
self.host = host
self.binary = binary
self.topic = topic
self.manager_class_name = manager
# NOTE(russellb) We want to make sure to create the servicegroup API
# instance early, before creating other things such as the manager,
# that will also create a servicegroup API instance. Internally, the
# servicegroup only allocates a single instance of the driver API and
# we want to make sure that our value of db_allowed is there when it
# gets created. For that to happen, this has to be the first instance
# of the servicegroup API.
self.servicegroup_api = servicegroup.API(db_allowed=db_allowed)
manager_class = importutils.import_class(self.manager_class_name)
self.manager = manager_class(host=self.host, *args, **kwargs)
self.report_interval = report_interval
self.periodic_enable = periodic_enable
self.periodic_fuzzy_delay = periodic_fuzzy_delay
self.periodic_interval_max = periodic_interval_max
self.saved_args, self.saved_kwargs = args, kwargs
self.backdoor_port = None
self.conductor_api = conductor.API(use_local=db_allowed)
self.conductor_api.wait_until_ready(context.get_admin_context())
def start(self):
verstr = version.version_string_with_package()
LOG.audit(_('Starting %(topic)s node (version %(version)s)'),
{'topic': self.topic, 'version': verstr})
self.basic_config_check()
self.manager.init_host()
self.model_disconnected = False
ctxt = context.get_admin_context()
try:
self.service_ref = self.conductor_api.service_get_by_args(ctxt,
self.host, self.binary)
self.service_id = self.service_ref['id']
except exception.NotFound:
self.service_ref = self._create_service_ref(ctxt)
if self.backdoor_port is not None:
self.manager.backdoor_port = self.backdoor_port
self.conn = rpc.create_connection(new=True)
LOG.debug(_("Creating Consumer connection for Service %s") %
self.topic)
self.manager.pre_start_hook(rpc_connection=self.conn)
rpc_dispatcher = self.manager.create_rpc_dispatcher(self.backdoor_port)
# Share this same connection for these Consumers
self.conn.create_consumer(self.topic, rpc_dispatcher, fanout=False)
node_topic = '%s.%s' % (self.topic, self.host)
self.conn.create_consumer(node_topic, rpc_dispatcher, fanout=False)
self.conn.create_consumer(self.topic, rpc_dispatcher, fanout=True)
# Consume from all consumers in a thread
self.conn.consume_in_thread()
self.manager.post_start_hook()
LOG.debug(_("Join ServiceGroup membership for this service %s")
% self.topic)
# Add service to the ServiceGroup membership group.
self.servicegroup_api.join(self.host, self.topic, self)
if self.periodic_enable:
if self.periodic_fuzzy_delay:
initial_delay = random.randint(0, self.periodic_fuzzy_delay)
else:
initial_delay = None
self.tg.add_dynamic_timer(self.periodic_tasks,
initial_delay=initial_delay,
periodic_interval_max=
self.periodic_interval_max)
def _create_service_ref(self, context):
svc_values = {
'host': self.host,
'binary': self.binary,
'topic': self.topic,
'report_count': 0
}
service = self.conductor_api.service_create(context, svc_values)
self.service_id = service['id']
return service
def __getattr__(self, key):
manager = self.__dict__.get('manager', None)
return getattr(manager, key)
@classmethod
def create(cls, host=None, binary=None, topic=None, manager=None,
report_interval=None, periodic_enable=None,
periodic_fuzzy_delay=None, periodic_interval_max=None,
db_allowed=True):
"""Instantiates class and passes back application object.
:param host: defaults to CONF.host
:param binary: defaults to basename of executable
        :param topic: defaults to the binary name with the 'nova-' prefix stripped
:param manager: defaults to CONF.<topic>_manager
:param report_interval: defaults to CONF.report_interval
:param periodic_enable: defaults to CONF.periodic_enable
:param periodic_fuzzy_delay: defaults to CONF.periodic_fuzzy_delay
:param periodic_interval_max: if set, the max time to wait between runs
"""
if not host:
host = CONF.host
if not binary:
binary = os.path.basename(sys.argv[0])
if not topic:
topic = binary.rpartition('nova-')[2]
if not manager:
manager_cls = ('%s_manager' %
binary.rpartition('nova-')[2])
manager = CONF.get(manager_cls, None)
if report_interval is None:
report_interval = CONF.report_interval
if periodic_enable is None:
periodic_enable = CONF.periodic_enable
if periodic_fuzzy_delay is None:
periodic_fuzzy_delay = CONF.periodic_fuzzy_delay
service_obj = cls(host, binary, topic, manager,
report_interval=report_interval,
periodic_enable=periodic_enable,
periodic_fuzzy_delay=periodic_fuzzy_delay,
periodic_interval_max=periodic_interval_max,
db_allowed=db_allowed)
return service_obj
def kill(self):
"""Destroy the service object in the datastore."""
self.stop()
try:
self.conductor_api.service_destroy(context.get_admin_context(),
self.service_id)
except exception.NotFound:
LOG.warn(_('Service killed that has no database entry'))
def stop(self):
try:
self.conn.close()
except Exception:
pass
super(Service, self).stop()
def periodic_tasks(self, raise_on_error=False):
"""Tasks to be run at a periodic interval."""
ctxt = context.get_admin_context()
return self.manager.periodic_tasks(ctxt, raise_on_error=raise_on_error)
def basic_config_check(self):
"""Perform basic config checks before starting processing."""
# Make sure the tempdir exists and is writable
try:
with utils.tempdir():
pass
except Exception as e:
LOG.error(_('Temporary directory is invalid: %s'), e)
sys.exit(1)
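# Illustrative sketch (editor addition, not part of the original module):
# typical wiring of Service.create() with the module-level serve()/wait()
# helpers defined near the bottom of this file. The binary and topic names
# are placeholder values.
def _example_serve_compute():
    server = Service.create(binary='nova-compute', topic='compute')
    serve(server)
    wait()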
class WSGIService(object):
"""Provides ability to launch API from a 'paste' configuration."""
def __init__(self, name, loader=None, use_ssl=False, max_url_len=None):
"""Initialize, but do not start the WSGI server.
:param name: The name of the WSGI server given to the loader.
:param loader: Loads the WSGI application using the given name.
:returns: None
"""
self.name = name
self.manager = self._get_manager()
self.loader = loader or wsgi.Loader()
self.app = self.loader.load_app(name)
self.host = getattr(CONF, '%s_listen' % name, "0.0.0.0")
self.port = getattr(CONF, '%s_listen_port' % name, 0)
self.workers = getattr(CONF, '%s_workers' % name, None)
self.use_ssl = use_ssl
self.server = wsgi.Server(name,
self.app,
host=self.host,
port=self.port,
use_ssl=self.use_ssl,
max_url_len=max_url_len)
# Pull back actual port used
self.port = self.server.port
self.backdoor_port = None
def _get_manager(self):
"""Initialize a Manager object appropriate for this service.
Use the service name to look up a Manager subclass from the
configuration and initialize an instance. If no class name
is configured, just return None.
:returns: a Manager instance, or None.
"""
fl = '%s_manager' % self.name
if fl not in CONF:
return None
manager_class_name = CONF.get(fl, None)
if not manager_class_name:
return None
manager_class = importutils.import_class(manager_class_name)
return manager_class()
def start(self):
"""Start serving this service using loaded configuration.
Also, retrieve updated port number in case '0' was passed in, which
indicates a random port should be used.
:returns: None
"""
if self.manager:
self.manager.init_host()
self.manager.pre_start_hook()
if self.backdoor_port is not None:
self.manager.backdoor_port = self.backdoor_port
self.server.start()
if self.manager:
self.manager.post_start_hook()
def stop(self):
"""Stop serving this API.
:returns: None
"""
self.server.stop()
def wait(self):
"""Wait for the service to stop serving this API.
:returns: None
"""
self.server.wait()
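# Illustrative sketch (editor addition, not part of the original module):
# launching one of the enabled API services through a process launcher.
# 'osapi_compute' is assumed to match the *_listen/*_workers options
# registered above; launch_server() reflects the oslo-era launcher API.
def _example_launch_osapi():
    api_server = WSGIService('osapi_compute')
    launcher = process_launcher()
    launcher.launch_server(api_server, workers=api_server.workers or 1)
    launcher.wait()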
def process_launcher():
return service.ProcessLauncher()
# NOTE(vish): the global launcher is to maintain the existing
# functionality of calling service.serve +
# service.wait
_launcher = None
def serve(server, workers=None):
global _launcher
if _launcher:
raise RuntimeError(_('serve() can only be called once'))
_launcher = service.launch(server, workers=workers)
def wait():
_launcher.wait()
| Brocade-OpenSource/OpenStack-DNRM-Nova | nova/service.py | Python | apache-2.0 | 15,048 |
from flask import Blueprint, render_template, request, url_for
from CTFd.models import Users
from CTFd.utils import config
from CTFd.utils.decorators import authed_only
from CTFd.utils.decorators.visibility import (
check_account_visibility,
check_score_visibility,
)
from CTFd.utils.helpers import get_errors, get_infos
from CTFd.utils.user import get_current_user
users = Blueprint("users", __name__)
@users.route("/users")
@check_account_visibility
def listing():
q = request.args.get("q")
field = request.args.get("field", "name")
if field not in ("name", "affiliation", "website"):
field = "name"
filters = []
if q:
filters.append(getattr(Users, field).like("%{}%".format(q)))
users = (
Users.query.filter_by(banned=False, hidden=False)
.filter(*filters)
.order_by(Users.id.asc())
.paginate(per_page=50)
)
args = dict(request.args)
args.pop("page", 1)
return render_template(
"users/users.html",
users=users,
prev_page=url_for(request.endpoint, page=users.prev_num, **args),
next_page=url_for(request.endpoint, page=users.next_num, **args),
q=q,
field=field,
)
@users.route("/profile")
@users.route("/user")
@authed_only
def private():
infos = get_infos()
errors = get_errors()
user = get_current_user()
if config.is_scoreboard_frozen():
infos.append("Scoreboard has been frozen")
return render_template(
"users/private.html",
user=user,
account=user.account,
infos=infos,
errors=errors,
)
@users.route("/users/<int:user_id>")
@check_account_visibility
@check_score_visibility
def public(user_id):
infos = get_infos()
errors = get_errors()
user = Users.query.filter_by(id=user_id, banned=False, hidden=False).first_or_404()
if config.is_scoreboard_frozen():
infos.append("Scoreboard has been frozen")
return render_template(
"users/public.html", user=user, account=user.account, infos=infos, errors=errors
)
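# Illustrative sketch (editor addition, not part of CTFd itself): a blueprint
# like this one is attached to the Flask application during setup, e.g. from
# an application factory.
def _example_register(app):
    app.register_blueprint(users)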
| LosFuzzys/CTFd | CTFd/users.py | Python | apache-2.0 | 2,090 |
# Copyright (c) 2020 Vestas Wind Systems A/S
#
# SPDX-License-Identifier: Apache-2.0
'''Runner for performing program download over CANopen (DSP 302-3).'''
import argparse
import os
import time
from runners.core import ZephyrBinaryRunner, RunnerCaps
try:
import canopen
from progress.bar import Bar
MISSING_REQUIREMENTS = False
except ImportError:
MISSING_REQUIREMENTS = True
# Default Python-CAN context to use, see python-can documentation for details
DEFAULT_CAN_CONTEXT = 'default'
# Default program number
DEFAULT_PROGRAM_NUMBER = 1
# Default timeouts and retries
DEFAULT_TIMEOUT = 10.0 # seconds
DEFAULT_SDO_TIMEOUT = 0.3 # seconds
DEFAULT_SDO_RETRIES = 1
# Object dictionary indexes
H1F50_PROGRAM_DATA = 0x1F50
H1F51_PROGRAM_CTRL = 0x1F51
H1F56_PROGRAM_SWID = 0x1F56
H1F57_FLASH_STATUS = 0x1F57
# Program control commands
PROGRAM_CTRL_STOP = 0x00
PROGRAM_CTRL_START = 0x01
PROGRAM_CTRL_RESET = 0x02
PROGRAM_CTRL_CLEAR = 0x03
PROGRAM_CTRL_ZEPHYR_CONFIRM = 0x80
class ToggleAction(argparse.Action):
'''Toggle argument parser'''
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, not option_string.startswith('--no-'))
class CANopenBinaryRunner(ZephyrBinaryRunner):
'''Runner front-end for CANopen.'''
def __init__(self, cfg, dev_id, can_context=DEFAULT_CAN_CONTEXT,
program_number=DEFAULT_PROGRAM_NUMBER, confirm=True,
confirm_only=True, timeout=DEFAULT_TIMEOUT,
sdo_retries=DEFAULT_SDO_RETRIES, sdo_timeout=DEFAULT_SDO_TIMEOUT):
if MISSING_REQUIREMENTS:
raise RuntimeError('one or more Python dependencies were missing; '
"see the getting started guide for details on "
"how to fix")
super().__init__(cfg)
self.dev_id = dev_id # Only use for error checking in do_run()
self.bin_file = cfg.bin_file
self.confirm = confirm
self.confirm_only = confirm_only
self.timeout = timeout
self.downloader = CANopenProgramDownloader(logger=self.logger,
node_id=dev_id,
can_context=can_context,
program_number=program_number,
sdo_retries=sdo_retries,
sdo_timeout=sdo_timeout)
@classmethod
def name(cls):
return 'canopen'
@classmethod
def capabilities(cls):
return RunnerCaps(commands={'flash'}, dev_id=True, flash_addr=False)
@classmethod
def dev_id_help(cls) -> str:
return 'CANopen Node ID.'
@classmethod
def do_add_parser(cls, parser):
# Optional:
parser.add_argument('--node-id', dest='dev_id',
help=cls.dev_id_help())
parser.add_argument('--can-context', default=DEFAULT_CAN_CONTEXT,
help=f'Python-CAN context to use (default: {DEFAULT_CAN_CONTEXT})')
parser.add_argument('--program-number', type=int, default=DEFAULT_PROGRAM_NUMBER,
help=f'program number (default: {DEFAULT_PROGRAM_NUMBER})')
parser.add_argument('--confirm', '--no-confirm',
dest='confirm', nargs=0,
action=ToggleAction,
help='confirm after starting? (default: yes)')
parser.add_argument('--confirm-only', default=False, action='store_true',
help='confirm only, no program download (default: no)')
parser.add_argument('--timeout', type=float, default=DEFAULT_TIMEOUT,
help=f'Timeout in seconds (default: {DEFAULT_TIMEOUT})')
parser.add_argument('--sdo-retries', type=int, default=DEFAULT_SDO_RETRIES,
help=f'CANopen SDO request retries (default: {DEFAULT_SDO_RETRIES})')
parser.add_argument('--sdo-timeout', type=float, default=DEFAULT_SDO_TIMEOUT,
help=f'''CANopen SDO response timeout in seconds
(default: {DEFAULT_SDO_TIMEOUT})''')
parser.set_defaults(confirm=True)
@classmethod
def do_create(cls, cfg, args):
return CANopenBinaryRunner(cfg, int(args.dev_id),
can_context=args.can_context,
program_number=args.program_number,
confirm=args.confirm,
confirm_only=args.confirm_only,
timeout=args.timeout,
sdo_retries=args.sdo_retries,
sdo_timeout=args.sdo_timeout)
def do_run(self, command, **kwargs):
if not self.dev_id:
raise RuntimeError('Please specify a CANopen node ID with the '
'-i/--dev-id or --node-id command-line switch.')
if command == 'flash':
self.flash(**kwargs)
def flash(self, **kwargs):
'''Download program to flash over CANopen'''
self.ensure_output('bin')
self.logger.info('Using Node ID %d, program number %d',
self.downloader.node_id,
self.downloader.program_number)
self.downloader.connect()
status = self.downloader.wait_for_flash_status_ok(self.timeout)
if status == 0:
self.downloader.swid()
else:
self.logger.warning('Flash status 0x{:02x}, '
'skipping software identification'.format(status))
self.downloader.enter_pre_operational()
if self.confirm_only:
self.downloader.zephyr_confirm_program()
self.downloader.disconnect()
return
if self.bin_file is None:
raise ValueError('Cannot download program; bin_file is missing')
self.downloader.stop_program()
self.downloader.clear_program()
self.downloader.wait_for_flash_status_ok(self.timeout)
self.downloader.download(self.bin_file)
status = self.downloader.wait_for_flash_status_ok(self.timeout)
if status != 0:
raise ValueError('Program download failed: '
'flash status 0x{:02x}'.format(status))
self.downloader.swid()
self.downloader.start_program()
self.downloader.wait_for_bootup(self.timeout)
self.downloader.swid()
if self.confirm:
self.downloader.enter_pre_operational()
self.downloader.zephyr_confirm_program()
self.downloader.disconnect()
class CANopenProgramDownloader(object):
'''CANopen program downloader'''
def __init__(self, logger, node_id, can_context=DEFAULT_CAN_CONTEXT,
program_number=DEFAULT_PROGRAM_NUMBER,
sdo_retries=DEFAULT_SDO_RETRIES, sdo_timeout=DEFAULT_SDO_TIMEOUT):
super(CANopenProgramDownloader, self).__init__()
self.logger = logger
self.node_id = node_id
self.can_context = can_context
self.program_number = program_number
self.network = canopen.Network()
self.node = self.network.add_node(self.node_id,
self.create_object_dictionary())
self.data_sdo = self.node.sdo[H1F50_PROGRAM_DATA][self.program_number]
self.ctrl_sdo = self.node.sdo[H1F51_PROGRAM_CTRL][self.program_number]
self.swid_sdo = self.node.sdo[H1F56_PROGRAM_SWID][self.program_number]
self.flash_sdo = self.node.sdo[H1F57_FLASH_STATUS][self.program_number]
self.node.sdo.MAX_RETRIES = sdo_retries
self.node.sdo.RESPONSE_TIMEOUT = sdo_timeout
def connect(self):
'''Connect to CAN network'''
try:
self.network.connect(context=self.can_context)
        except Exception:
raise ValueError('Unable to connect to CAN network')
def disconnect(self):
'''Disconnect from CAN network'''
self.network.disconnect()
def enter_pre_operational(self):
'''Enter pre-operational NMT state'''
self.logger.info("Entering pre-operational mode")
try:
self.node.nmt.state = 'PRE-OPERATIONAL'
        except Exception:
raise ValueError('Failed to enter pre-operational mode')
def _ctrl_program(self, cmd):
'''Write program control command to CANopen object dictionary (0x1f51)'''
try:
self.ctrl_sdo.raw = cmd
        except Exception:
raise ValueError('Unable to write control command 0x{:02x}'.format(cmd))
def stop_program(self):
'''Write stop control command to CANopen object dictionary (0x1f51)'''
self.logger.info('Stopping program')
self._ctrl_program(PROGRAM_CTRL_STOP)
def start_program(self):
'''Write start control command to CANopen object dictionary (0x1f51)'''
self.logger.info('Starting program')
self._ctrl_program(PROGRAM_CTRL_START)
def clear_program(self):
'''Write clear control command to CANopen object dictionary (0x1f51)'''
self.logger.info('Clearing program')
self._ctrl_program(PROGRAM_CTRL_CLEAR)
def zephyr_confirm_program(self):
'''Write confirm control command to CANopen object dictionary (0x1f51)'''
self.logger.info('Confirming program')
self._ctrl_program(PROGRAM_CTRL_ZEPHYR_CONFIRM)
def swid(self):
'''Read software identification from CANopen object dictionary (0x1f56)'''
try:
swid = self.swid_sdo.raw
        except Exception:
raise ValueError('Failed to read software identification')
self.logger.info('Program software identification: 0x{:08x}'.format(swid))
return swid
def flash_status(self):
'''Read flash status identification'''
try:
status = self.flash_sdo.raw
        except Exception:
raise ValueError('Failed to read flash status identification')
return status
    def download(self, bin_file):
        '''Download program to CANopen object dictionary (0x1f50)'''
        self.logger.info('Downloading program: %s', bin_file)
        infile = None
        outfile = None
        progress = None
        try:
            size = os.path.getsize(bin_file)
            infile = open(bin_file, 'rb')
            outfile = self.data_sdo.open('wb', size=size)
            progress = Bar('%(percent)d%%', max=size, suffix='%(index)d/%(max)dB')
            while True:
                chunk = infile.read(1024)
                if not chunk:
                    break
                outfile.write(chunk)
                progress.next(n=len(chunk))
        except Exception:
            raise ValueError('Failed to download program')
        finally:
            # Guard against partially initialized state if an early step
            # (e.g. os.path.getsize()) raised before these were assigned.
            if progress is not None:
                progress.finish()
            if infile is not None:
                infile.close()
            if outfile is not None:
                outfile.close()
def wait_for_bootup(self, timeout=DEFAULT_TIMEOUT):
'''Wait for boot-up message reception'''
self.logger.info('Waiting for boot-up message...')
try:
self.node.nmt.wait_for_bootup(timeout=timeout)
        except Exception:
raise ValueError('Timeout waiting for boot-up message')
def wait_for_flash_status_ok(self, timeout=DEFAULT_TIMEOUT):
'''Wait for flash status ok'''
self.logger.info('Waiting for flash status ok')
end_time = time.time() + timeout
        # Poll the flash status object until it reads back ok (0) or the
        # overall timeout expires; each iteration is a single SDO read.
        while True:
now = time.time()
status = self.flash_status()
if status == 0:
break
if now > end_time:
return status
return status
@staticmethod
def create_object_dictionary():
'''Create a synthetic CANopen object dictionary for program download'''
objdict = canopen.objectdictionary.ObjectDictionary()
array = canopen.objectdictionary.Array('Program data', 0x1f50)
member = canopen.objectdictionary.Variable('', 0x1f50, subindex=1)
member.data_type = canopen.objectdictionary.DOMAIN
array.add_member(member)
objdict.add_object(array)
array = canopen.objectdictionary.Array('Program control', 0x1f51)
member = canopen.objectdictionary.Variable('', 0x1f51, subindex=1)
member.data_type = canopen.objectdictionary.UNSIGNED8
array.add_member(member)
objdict.add_object(array)
        array = canopen.objectdictionary.Array('Program software ID', 0x1f56)
member = canopen.objectdictionary.Variable('', 0x1f56, subindex=1)
member.data_type = canopen.objectdictionary.UNSIGNED32
array.add_member(member)
objdict.add_object(array)
array = canopen.objectdictionary.Array('Flash error ID', 0x1f57)
member = canopen.objectdictionary.Variable('', 0x1f57, subindex=1)
member.data_type = canopen.objectdictionary.UNSIGNED32
array.add_member(member)
objdict.add_object(array)
return objdict
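# Illustrative sketch (editor addition, not part of the runner API): driving
# CANopenProgramDownloader directly, mirroring the flash() flow above. The
# node ID (42), binary name and logger are placeholder values.
def _example_download(logger):
    downloader = CANopenProgramDownloader(logger=logger, node_id=42)
    downloader.connect()
    try:
        downloader.wait_for_flash_status_ok()
        downloader.enter_pre_operational()
        downloader.stop_program()
        downloader.clear_program()
        downloader.download('zephyr.bin')
        downloader.start_program()
    finally:
        downloader.disconnect()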
| zephyrproject-rtos/zephyr | scripts/west_commands/runners/canopen_program.py | Python | apache-2.0 | 13,074 |
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinderclient.v2 import availability_zones
from cinderclient.v2 import consistencygroups
from cinderclient.v2 import pools
from cinderclient.v2 import qos_specs
from cinderclient.v2 import quotas
from cinderclient.v2 import services
from cinderclient.v2 import volume_backups as vol_backups
from cinderclient.v2 import volume_encryption_types as vol_enc_types
from cinderclient.v2 import volume_snapshots as vol_snaps
from cinderclient.v2 import volume_transfers
from cinderclient.v2 import volume_types
from cinderclient.v2 import volumes
from openstack_dashboard import api
from openstack_dashboard.usage import quotas as usage_quotas
from openstack_dashboard.test.test_data import utils
def data(TEST):
TEST.cinder_services = utils.TestDataContainer()
TEST.cinder_volumes = utils.TestDataContainer()
TEST.cinder_volume_backups = utils.TestDataContainer()
TEST.cinder_volume_encryption_types = utils.TestDataContainer()
TEST.cinder_volume_types = utils.TestDataContainer()
TEST.cinder_volume_encryption = utils.TestDataContainer()
TEST.cinder_bootable_volumes = utils.TestDataContainer()
TEST.cinder_qos_specs = utils.TestDataContainer()
TEST.cinder_qos_spec_associations = utils.TestDataContainer()
TEST.cinder_volume_snapshots = utils.TestDataContainer()
TEST.cinder_quotas = utils.TestDataContainer()
TEST.cinder_quota_usages = utils.TestDataContainer()
TEST.cinder_availability_zones = utils.TestDataContainer()
TEST.cinder_volume_transfers = utils.TestDataContainer()
TEST.cinder_pools = utils.TestDataContainer()
TEST.cinder_consistencygroups = utils.TestDataContainer()
TEST.cinder_cgroup_volumes = utils.TestDataContainer()
# Services
service_1 = services.Service(services.ServiceManager(None), {
"service": "cinder-scheduler",
"status": "enabled",
"binary": "cinder-scheduler",
"zone": "internal",
"state": "up",
"updated_at": "2013-07-08T05:21:00.000000",
"host": "devstack001",
"disabled_reason": None
})
service_2 = services.Service(services.ServiceManager(None), {
"service": "cinder-volume",
"status": "enabled",
"binary": "cinder-volume",
"zone": "nova",
"state": "up",
"updated_at": "2013-07-08T05:20:51.000000",
"host": "devstack001",
"disabled_reason": None
})
TEST.cinder_services.add(service_1)
TEST.cinder_services.add(service_2)
# Volumes - Cinder v1
volume = volumes.Volume(
volumes.VolumeManager(None),
{'id': "11023e92-8008-4c8b-8059-7f2293ff3887",
'status': 'available',
'size': 40,
'display_name': 'Volume name',
'display_description': 'Volume description',
'created_at': '2014-01-27 10:30:00',
'volume_type': None,
'attachments': []})
nameless_volume = volumes.Volume(
volumes.VolumeManager(None),
{"id": "4b069dd0-6eaa-4272-8abc-5448a68f1cce",
"status": 'available',
"size": 10,
"display_name": '',
"display_description": '',
"device": "/dev/hda",
"created_at": '2010-11-21 18:34:25',
"volume_type": 'vol_type_1',
"attachments": []})
other_volume = volumes.Volume(
volumes.VolumeManager(None),
{'id': "21023e92-8008-1234-8059-7f2293ff3889",
'status': 'in-use',
'size': 10,
'display_name': u'my_volume',
'display_description': '',
'created_at': '2013-04-01 10:30:00',
'volume_type': None,
'attachments': [{"id": "1", "server_id": '1',
"device": "/dev/hda"}]})
volume_with_type = volumes.Volume(
volumes.VolumeManager(None),
{'id': "7dcb47fd-07d9-42c2-9647-be5eab799ebe",
'name': 'my_volume2',
'status': 'in-use',
'size': 10,
'display_name': u'my_volume2',
'display_description': '',
'created_at': '2013-04-01 10:30:00',
'volume_type': 'vol_type_2',
'attachments': [{"id": "2", "server_id": '2',
"device": "/dev/hdb"}]})
non_bootable_volume = volumes.Volume(
volumes.VolumeManager(None),
{'id': "21023e92-8008-1234-8059-7f2293ff3890",
'status': 'in-use',
'size': 10,
'display_name': u'my_volume',
'display_description': '',
'created_at': '2013-04-01 10:30:00',
'volume_type': None,
'bootable': False,
'attachments': [{"id": "1", "server_id": '1',
"device": "/dev/hda"}]})
volume.bootable = 'true'
nameless_volume.bootable = 'true'
other_volume.bootable = 'true'
TEST.cinder_volumes.add(api.cinder.Volume(volume))
TEST.cinder_volumes.add(api.cinder.Volume(nameless_volume))
TEST.cinder_volumes.add(api.cinder.Volume(other_volume))
TEST.cinder_volumes.add(api.cinder.Volume(volume_with_type))
TEST.cinder_bootable_volumes.add(api.cinder.Volume(non_bootable_volume))
vol_type1 = volume_types.VolumeType(volume_types.VolumeTypeManager(None),
{'id': u'1',
'name': u'vol_type_1',
'description': 'type 1 description',
'extra_specs': {'foo': 'bar'}})
vol_type2 = volume_types.VolumeType(volume_types.VolumeTypeManager(None),
{'id': u'2',
'name': u'vol_type_2',
'description': 'type 2 description'})
TEST.cinder_volume_types.add(vol_type1, vol_type2)
# Volumes - Cinder v2
volume_v2 = volumes.Volume(
volumes.VolumeManager(None),
{'id': "31023e92-8008-4c8b-8059-7f2293ff1234",
'name': 'v2_volume',
'description': "v2 Volume Description",
'status': 'available',
'size': 20,
'created_at': '2014-01-27 10:30:00',
'volume_type': None,
'os-vol-host-attr:host': 'host@backend-name#pool',
'bootable': 'true',
'attachments': []})
volume_v2.bootable = 'true'
TEST.cinder_volumes.add(api.cinder.Volume(volume_v2))
snapshot = vol_snaps.Snapshot(
vol_snaps.SnapshotManager(None),
{'id': '5f3d1c33-7d00-4511-99df-a2def31f3b5d',
'display_name': 'test snapshot',
'display_description': 'volume snapshot',
'size': 40,
'status': 'available',
'volume_id': '11023e92-8008-4c8b-8059-7f2293ff3887'})
snapshot2 = vol_snaps.Snapshot(
vol_snaps.SnapshotManager(None),
{'id': 'c9d0881a-4c0b-4158-a212-ad27e11c2b0f',
'name': '',
'description': 'v2 volume snapshot description',
'size': 80,
'status': 'available',
'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
snapshot3 = vol_snaps.Snapshot(
vol_snaps.SnapshotManager(None),
{'id': 'c9d0881a-4c0b-4158-a212-ad27e11c2b0e',
'name': '',
'description': 'v2 volume snapshot description 2',
'size': 80,
'status': 'available',
'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
snapshot.bootable = 'true'
snapshot2.bootable = 'true'
TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot))
TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot2))
TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot3))
TEST.cinder_volume_snapshots.first()._volume = volume
# Volume Type Encryption
vol_enc_type1 = vol_enc_types.VolumeEncryptionType(
vol_enc_types.VolumeEncryptionTypeManager(None),
{'volume_type_id': u'1',
'control_location': "front-end",
'key_size': 512,
'provider': "a-provider",
'cipher': "a-cipher"})
vol_enc_type2 = vol_enc_types.VolumeEncryptionType(
vol_enc_types.VolumeEncryptionTypeManager(None),
{'volume_type_id': u'2',
'control_location': "front-end",
'key_size': 256,
'provider': "a-provider",
'cipher': "a-cipher"})
vol_unenc_type1 = vol_enc_types.VolumeEncryptionType(
vol_enc_types.VolumeEncryptionTypeManager(None), {})
TEST.cinder_volume_encryption_types.add(vol_enc_type1, vol_enc_type2,
vol_unenc_type1)
volume_backup1 = vol_backups.VolumeBackup(
vol_backups.VolumeBackupManager(None),
{'id': 'a374cbb8-3f99-4c3f-a2ef-3edbec842e31',
'name': 'backup1',
'description': 'volume backup 1',
'size': 10,
'status': 'available',
'container_name': 'volumebackups',
'volume_id': '11023e92-8008-4c8b-8059-7f2293ff3887'})
volume_backup2 = vol_backups.VolumeBackup(
vol_backups.VolumeBackupManager(None),
{'id': 'c321cbb8-3f99-4c3f-a2ef-3edbec842e52',
'name': 'backup2',
'description': 'volume backup 2',
'size': 20,
'status': 'available',
'container_name': 'volumebackups',
'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
volume_backup3 = vol_backups.VolumeBackup(
vol_backups.VolumeBackupManager(None),
{'id': 'c321cbb8-3f99-4c3f-a2ef-3edbec842e53',
'name': 'backup3',
'description': 'volume backup 3',
'size': 20,
'status': 'available',
'container_name': 'volumebackups',
'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
TEST.cinder_volume_backups.add(volume_backup1)
TEST.cinder_volume_backups.add(volume_backup2)
TEST.cinder_volume_backups.add(volume_backup3)
# Volume Encryption
vol_enc_metadata1 = volumes.Volume(
volumes.VolumeManager(None),
{'cipher': 'test-cipher',
'key_size': 512,
'provider': 'test-provider',
'control_location': 'front-end'})
vol_unenc_metadata1 = volumes.Volume(
volumes.VolumeManager(None),
{})
TEST.cinder_volume_encryption.add(vol_enc_metadata1)
TEST.cinder_volume_encryption.add(vol_unenc_metadata1)
# Quota Sets
quota_data = dict(volumes='1',
snapshots='1',
gigabytes='1000')
quota = quotas.QuotaSet(quotas.QuotaSetManager(None), quota_data)
TEST.cinder_quotas.add(api.base.QuotaSet(quota))
# Quota Usages
quota_usage_data = {'gigabytes': {'used': 0,
'quota': 1000},
'instances': {'used': 0,
'quota': 10},
'snapshots': {'used': 0,
'quota': 10}}
quota_usage = usage_quotas.QuotaUsage()
for k, v in quota_usage_data.items():
quota_usage.add_quota(api.base.Quota(k, v['quota']))
quota_usage.tally(k, v['used'])
TEST.cinder_quota_usages.add(quota_usage)
# Availability Zones
# Cinder returns the following structure from os-availability-zone
# {"availabilityZoneInfo":
# [{"zoneState": {"available": true}, "zoneName": "nova"}]}
# Note that the default zone is still "nova" even though this is cinder
TEST.cinder_availability_zones.add(
availability_zones.AvailabilityZone(
availability_zones.AvailabilityZoneManager(None),
{
'zoneName': 'nova',
'zoneState': {'available': True}
}
)
)
# Cinder Limits
limits = {"absolute": {"totalVolumesUsed": 1,
"totalGigabytesUsed": 5,
"maxTotalVolumeGigabytes": 1000,
"maxTotalVolumes": 10}}
TEST.cinder_limits = limits
# QOS Specs
qos_spec1 = qos_specs.QoSSpecs(
qos_specs.QoSSpecsManager(None),
{"id": "418db45d-6992-4674-b226-80aacad2073c",
"name": "high_iops",
"consumer": "back-end",
"specs": {"minIOPS": "1000", "maxIOPS": '100000'}})
qos_spec2 = qos_specs.QoSSpecs(
qos_specs.QoSSpecsManager(None),
{"id": "6ed7035f-992e-4075-8ed6-6eff19b3192d",
"name": "high_bws",
"consumer": "back-end",
"specs": {"maxBWS": '5000'}})
TEST.cinder_qos_specs.add(qos_spec1, qos_spec2)
vol_type1.associated_qos_spec = qos_spec1.name
TEST.cinder_qos_spec_associations.add(vol_type1)
# volume_transfers
transfer_1 = volume_transfers.VolumeTransfer(
volume_transfers.VolumeTransferManager(None), {
'id': '99999999-8888-7777-6666-555555555555',
'name': 'test transfer',
'volume_id': volume.id,
'auth_key': 'blah',
'created_at': ''})
TEST.cinder_volume_transfers.add(transfer_1)
# Pools
pool1 = pools.Pool(
pools.PoolManager(None), {
"QoS_support": False,
"allocated_capacity_gb": 0,
"driver_version": "3.0.0",
"free_capacity_gb": 10,
"extra_specs": {
"description": "LVM Extra specs",
"display_name": "LVMDriver",
"namespace": "OS::Cinder::LVMDriver",
"type": "object",
},
"name": "devstack@lvmdriver-1#lvmdriver-1",
"pool_name": "lvmdriver-1",
"reserved_percentage": 0,
"storage_protocol": "iSCSI",
"total_capacity_gb": 10,
"vendor_name": "Open Source",
"volume_backend_name": "lvmdriver-1"})
pool2 = pools.Pool(
pools.PoolManager(None), {
"QoS_support": False,
"allocated_capacity_gb": 2,
"driver_version": "3.0.0",
"free_capacity_gb": 15,
"extra_specs": {
"description": "LVM Extra specs",
"display_name": "LVMDriver",
"namespace": "OS::Cinder::LVMDriver",
"type": "object",
},
"name": "devstack@lvmdriver-2#lvmdriver-2",
"pool_name": "lvmdriver-2",
"reserved_percentage": 0,
"storage_protocol": "iSCSI",
"total_capacity_gb": 10,
"vendor_name": "Open Source",
"volume_backend_name": "lvmdriver-2"})
TEST.cinder_pools.add(pool1)
TEST.cinder_pools.add(pool2)
# volume consistency groups
cgroup_1 = consistencygroups.Consistencygroup(
consistencygroups.ConsistencygroupManager(None),
{'id': u'1',
'name': u'cg_1',
'description': 'cg 1 description',
'volume_types': u'1',
'volume_type_names': []})
cgroup_2 = consistencygroups.Consistencygroup(
consistencygroups.ConsistencygroupManager(None),
{'id': u'2',
'name': u'cg_2',
'description': 'cg 2 description',
'volume_types': u'1',
'volume_type_names': []})
TEST.cinder_consistencygroups.add(cgroup_1)
TEST.cinder_consistencygroups.add(cgroup_2)
volume_for_consistency_group = volumes.Volume(
volumes.VolumeManager(None),
{'id': "11023e92-8008-4c8b-8059-7f2293ff3881",
'status': 'available',
'size': 40,
'display_name': 'Volume name',
'display_description': 'Volume description',
'created_at': '2014-01-27 10:30:00',
'volume_type': None,
'attachments': [],
'consistencygroup_id': u'1'})
TEST.cinder_cgroup_volumes.add(api.cinder.Volume(
volume_for_consistency_group))
| yangleo/cloud-github | openstack_dashboard/test/test_data/cinder_data.py | Python | apache-2.0 | 16,279 |
# Copyright 2014, Doug Wiegley, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import acos_client.errors as acos_errors
import acos_client.v21.base as base
class BasePersistence(base.BaseV21):
def __init__(self, client):
super(BasePersistence, self).__init__(client)
        # Subclasses are expected to set self.pers_type before invoking this
        # base initializer.
        self.prefix = "slb.template.%s_persistence" % self.pers_type
def get(self, name, **kwargs):
return self._post(("%s.search" % self.prefix), {'name': name},
**kwargs)
def exists(self, name, **kwargs):
try:
self.get(name, **kwargs)
return True
except acos_errors.NotFound:
return False
def create(self, name, **kwargs):
self._post(("%s.create" % self.prefix), self.get_params(name),
**kwargs)
def delete(self, name, **kwargs):
self._post(("%s.delete" % self.prefix), {'name': name}, **kwargs)
class CookiePersistence(BasePersistence):
def __init__(self, client):
self.pers_type = 'cookie'
super(CookiePersistence, self).__init__(client)
def get_params(self, name):
return {
"cookie_persistence_template": {
"name": name
}
}
class SourceIpPersistence(BasePersistence):
def __init__(self, client):
self.pers_type = 'src_ip'
super(SourceIpPersistence, self).__init__(client)
def get_params(self, name):
return {
"src_ip_persistence_template": {
"name": name
}
}
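# Illustrative sketch (editor addition, not part of acos_client): using the
# template classes above; 'client' stands for a configured acos_client v21
# client instance.
def _example_ensure_persistence(client, name):
    template = SourceIpPersistence(client)
    if not template.exists(name):
        template.create(name)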
| dougwig/acos-client | acos_client/v21/slb/template/persistence.py | Python | apache-2.0 | 2,099 |
# Copyright (c) 2015 OpenStack Foundation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron import context
from neutron.db.quota import api as quota_api
from neutron.tests.unit import testlib_api
class TestQuotaDbApi(testlib_api.SqlTestCaseLight):
def _set_context(self):
self.tenant_id = 'Higuain'
self.context = context.Context('Gonzalo', self.tenant_id,
is_admin=False, is_advsvc=False)
def _create_quota_usage(self, resource, used, reserved, tenant_id=None):
tenant_id = tenant_id or self.tenant_id
return quota_api.set_quota_usage(
self.context, resource, tenant_id,
in_use=used, reserved=reserved)
def _verify_quota_usage(self, usage_info,
expected_resource=None,
expected_used=None,
expected_reserved=None,
expected_dirty=None):
self.assertEqual(self.tenant_id, usage_info.tenant_id)
if expected_resource:
self.assertEqual(expected_resource, usage_info.resource)
if expected_dirty is not None:
self.assertEqual(expected_dirty, usage_info.dirty)
if expected_used is not None:
self.assertEqual(expected_used, usage_info.used)
if expected_reserved is not None:
self.assertEqual(expected_reserved, usage_info.reserved)
if expected_used is not None and expected_reserved is not None:
self.assertEqual(expected_used + expected_reserved,
usage_info.total)
def setUp(self):
super(TestQuotaDbApi, self).setUp()
self._set_context()
def test_create_quota_usage(self):
usage_info = self._create_quota_usage('goals', 26, 10)
self._verify_quota_usage(usage_info,
expected_resource='goals',
expected_used=26,
expected_reserved=10)
def test_update_quota_usage(self):
self._create_quota_usage('goals', 26, 10)
# Higuain scores a double
usage_info_1 = quota_api.set_quota_usage(
self.context, 'goals', self.tenant_id,
in_use=28)
self._verify_quota_usage(usage_info_1,
expected_used=28,
expected_reserved=10)
usage_info_2 = quota_api.set_quota_usage(
self.context, 'goals', self.tenant_id,
reserved=8)
self._verify_quota_usage(usage_info_2,
expected_used=28,
expected_reserved=8)
def test_update_quota_usage_with_deltas(self):
self._create_quota_usage('goals', 26, 10)
# Higuain scores a double
usage_info_1 = quota_api.set_quota_usage(
self.context, 'goals', self.tenant_id,
in_use=2, delta=True)
self._verify_quota_usage(usage_info_1,
expected_used=28,
expected_reserved=10)
usage_info_2 = quota_api.set_quota_usage(
self.context, 'goals', self.tenant_id,
reserved=-2, delta=True)
self._verify_quota_usage(usage_info_2,
expected_used=28,
expected_reserved=8)
def test_set_quota_usage_dirty(self):
self._create_quota_usage('goals', 26, 10)
# Higuain needs a shower after the match
self.assertEqual(1, quota_api.set_quota_usage_dirty(
self.context, 'goals', self.tenant_id))
usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id)
self._verify_quota_usage(usage_info,
expected_dirty=True)
# Higuain is clean now
self.assertEqual(1, quota_api.set_quota_usage_dirty(
self.context, 'goals', self.tenant_id, dirty=False))
usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id)
self._verify_quota_usage(usage_info,
expected_dirty=False)
def test_set_dirty_non_existing_quota_usage(self):
self.assertEqual(0, quota_api.set_quota_usage_dirty(
self.context, 'meh', self.tenant_id))
def test_set_resources_quota_usage_dirty(self):
self._create_quota_usage('goals', 26, 10)
self._create_quota_usage('assists', 11, 5)
self._create_quota_usage('bookings', 3, 1)
self.assertEqual(2, quota_api.set_resources_quota_usage_dirty(
self.context, ['goals', 'bookings'], self.tenant_id))
usage_info_goals = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id)
usage_info_assists = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'assists', self.tenant_id)
usage_info_bookings = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'bookings', self.tenant_id)
self._verify_quota_usage(usage_info_goals, expected_dirty=True)
self._verify_quota_usage(usage_info_assists, expected_dirty=False)
self._verify_quota_usage(usage_info_bookings, expected_dirty=True)
def test_set_resources_quota_usage_dirty_with_empty_list(self):
self._create_quota_usage('goals', 26, 10)
self._create_quota_usage('assists', 11, 5)
self._create_quota_usage('bookings', 3, 1)
# Expect all the resources for the tenant to be set dirty
self.assertEqual(3, quota_api.set_resources_quota_usage_dirty(
self.context, [], self.tenant_id))
usage_info_goals = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id)
usage_info_assists = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'assists', self.tenant_id)
usage_info_bookings = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'bookings', self.tenant_id)
self._verify_quota_usage(usage_info_goals, expected_dirty=True)
self._verify_quota_usage(usage_info_assists, expected_dirty=True)
self._verify_quota_usage(usage_info_bookings, expected_dirty=True)
# Higuain is clean now
self.assertEqual(1, quota_api.set_quota_usage_dirty(
self.context, 'goals', self.tenant_id, dirty=False))
usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id)
self._verify_quota_usage(usage_info,
expected_dirty=False)
def _test_set_all_quota_usage_dirty(self, expected):
self._create_quota_usage('goals', 26, 10)
self._create_quota_usage('goals', 12, 6, tenant_id='Callejon')
self.assertEqual(expected, quota_api.set_all_quota_usage_dirty(
self.context, 'goals'))
def test_set_all_quota_usage_dirty(self):
# All goal scorers need a shower after the match, but since this is not
# admin context we can clean only one
self._test_set_all_quota_usage_dirty(expected=1)
def test_get_quota_usage_by_tenant(self):
self._create_quota_usage('goals', 26, 10)
self._create_quota_usage('assists', 11, 5)
# Create a resource for a different tenant
self._create_quota_usage('mehs', 99, 99, tenant_id='buffon')
usage_infos = quota_api.get_quota_usage_by_tenant_id(
self.context, self.tenant_id)
self.assertEqual(2, len(usage_infos))
resources = [info.resource for info in usage_infos]
self.assertIn('goals', resources)
self.assertIn('assists', resources)
def test_get_quota_usage_by_resource(self):
self._create_quota_usage('goals', 26, 10)
self._create_quota_usage('assists', 11, 5)
self._create_quota_usage('goals', 12, 6, tenant_id='Callejon')
usage_infos = quota_api.get_quota_usage_by_resource(
self.context, 'goals')
# Only 1 result expected in tenant context
self.assertEqual(1, len(usage_infos))
self._verify_quota_usage(usage_infos[0],
expected_resource='goals',
expected_used=26,
expected_reserved=10)
def test_get_quota_usage_by_tenant_and_resource(self):
self._create_quota_usage('goals', 26, 10)
usage_info = quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id)
self._verify_quota_usage(usage_info,
expected_resource='goals',
expected_used=26,
expected_reserved=10)
def test_get_non_existing_quota_usage_returns_none(self):
self.assertIsNone(quota_api.get_quota_usage_by_resource_and_tenant(
self.context, 'goals', self.tenant_id))
class TestQuotaDbApiAdminContext(TestQuotaDbApi):
def _set_context(self):
self.tenant_id = 'Higuain'
self.context = context.Context('Gonzalo', self.tenant_id,
is_admin=True, is_advsvc=True,
load_admin_roles=False)
def test_get_quota_usage_by_resource(self):
self._create_quota_usage('goals', 26, 10)
self._create_quota_usage('assists', 11, 5)
self._create_quota_usage('goals', 12, 6, tenant_id='Callejon')
usage_infos = quota_api.get_quota_usage_by_resource(
self.context, 'goals')
# 2 results expected in admin context
self.assertEqual(2, len(usage_infos))
for usage_info in usage_infos:
self.assertEqual('goals', usage_info.resource)
def test_set_all_quota_usage_dirty(self):
# All goal scorers need a shower after the match, and with admin
# context we should be able to clean all of them
self._test_set_all_quota_usage_dirty(expected=2)
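# Illustrative sketch (editor addition, not part of the test suite): the
# quota usage API exercised above, in plain form. 'ctx' stands for a neutron
# request context and 'port' for an arbitrary tracked resource.
def _example_track_usage(ctx, tenant_id):
    quota_api.set_quota_usage(ctx, 'port', tenant_id, in_use=3, reserved=1)
    usage = quota_api.get_quota_usage_by_resource_and_tenant(
        ctx, 'port', tenant_id)
    return usage.total  # in_use + reserved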
| paninetworks/neutron | neutron/tests/unit/db/quota/test_api.py | Python | apache-2.0 | 10,763 |