repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses, 15 values) | var_hash (int64, -9,223,186,179,200,150,000 to 9,223,291,175B) | doc_hash (int64, -9,223,304,365,658,930,000 to 9,223,309,051B) | line_mean (float64, 3.5 to 99.8) | line_max (int64, 13 to 999) | alpha_frac (float64, 0.25 to 0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|
peastman/deepchem | examples/tox21/tox21_IRV.py | 6 | 1286 | """
Script that trains multitask models on Tox21 dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import shutil
import numpy as np
import deepchem as dc
import time
from deepchem.molnet import load_tox21
# Only for debug!
np.random.seed(123)
# Load Tox21 dataset
n_features = 1024
tox21_tasks, tox21_datasets, transformers = load_tox21()
train_dataset, valid_dataset, test_dataset = tox21_datasets
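# K: number of nearest neighbours used per task by the IRV (influence relevance
# voting) transformer and classifier below (explanatory note; value kept from the
# original script).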
K = 10
# Fit models
metric = dc.metrics.Metric(dc.metrics.roc_auc_score, np.mean)
transformers = [dc.trans.IRVTransformer(K, len(tox21_tasks), train_dataset)]
for transformer in transformers:
train_dataset = transformer.transform(train_dataset)
valid_dataset = transformer.transform(valid_dataset)
test_dataset = transformer.transform(test_dataset)
model = dc.models.TensorflowMultitaskIRVClassifier(
len(tox21_tasks), K=K, learning_rate=0.001, penalty=0.05, batch_size=32)
# Fit trained model
model.fit(train_dataset, nb_epoch=10)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
| mit | 4,005,920,291,098,748,000 | 4,514,207,688,818,674,000 | 26.956522 | 76 | 0.767496 | false |
wehkamp/ansible-modules-core | cloud/amazon/ec2_elb_lb.py | 7 | 33856 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: ec2_elb_lb
description:
- Returns information about the load balancer.
- Will be marked changed when called only if state is changed.
short_description: Creates or destroys Amazon ELB.
version_added: "1.5"
author: Jim Dalton
options:
state:
description:
- Create or destroy the ELB
required: true
name:
description:
- The name of the ELB
required: true
listeners:
description:
- List of ports/protocols for this ELB to listen on (see example)
required: false
purge_listeners:
description:
- Purge existing listeners on ELB that are not found in listeners
required: false
default: true
zones:
description:
- List of availability zones to enable on this ELB
required: false
purge_zones:
description:
- Purge existing availability zones on ELB that are not found in zones
required: false
default: false
security_group_ids:
description:
- A list of security groups to apply to the ELB
required: false
default: None
version_added: "1.6"
health_check:
description:
- An associative array of health check configuration settings (see example)
required: false
default: None
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
subnets:
description:
- A list of VPC subnets to use when creating ELB. Zones should be empty if using this.
required: false
default: None
aliases: []
version_added: "1.7"
purge_subnets:
description:
- Purge existing subnets on the ELB that are not found in subnets
required: false
default: false
version_added: "1.7"
scheme:
description:
- The scheme to use when creating the ELB. For a private VPC-visible ELB use 'internal'.
required: false
default: 'internet-facing'
version_added: "1.7"
validate_certs:
description:
- When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
required: false
default: "yes"
choices: ["yes", "no"]
aliases: []
version_added: "1.5"
connection_draining_timeout:
description:
- Wait a specified timeout allowing connections to drain before terminating an instance
required: false
aliases: []
version_added: "1.8"
cross_az_load_balancing:
description:
- Distribute load across all configured Availability Zones
required: false
default: "no"
choices: ["yes", "no"]
aliases: []
version_added: "1.8"
stickiness:
description:
- An associative array of stickiness policy settings. The policy will be applied to all listeners (see example)
required: false
version_added: "2.0"
extends_documentation_fragment: aws
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic provisioning example (non-VPC)
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
- protocol: https
load_balancer_port: 443
instance_protocol: http # optional, defaults to value of protocol setting
instance_port: 80
# ssl certificate required for https or ssl
ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert"
# Internal ELB example
- local_action:
module: ec2_elb_lb
name: "test-vpc"
scheme: internal
state: present
subnets:
- subnet-abcd1234
- subnet-1a2b3c4d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
# Configure a health check
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
health_check:
ping_protocol: http # options are http, https, ssl, tcp
ping_port: 80
ping_path: "/index.html" # not required for tcp or ssl
response_timeout: 5 # seconds
interval: 30 # seconds
unhealthy_threshold: 2
healthy_threshold: 10
# Ensure ELB is gone
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
# Normally, this module will purge any listeners that exist on the ELB
# but aren't specified in the listeners parameter. If purge_listeners is
# false it leaves them alone
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_listeners: no
# Normally, this module will leave availability zones that are enabled
# on the ELB alone. If purge_zones is true, then any extraneous zones
# will be removed
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_zones: yes
# Creates an ELB and assigns a list of subnets to it.
- local_action:
module: ec2_elb_lb
state: present
name: 'New ELB'
security_group_ids: 'sg-123456, sg-67890'
region: us-west-2
subnets: 'subnet-123456,subnet-67890'
purge_subnets: yes
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
# Create an ELB with connection draining and cross availability
# zone load balancing
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
connection_draining_timeout: 60
cross_az_load_balancing: "yes"
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
# Create an ELB with load balancer stickiness enabled
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
stickiness:
type: loadbalancer
enabled: yes
expiration: 300
# Create an ELB with application stickiness enabled
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
stickiness:
type: application
enabled: yes
cookie: SESSIONID
"""
try:
import boto
import boto.ec2.elb
import boto.ec2.elb.attributes
from boto.ec2.elb.healthcheck import HealthCheck
from boto.regioninfo import RegionInfo
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
class ElbManager(object):
"""Handles ELB creation and destruction"""
def __init__(self, module, name, listeners=None, purge_listeners=None,
zones=None, purge_zones=None, security_group_ids=None,
health_check=None, subnets=None, purge_subnets=None,
scheme="internet-facing", connection_draining_timeout=None,
cross_az_load_balancing=None,
stickiness=None, region=None, **aws_connect_params):
self.module = module
self.name = name
self.listeners = listeners
self.purge_listeners = purge_listeners
self.zones = zones
self.purge_zones = purge_zones
self.security_group_ids = security_group_ids
self.health_check = health_check
self.subnets = subnets
self.purge_subnets = purge_subnets
self.scheme = scheme
self.connection_draining_timeout = connection_draining_timeout
self.cross_az_load_balancing = cross_az_load_balancing
self.stickiness = stickiness
self.aws_connect_params = aws_connect_params
self.region = region
self.changed = False
self.status = 'gone'
self.elb_conn = self._get_elb_connection()
self.elb = self._get_elb()
def ensure_ok(self):
"""Create the ELB"""
if not self.elb:
# Zones and listeners will be added at creation
self._create_elb()
else:
self._set_zones()
self._set_security_groups()
self._set_elb_listeners()
self._set_subnets()
self._set_health_check()
# boto has introduced support for some ELB attributes in
# different versions, so we check first before trying to
# set them to avoid errors
if self._check_attribute_support('connection_draining'):
self._set_connection_draining_timeout()
if self._check_attribute_support('cross_zone_load_balancing'):
self._set_cross_az_load_balancing()
# add sticky options
self.select_stickiness_policy()
def ensure_gone(self):
"""Destroy the ELB"""
if self.elb:
self._delete_elb()
def get_info(self):
try:
check_elb = self.elb_conn.get_all_load_balancers(self.name)[0]
except:
check_elb = None
if not check_elb:
info = {
'name': self.name,
'status': self.status
}
else:
try:
lb_cookie_policy = check_elb.policies.lb_cookie_stickiness_policies[0].__dict__['policy_name']
except:
lb_cookie_policy = None
try:
app_cookie_policy = check_elb.policies.app_cookie_stickiness_policies[0].__dict__['policy_name']
except:
app_cookie_policy = None
info = {
'name': check_elb.name,
'dns_name': check_elb.dns_name,
'zones': check_elb.availability_zones,
'security_group_ids': check_elb.security_groups,
'status': self.status,
'subnets': self.subnets,
'scheme': check_elb.scheme,
'hosted_zone_name': check_elb.canonical_hosted_zone_name,
'hosted_zone_id': check_elb.canonical_hosted_zone_name_id,
'lb_cookie_policy': lb_cookie_policy,
'app_cookie_policy': app_cookie_policy
}
if check_elb.health_check:
info['health_check'] = {
'target': check_elb.health_check.target,
'interval': check_elb.health_check.interval,
'timeout': check_elb.health_check.timeout,
'healthy_threshold': check_elb.health_check.healthy_threshold,
'unhealthy_threshold': check_elb.health_check.unhealthy_threshold,
}
if check_elb.listeners:
info['listeners'] = [self._api_listener_as_tuple(l)
for l in check_elb.listeners]
elif self.status == 'created':
# When creating a new ELB, listeners don't show in the
# immediately returned result, so just include the
# ones that were added
info['listeners'] = [self._listener_as_tuple(l)
for l in self.listeners]
else:
info['listeners'] = []
if self._check_attribute_support('connection_draining'):
info['connection_draining_timeout'] = self.elb_conn.get_lb_attribute(self.name, 'ConnectionDraining').timeout
if self._check_attribute_support('cross_zone_load_balancing'):
is_cross_az_lb_enabled = self.elb_conn.get_lb_attribute(self.name, 'CrossZoneLoadBalancing')
if is_cross_az_lb_enabled:
info['cross_az_load_balancing'] = 'yes'
else:
info['cross_az_load_balancing'] = 'no'
# return stickiness info?
return info
def _get_elb(self):
elbs = self.elb_conn.get_all_load_balancers()
for elb in elbs:
if self.name == elb.name:
self.status = 'ok'
return elb
def _get_elb_connection(self):
try:
return connect_to_aws(boto.ec2.elb, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError), e:
self.module.fail_json(msg=str(e))
def _delete_elb(self):
# True if succeeds, exception raised if not
result = self.elb_conn.delete_load_balancer(name=self.name)
if result:
self.changed = True
self.status = 'deleted'
def _create_elb(self):
listeners = [self._listener_as_tuple(l) for l in self.listeners]
self.elb = self.elb_conn.create_load_balancer(name=self.name,
zones=self.zones,
security_groups=self.security_group_ids,
complex_listeners=listeners,
subnets=self.subnets,
scheme=self.scheme)
if self.elb:
self.changed = True
self.status = 'created'
def _create_elb_listeners(self, listeners):
"""Takes a list of listener tuples and creates them"""
# True if succeeds, exception raised if not
self.changed = self.elb_conn.create_load_balancer_listeners(self.name,
complex_listeners=listeners)
def _delete_elb_listeners(self, listeners):
"""Takes a list of listener tuples and deletes them from the elb"""
ports = [l[0] for l in listeners]
# True if succeeds, exception raised if not
self.changed = self.elb_conn.delete_load_balancer_listeners(self.name,
ports)
def _set_elb_listeners(self):
"""
Creates listeners specified by self.listeners; overwrites existing
listeners on these ports; removes extraneous listeners
"""
listeners_to_add = []
listeners_to_remove = []
listeners_to_keep = []
# Check for any listeners we need to create or overwrite
for listener in self.listeners:
listener_as_tuple = self._listener_as_tuple(listener)
# First we loop through existing listeners to see if one is
# already specified for this port
existing_listener_found = None
for existing_listener in self.elb.listeners:
# Since ELB allows only one listener on each incoming port, a
# single match on the incoming port is all we're looking for
if existing_listener[0] == listener['load_balancer_port']:
existing_listener_found = self._api_listener_as_tuple(existing_listener)
break
if existing_listener_found:
# Does it match exactly?
if listener_as_tuple != existing_listener_found:
# The ports are the same but something else is different,
# so we'll remove the existing one and add the new one
listeners_to_remove.append(existing_listener_found)
listeners_to_add.append(listener_as_tuple)
else:
# We already have this listener, so we're going to keep it
listeners_to_keep.append(existing_listener_found)
else:
# We didn't find an existing listener, so just add the new one
listeners_to_add.append(listener_as_tuple)
# Check for any extraneous listeners we need to remove, if desired
if self.purge_listeners:
for existing_listener in self.elb.listeners:
existing_listener_tuple = self._api_listener_as_tuple(existing_listener)
if existing_listener_tuple in listeners_to_remove:
# Already queued for removal
continue
if existing_listener_tuple in listeners_to_keep:
# Keep this one around
continue
# Since we're not already removing it and we don't need to keep
# it, let's get rid of it
listeners_to_remove.append(existing_listener_tuple)
if listeners_to_remove:
self._delete_elb_listeners(listeners_to_remove)
if listeners_to_add:
self._create_elb_listeners(listeners_to_add)
def _api_listener_as_tuple(self, listener):
"""Adds ssl_certificate_id to ELB API tuple if present"""
base_tuple = listener.get_complex_tuple()
if listener.ssl_certificate_id and len(base_tuple) < 5:
return base_tuple + (listener.ssl_certificate_id,)
return base_tuple
def _listener_as_tuple(self, listener):
"""Formats listener as a 4- or 5-tuples, in the order specified by the
ELB API"""
# N.B. string manipulations on protocols below (str(), upper()) is to
# ensure format matches output from ELB API
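# Illustrative example: {'protocol': 'http', 'load_balancer_port': 80,
# 'instance_port': 80} maps to (80, 80, 'HTTP', 'HTTP'); ssl_certificate_id,
# when present, is appended as a fifth element.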
listener_list = [
listener['load_balancer_port'],
listener['instance_port'],
str(listener['protocol'].upper()),
]
# Instance protocol is not required by ELB API; it defaults to match
# load balancer protocol. We'll mimic that behavior here
if 'instance_protocol' in listener:
listener_list.append(str(listener['instance_protocol'].upper()))
else:
listener_list.append(str(listener['protocol'].upper()))
if 'ssl_certificate_id' in listener:
listener_list.append(str(listener['ssl_certificate_id']))
return tuple(listener_list)
def _enable_zones(self, zones):
try:
self.elb.enable_zones(zones)
except boto.exception.BotoServerError, e:
if "Invalid Availability Zone" in e.error_message:
self.module.fail_json(msg=e.error_message)
else:
self.module.fail_json(msg="an unknown server error occurred, please try again later")
self.changed = True
def _disable_zones(self, zones):
try:
self.elb.disable_zones(zones)
except boto.exception.BotoServerError, e:
if "Invalid Availability Zone" in e.error_message:
self.module.fail_json(msg=e.error_message)
else:
self.module.fail_json(msg="an unknown server error occurred, please try again later")
self.changed = True
def _attach_subnets(self, subnets):
self.elb_conn.attach_lb_to_subnets(self.name, subnets)
self.changed = True
def _detach_subnets(self, subnets):
self.elb_conn.detach_lb_from_subnets(self.name, subnets)
self.changed = True
def _set_subnets(self):
"""Determine which subnets need to be attached or detached on the ELB"""
if self.subnets:
if self.purge_subnets:
subnets_to_detach = list(set(self.elb.subnets) - set(self.subnets))
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
else:
subnets_to_detach = None
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
if subnets_to_attach:
self._attach_subnets(subnets_to_attach)
if subnets_to_detach:
self._detach_subnets(subnets_to_detach)
def _set_zones(self):
"""Determine which zones need to be enabled or disabled on the ELB"""
if self.zones:
if self.purge_zones:
zones_to_disable = list(set(self.elb.availability_zones) -
set(self.zones))
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
else:
zones_to_disable = None
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
if zones_to_enable:
self._enable_zones(zones_to_enable)
# N.B. This must come second, in case it would have removed all zones
if zones_to_disable:
self._disable_zones(zones_to_disable)
def _set_security_groups(self):
if self.security_group_ids != None and set(self.elb.security_groups) != set(self.security_group_ids):
self.elb_conn.apply_security_groups_to_lb(self.name, self.security_group_ids)
self.changed = True
def _set_health_check(self):
"""Set health check values on ELB as needed"""
if self.health_check:
# This just makes it easier to compare each of the attributes
# and look for changes. Keys are attributes of the current
# health_check; values are desired values of new health_check
health_check_config = {
"target": self._get_health_check_target(),
"timeout": self.health_check['response_timeout'],
"interval": self.health_check['interval'],
"unhealthy_threshold": self.health_check['unhealthy_threshold'],
"healthy_threshold": self.health_check['healthy_threshold'],
}
update_health_check = False
# The health_check attribute is *not* set on newly created
# ELBs! So we have to create our own.
if not self.elb.health_check:
self.elb.health_check = HealthCheck()
for attr, desired_value in health_check_config.iteritems():
if getattr(self.elb.health_check, attr) != desired_value:
setattr(self.elb.health_check, attr, desired_value)
update_health_check = True
if update_health_check:
self.elb.configure_health_check(self.elb.health_check)
self.changed = True
def _check_attribute_support(self, attr):
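# Probe the installed boto release: LbAttributes only exposes attributes such as
# 'connection_draining' and 'cross_zone_load_balancing' in newer boto versions,
# so callers check here before trying to configure them.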
return hasattr(boto.ec2.elb.attributes.LbAttributes(), attr)
def _set_cross_az_load_balancing(self):
attributes = self.elb.get_attributes()
if self.cross_az_load_balancing:
attributes.cross_zone_load_balancing.enabled = True
else:
attributes.cross_zone_load_balancing.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'CrossZoneLoadBalancing',
attributes.cross_zone_load_balancing.enabled)
def _set_connection_draining_timeout(self):
attributes = self.elb.get_attributes()
if self.connection_draining_timeout is not None:
attributes.connection_draining.enabled = True
attributes.connection_draining.timeout = self.connection_draining_timeout
self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
else:
attributes.connection_draining.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
def _policy_name(self, policy_type):
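# Derive the policy name from this module's file name, e.g. (illustrative)
# 'ec2-elb-lb.py-LBCookieStickinessPolicyType', with underscores turned into hyphens
# and the policy type appended.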
return __file__.split('/')[-1].replace('_', '-') + '-' + policy_type
def _create_policy(self, policy_param, policy_meth, policy):
getattr(self.elb_conn, policy_meth )(policy_param, self.elb.name, policy)
def _delete_policy(self, elb_name, policy):
self.elb_conn.delete_lb_policy(elb_name, policy)
def _update_policy(self, policy_param, policy_meth, policy_attr, policy):
self._delete_policy(self.elb.name, policy)
self._create_policy(policy_param, policy_meth, policy)
def _set_listener_policy(self, listeners_dict, policy=[]):
for listener_port in listeners_dict:
if listeners_dict[listener_port].startswith('HTTP'):
self.elb_conn.set_lb_policies_of_listener(self.elb.name, listener_port, policy)
def _set_stickiness_policy(self, elb_info, listeners_dict, policy, **policy_attrs):
for p in getattr(elb_info.policies, policy_attrs['attr']):
if str(p.__dict__['policy_name']) == str(policy[0]):
if str(p.__dict__[policy_attrs['dict_key']]) != str(policy_attrs['param_value']):
self._set_listener_policy(listeners_dict)
self._update_policy(policy_attrs['param_value'], policy_attrs['method'], policy_attrs['attr'], policy[0])
self.changed = True
break
else:
self._create_policy(policy_attrs['param_value'], policy_attrs['method'], policy[0])
self.changed = True
self._set_listener_policy(listeners_dict, policy)
def select_stickiness_policy(self):
if self.stickiness:
if 'cookie' in self.stickiness and 'expiration' in self.stickiness:
self.module.fail_json(msg='\'cookie\' and \'expiration\' can not be set at the same time')
elb_info = self.elb_conn.get_all_load_balancers(self.elb.name)[0]
d = {}
for listener in elb_info.listeners:
d[listener[0]] = listener[2]
listeners_dict = d
if self.stickiness['type'] == 'loadbalancer':
policy = []
policy_type = 'LBCookieStickinessPolicyType'
if self.stickiness['enabled'] == True:
if 'expiration' not in self.stickiness:
self.module.fail_json(msg='expiration must be set when type is loadbalancer')
policy_attrs = {
'type': policy_type,
'attr': 'lb_cookie_stickiness_policies',
'method': 'create_lb_cookie_stickiness_policy',
'dict_key': 'cookie_expiration_period',
'param_value': self.stickiness['expiration']
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif self.stickiness['enabled'] == False:
if len(elb_info.policies.lb_cookie_stickiness_policies):
if elb_info.policies.lb_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
else:
self.changed = False
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
elif self.stickiness['type'] == 'application':
policy = []
policy_type = 'AppCookieStickinessPolicyType'
if self.stickiness['enabled'] == True:
if 'cookie' not in self.stickiness:
self.module.fail_json(msg='cookie must be set when type is application')
policy_attrs = {
'type': policy_type,
'attr': 'app_cookie_stickiness_policies',
'method': 'create_app_cookie_stickiness_policy',
'dict_key': 'cookie_name',
'param_value': self.stickiness['cookie']
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif self.stickiness['enabled'] == False:
if len(elb_info.policies.app_cookie_stickiness_policies):
if elb_info.policies.app_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
else:
self._set_listener_policy(listeners_dict)
def _get_health_check_target(self):
"""Compose target string from healthcheck parameters"""
protocol = self.health_check['ping_protocol'].upper()
path = ""
if protocol in ['HTTP', 'HTTPS'] and 'ping_path' in self.health_check:
path = self.health_check['ping_path']
return "%s:%s%s" % (protocol, self.health_check['ping_port'], path)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state={'required': True, 'choices': ['present', 'absent']},
name={'required': True},
listeners={'default': None, 'required': False, 'type': 'list'},
purge_listeners={'default': True, 'required': False, 'type': 'bool'},
zones={'default': None, 'required': False, 'type': 'list'},
purge_zones={'default': False, 'required': False, 'type': 'bool'},
security_group_ids={'default': None, 'required': False, 'type': 'list'},
health_check={'default': None, 'required': False, 'type': 'dict'},
subnets={'default': None, 'required': False, 'type': 'list'},
purge_subnets={'default': False, 'required': False, 'type': 'bool'},
scheme={'default': 'internet-facing', 'required': False},
connection_draining_timeout={'default': None, 'required': False},
cross_az_load_balancing={'default': None, 'required': False},
stickiness={'default': None, 'required': False, 'type': 'dict'}
)
)
module = AnsibleModule(
argument_spec=argument_spec,
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
name = module.params['name']
state = module.params['state']
listeners = module.params['listeners']
purge_listeners = module.params['purge_listeners']
zones = module.params['zones']
purge_zones = module.params['purge_zones']
security_group_ids = module.params['security_group_ids']
health_check = module.params['health_check']
subnets = module.params['subnets']
purge_subnets = module.params['purge_subnets']
scheme = module.params['scheme']
connection_draining_timeout = module.params['connection_draining_timeout']
cross_az_load_balancing = module.params['cross_az_load_balancing']
stickiness = module.params['stickiness']
if state == 'present' and not listeners:
module.fail_json(msg="At least one port is required for ELB creation")
if state == 'present' and not (zones or subnets):
module.fail_json(msg="At least one availability zone or subnet is required for ELB creation")
elb_man = ElbManager(module, name, listeners, purge_listeners, zones,
purge_zones, security_group_ids, health_check,
subnets, purge_subnets, scheme,
connection_draining_timeout, cross_az_load_balancing,
stickiness,
region=region, **aws_connect_params)
# check for unsupported attributes for this version of boto
if cross_az_load_balancing and not elb_man._check_attribute_support('cross_zone_load_balancing'):
module.fail_json(msg="You must install boto >= 2.18.0 to use the cross_az_load_balancing attribute")
if connection_draining_timeout and not elb_man._check_attribute_support('connection_draining'):
module.fail_json(msg="You must install boto >= 2.28.0 to use the connection_draining_timeout attribute")
if state == 'present':
elb_man.ensure_ok()
elif state == 'absent':
elb_man.ensure_gone()
ansible_facts = {'ec2_elb': 'info'}
ec2_facts_result = dict(changed=elb_man.changed,
elb=elb_man.get_info(),
ansible_facts=ansible_facts)
module.exit_json(**ec2_facts_result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 | -7,920,482,568,965,267,000 | 3,945,935,407,756,943,000 | 38.049596 | 152 | 0.592066 | false |
arista-eosplus/pyeapi | test/system/test_api_ospf.py | 1 | 9101 | #
# Copyright (c) 2016, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from random import randint
from systestlib import DutSystemTest
def clear_ospf_config(dut, pid=None):
if pid is None:
try:
pid = int(dut.get_config(params="section ospf")[0].split()[2])
dut.config(['no router ospf %d' % pid])
except IndexError:
'''No OSPF configured'''
pass
else:
dut.config(['no router ospf %d' % pid])
class TestApiOspf(DutSystemTest):
def test_get(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1", "router-id 1.1.1.1",
"network 2.2.2.0/24 area 0", "redistribute bgp"])
ospf_response = dut.api('ospf').get()
config = dict(router_id="1.1.1.1", ospf_process_id=1,
vrf='default',
networks=[dict(netmask='24', network="2.2.2.0",
area="0.0.0.0")],
redistributions=[dict(protocol="bgp")],
shutdown=False)
self.assertEqual(ospf_response, config)
def test_get_with_vrf(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 10 vrf test", "router-id 1.1.1.2",
"network 2.2.2.0/24 area 0", "redistribute bgp"])
ospf_response = dut.api('ospf').get()
config = dict(router_id="1.1.1.2", ospf_process_id=10, vrf='test',
networks=[dict(netmask='24', network="2.2.2.0",
area="0.0.0.0")],
redistributions=[dict(protocol="bgp")],
shutdown=False)
self.assertEqual(ospf_response, config)
clear_ospf_config(dut, 10)
def test_shutdown(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1", "network 1.1.1.1/32 area 0"])
ospf = dut.api('ospf')
response = ospf.set_shutdown()
self.assertTrue(response)
self.assertIn('shutdown', ospf.get_block("router ospf 1"))
def test_no_shutdown(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 10", "network 1.1.1.0/24 area 0",
"shutdown"])
ospf = dut.api('ospf')
response = ospf.set_no_shutdown()
self.assertTrue(response)
self.assertIn('no shutdown', ospf.get_block("router ospf 10"))
def test_delete(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 10"])
ospf = dut.api("ospf")
response = ospf.delete()
self.assertTrue(response)
self.assertEqual(None, ospf.get_block("router ospf"))
def test_create_valid_id(self):
for dut in self.duts:
clear_ospf_config(dut)
pid = randint(1, 65536)
ospf = dut.api("ospf")
response = ospf.create(pid)
self.assertTrue(response)
self.assertIn("router ospf {}".format(pid), dut.get_config())
def test_create_invalid_id(self):
for dut in self.duts:
clear_ospf_config(dut)
pid = randint(70000, 100000)
with self.assertRaises(ValueError):
dut.api("ospf").create(pid)
def test_create_with_vrf(self):
for dut in self.duts:
clear_ospf_config(dut)
pid = randint(1, 65536)
ospf = dut.api("ospf")
response = ospf.create(pid, vrf='test')
self.assertTrue(response)
self.assertIn("router ospf {} vrf {}".format(pid, 'test'),
dut.get_config())
def test_configure_ospf(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1"])
ospf = dut.api("ospf")
response = ospf.configure_ospf("router-id 1.1.1.1")
self.assertTrue(response)
self.assertIn("router-id 1.1.1.1", ospf.get_block("router ospf 1"))
def test_set_router_id(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1"])
ospf = dut.api("ospf")
response = ospf.set_router_id(randint(1, 65536))
self.assertFalse(response)
response = ospf.set_router_id("2.2.2.2")
self.assertTrue(response)
self.assertIn("router-id 2.2.2.2", ospf.get_block("router ospf 1"))
response = ospf.set_router_id(default=True)
self.assertTrue(response)
self.assertIn("no router-id", ospf.get_block("router ospf 1"))
response = ospf.set_router_id(disable=True)
self.assertTrue(response)
self.assertIn("no router-id", ospf.get_block("router ospf 1"))
def test_add_network(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1"])
ospf = dut.api("ospf")
response = ospf.add_network("2.2.2.0", "24", 1234)
self.assertTrue(response)
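# OSPF area 1234 is reported by EOS in dotted-decimal form: 1234 = 4*256 + 210,
# i.e. 0.0.4.210, which the assertion below expects.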
self.assertIn("network 2.2.2.0/24 area 0.0.4.210", ospf.get_block("router ospf 1"))
response = ospf.add_network("10.10.10.0", "24")
self.assertTrue(response)
self.assertIn("network 10.10.10.0/24 area 0.0.0.0", ospf.get_block("router ospf 1"))
def test_remove_network(self):
for dut in self.duts:
clear_ospf_config(dut)
ospf_config = ["router ospf 1", "network 2.2.2.0/24 area 0.0.0.0",
"network 3.3.3.1/32 area 1.1.1.1"]
dut.config(ospf_config)
ospf = dut.api("ospf")
response = ospf.remove_network("2.2.2.0", "24")
self.assertTrue(response)
response = ospf.remove_network("3.3.3.1", "32", "1.1.1.1")
self.assertTrue(response)
for config in ospf_config:
if "router ospf" not in config:
self.assertNotIn(config, ospf.get_block("router ospf 1"))
def test_add_redistribution(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1"])
ospf = dut.api("ospf")
protos = ['bgp', 'rip', 'static', 'connected']
for proto in protos:
if randint(1, 10) % 2 == 0:
response = ospf.add_redistribution(proto, 'test')
else:
response = ospf.add_redistribution(proto)
self.assertTrue(response)
for proto in protos:
self.assertIn("redistribute {}".format(proto), ospf.get_block("router ospf 1"))
with self.assertRaises(ValueError):
ospf.add_redistribution("NOT VALID")
def test_remove_redistribution(self):
for dut in self.duts:
clear_ospf_config(dut)
dut.config(["router ospf 1", "redistribute bgp", "redistribute static route-map test"])
ospf = dut.api("ospf")
response = ospf.remove_redistribution('bgp')
self.assertTrue(response)
response = ospf.remove_redistribution('static')
self.assertTrue(response)
self.assertNotIn("redistribute", ospf.get_block("router ospf 1"))
| bsd-3-clause | 7,123,533,092,923,747,000 | -697,010,952,761,051,400 | 40.940092 | 99 | 0.572465 | false |
praekelt/txtalert | txtalert/apps/gateway/migrations/0002_auto__add_field_sendsms_group__add_field_pleasecallme_group.py | 1 | 4637 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# all previous data belongs to Temba Lethu Clinic
from django.contrib.auth.models import Group
group, created = Group.objects.get_or_create(name="Temba Lethu")
# Adding field 'SendSMS.group'
db.add_column('gateway_sendsms', 'group', self.gf('django.db.models.fields.related.ForeignKey')(default=group.pk, to=orm['auth.Group']), keep_default=False)
# Adding field 'PleaseCallMe.group'
db.add_column('gateway_pleasecallme', 'group', self.gf('django.db.models.fields.related.ForeignKey')(default=group.pk, related_name='gateway_pleasecallme_set', to=orm['auth.Group']), keep_default=False)
def backwards(self, orm):
# Deleting field 'SendSMS.group'
db.delete_column('gateway_sendsms', 'group_id')
# Deleting field 'PleaseCallMe.group'
db.delete_column('gateway_pleasecallme', 'group_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'gateway.pleasecallme': {
'Meta': {'ordering': "['created_at']", 'object_name': 'PleaseCallMe'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'gateway_pleasecallme_set'", 'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'recipient_msisdn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sender_msisdn': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_id': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'gateway.sendsms': {
'Meta': {'object_name': 'SendSMS'},
'delivery': ('django.db.models.fields.DateTimeField', [], {}),
'delivery_timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'expiry': ('django.db.models.fields.DateTimeField', [], {}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'msisdn': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'priority': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'receipt': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'smstext': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'default': "'v'", 'max_length': '1'})
}
}
complete_apps = ['gateway']
| gpl-3.0 | 9,121,463,421,997,063,000 | -3,208,717,268,299,249,700 | 58.448718 | 210 | 0.569334 | false |
M4rtinK/pyside-android | tests/QtCore/qflags_test.py | 6 | 3405 | #!/usr/bin/python
'''Test cases for QFlags'''
import unittest
from PySide.QtCore import Qt, QTemporaryFile, QFile, QIODevice, QObject
class QFlagTest(unittest.TestCase):
'''Test case for usage of flags'''
def testCallFunction(self):
f = QTemporaryFile()
self.assertTrue(f.open())
fileName = f.fileName()
f.close()
f = QFile(fileName)
self.assertEqual(f.open(QIODevice.Truncate | QIODevice.Text | QIODevice.ReadWrite), True)
om = f.openMode()
self.assertEqual(om & QIODevice.Truncate, QIODevice.Truncate)
self.assertEqual(om & QIODevice.Text, QIODevice.Text)
self.assertEqual(om & QIODevice.ReadWrite, QIODevice.ReadWrite)
self.assertTrue(om == QIODevice.Truncate | QIODevice.Text | QIODevice.ReadWrite)
f.close()
class QFlagOperatorTest(unittest.TestCase):
'''Test case for operators in QFlags'''
def testInvert(self):
'''QFlags ~ (invert) operator'''
self.assertEqual(type(~QIODevice.ReadOnly), QIODevice.OpenMode)
def testOr(self):
'''QFlags | (or) operator'''
self.assertEqual(type(QIODevice.ReadOnly | QIODevice.WriteOnly), QIODevice.OpenMode)
def testAnd(self):
'''QFlags & (and) operator'''
self.assertEqual(type(QIODevice.ReadOnly & QIODevice.WriteOnly), QIODevice.OpenMode)
def testIOr(self):
'''QFlags |= (ior) operator'''
flag = Qt.WindowFlags()
self.assertTrue(Qt.Widget == 0)
self.assertFalse(flag & Qt.Widget)
result = flag & Qt.Widget
self.assertTrue(result == 0)
flag |= Qt.WindowMinimizeButtonHint
self.assertTrue(flag & Qt.WindowMinimizeButtonHint)
def testInvertOr(self):
'''QFlags ~ (invert) operator over the result of an | (or) operator'''
self.assertEqual(type(~(Qt.ItemIsSelectable | Qt.ItemIsEditable)), Qt.ItemFlags)
def testEqual(self):
'''QFlags == operator'''
flags = Qt.Window
flags |= Qt.WindowMinimizeButtonHint
flag_type = (flags & Qt.WindowType_Mask)
self.assertEqual(flag_type, Qt.Window)
self.assertEqual(Qt.KeyboardModifiers(Qt.ControlModifier), Qt.ControlModifier)
def testOperatorBetweenFlags(self):
'''QFlags & QFlags'''
flags = Qt.NoItemFlags | Qt.ItemIsUserCheckable
newflags = Qt.NoItemFlags | Qt.ItemIsUserCheckable
self.assertTrue(flags & newflags)
def testOperatorDifferentOrder(self):
'''Different ordering of arguments'''
flags = Qt.NoItemFlags | Qt.ItemIsUserCheckable
self.assertEqual(flags | Qt.ItemIsEnabled, Qt.ItemIsEnabled | flags)
class QFlagsOnQVariant(unittest.TestCase):
def testQFlagsOnQVariant(self):
o = QObject()
o.setProperty("foo", QIODevice.ReadOnly | QIODevice.WriteOnly)
self.assertEqual(type(o.property("foo")), QIODevice.OpenMode)
class QFlagsWrongType(unittest.TestCase):
def testWrongType(self):
'''Wrong type passed to QFlags binary operators'''
# wrap in lambdas so assertRaises, not the argument evaluation, catches the TypeError
self.assertRaises(TypeError, lambda: Qt.NoItemFlags | '43')
self.assertRaises(TypeError, lambda: Qt.NoItemFlags & '43')
self.assertRaises(TypeError, lambda: 'jabba' & Qt.NoItemFlags)
self.assertRaises(TypeError, lambda: 'hut' & Qt.NoItemFlags)
self.assertRaises(TypeError, lambda: Qt.NoItemFlags & QObject())
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | 7,075,662,824,957,770,000 | -1,605,409,666,401,724,400 | 36.01087 | 97 | 0.66138 | false |
pombredanne/MOG | tools/regression_tester.py | 14 | 3537 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tool for checking if patch contains a regression test.
By default runs against current patch but can be set to use any gerrit review
as specified by change number (uses 'git review -d').
Idea: take tests from patch to check, and run against code from previous patch.
If new tests pass, then no regression test, if new tests fails against old code
then either
* new tests depend on new code and cannot confirm regression test is valid
(false positive)
* new tests detects the bug being fixed (detect valid regression test)
Due to the risk of false positives, the results from this need some human
interpretation.
"""
import optparse
import string
import subprocess
import sys
def run(cmd, fail_ok=False):
print "running: %s" % cmd
obj = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True)
obj.wait()
if obj.returncode != 0 and not fail_ok:
print "The above command terminated with an error."
sys.exit(obj.returncode)
return obj.stdout.read()
def main():
usage = """
Tool for checking if a patch includes a regression test.
Usage: %prog [options]"""
parser = optparse.OptionParser(usage)
parser.add_option("-r", "--review", dest="review",
help="gerrit review number to test")
(options, args) = parser.parse_args()
if options.review:
original_branch = run("git rev-parse --abbrev-ref HEAD")
run("git review -d %s" % options.review)
else:
print ("no gerrit review number specified, running on latest commit"
"on current branch.")
test_works = False
# run new tests with old code
run("git checkout HEAD^ nova")
run("git checkout HEAD nova/tests")
# identify which tests have changed
tests = run("git whatchanged --format=oneline -1 | grep \"nova/tests\" "
"| cut -f2").split()
test_list = []
for test in tests:
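# Strip the trailing '.py' and turn the path into a dotted module name,
# e.g. 'nova/tests/test_foo.py' -> 'nova.tests.test_foo' (illustrative path).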
test_list.append(string.replace(test[0:-3], '/', '.'))
if test_list == []:
test_works = False
expect_failure = ""
else:
# run new tests, expect them to fail
expect_failure = run(("tox -epy27 %s 2>&1" % string.join(test_list)),
fail_ok=True)
if "FAILED (id=" in expect_failure:
test_works = True
# cleanup
run("git checkout HEAD nova")
if options.review:
new_branch = run("git status | head -1 | cut -d ' ' -f 4")
run("git checkout %s" % original_branch)
run("git branch -D %s" % new_branch)
print expect_failure
print ""
print "*******************************"
if test_works:
print "FOUND a regression test"
else:
print "NO regression test"
sys.exit(1)
if __name__ == "__main__":
main()
| apache-2.0 | -7,251,277,937,778,693,000 | -7,377,208,177,958,680,000 | 31.449541 | 79 | 0.635567 | false |
rghe/ansible | test/runner/lib/sanity/sanity_docs.py | 78 | 1312 | """Sanity test for documentation of sanity tests."""
from __future__ import absolute_import, print_function
import os
from lib.sanity import (
SanitySingleVersion,
SanityMessage,
SanityFailure,
SanitySuccess,
sanity_get_tests,
)
from lib.config import (
SanityConfig,
)
class SanityDocsTest(SanitySingleVersion):
"""Sanity test for documentation of sanity tests."""
# noinspection PyUnusedLocal
def test(self, args, targets): # pylint: disable=locally-disabled, unused-argument
"""
:type args: SanityConfig
:type targets: SanityTargets
:rtype: TestResult
"""
sanity_dir = 'docs/docsite/rst/dev_guide/testing/sanity'
sanity_docs = set(part[0] for part in (os.path.splitext(name) for name in os.listdir(sanity_dir)) if part[1] == '.rst')
sanity_tests = set(sanity_test.name for sanity_test in sanity_get_tests())
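# Set difference: any sanity test name without a matching <name>.rst page in the
# sanity docs directory is reported as missing below.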
missing = sanity_tests - sanity_docs
results = []
results += [SanityMessage(
message='missing docs for ansible-test sanity --test %s' % r,
path=os.path.join(sanity_dir, '%s.rst' % r),
) for r in sorted(missing)]
if results:
return SanityFailure(self.name, messages=results)
return SanitySuccess(self.name)
| gpl-3.0 | 4,808,937,239,580,606,000 | -7,638,433,165,672,497,000 | 28.818182 | 127 | 0.643293 | false |
DazWorrall/ansible | hacking/report.py | 46 | 6657 | #!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""A tool to aggregate data about Ansible source and testing into a sqlite DB for reporting."""
from __future__ import (absolute_import, print_function)
import argparse
import os
import requests
import sqlite3
import sys
DATABASE_PATH = os.path.expanduser('~/.ansible/report.db')
BASE_PATH = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..')) + '/'
ANSIBLE_PATH = os.path.join(BASE_PATH, 'lib')
ANSIBLE_TEST_PATH = os.path.join(BASE_PATH, 'test/runner')
if ANSIBLE_PATH not in sys.path:
sys.path.insert(0, ANSIBLE_PATH)
if ANSIBLE_TEST_PATH not in sys.path:
sys.path.insert(0, ANSIBLE_TEST_PATH)
from ansible.parsing.metadata import extract_metadata
from lib.target import walk_integration_targets
def main():
os.chdir(BASE_PATH)
args = parse_args()
args.func()
def parse_args():
try:
import argcomplete
except ImportError:
argcomplete = None
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(metavar='COMMAND')
subparsers.required = True # work-around for python 3 bug which makes subparsers optional
populate = subparsers.add_parser('populate',
help='populate report database')
populate.set_defaults(func=populate_database)
query = subparsers.add_parser('query',
help='query report database')
query.set_defaults(func=query_database)
if argcomplete:
argcomplete.autocomplete(parser)
args = parser.parse_args()
return args
def query_database():
if not os.path.exists(DATABASE_PATH):
exit('error: Database not found. Did you run `report.py populate` first?')
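# execvp replaces the current process with an interactive sqlite3 shell opened on
# the report database, so nothing after this call runs.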
os.execvp('sqlite3', ('sqlite3', DATABASE_PATH))
def populate_database():
populate_modules()
populate_coverage()
populate_integration_targets()
def populate_modules():
module_dir = os.path.join(BASE_PATH, 'lib/ansible/modules/')
modules_rows = []
module_statuses_rows = []
for root, dir_names, file_names in os.walk(module_dir):
for file_name in file_names:
module, extension = os.path.splitext(file_name)
if module == '__init__' or extension != '.py':
continue
if module.startswith('_'):
module = module[1:]
namespace = os.path.join(root.replace(module_dir, '')).replace('/', '.')
path = os.path.join(root, file_name)
with open(path, 'rb') as module_fd:
module_data = module_fd.read()
result = extract_metadata(module_data=module_data)
metadata = result[0]
if not metadata:
if module == 'async_wrapper':
continue
raise Exception('no metadata for: %s' % path)
modules_rows.append(dict(
module=module,
namespace=namespace,
path=path.replace(BASE_PATH, ''),
supported_by=metadata['supported_by'],
))
for status in metadata['status']:
module_statuses_rows.append(dict(
module=module,
status=status,
))
populate_data(dict(
modules=dict(
rows=modules_rows,
schema=(
('module', 'TEXT'),
('namespace', 'TEXT'),
('path', 'TEXT'),
('supported_by', 'TEXT'),
)),
module_statuses=dict(
rows=module_statuses_rows,
schema=(
('module', 'TEXT'),
('status', 'TEXT'),
)),
))
def populate_coverage():
response = requests.get('https://codecov.io/api/gh/ansible/ansible/tree/devel/?src=extension')
data = response.json()
files = data['commit']['report']['files']
coverage_rows = []
for path, data in files.items():
report = data['t']
coverage_rows.append(dict(
path=path,
coverage=float(report['c']),
lines=report['n'],
hit=report['h'],
partial=report['p'],
missed=report['m'],
))
populate_data(dict(
coverage=dict(
rows=coverage_rows,
schema=(
('path', 'TEXT'),
('coverage', 'REAL'),
('lines', 'INTEGER'),
('hit', 'INTEGER'),
('partial', 'INTEGER'),
('missed', 'INTEGER'),
)),
))
def populate_integration_targets():
targets = list(walk_integration_targets())
integration_targets_rows = [dict(
target=target.name,
type=target.type,
path=target.path,
script_path=target.script_path,
) for target in targets]
integration_target_aliases_rows = [dict(
target=target.name,
alias=alias,
) for target in targets for alias in target.aliases]
integration_target_modules_rows = [dict(
target=target.name,
module=module,
) for target in targets for module in target.modules]
populate_data(dict(
integration_targets=dict(
rows=integration_targets_rows,
schema=(
('target', 'TEXT'),
('type', 'TEXT'),
('path', 'TEXT'),
('script_path', 'TEXT'),
)),
integration_target_aliases=dict(
rows=integration_target_aliases_rows,
schema=(
('target', 'TEXT'),
('alias', 'TEXT'),
)),
integration_target_modules=dict(
rows=integration_target_modules_rows,
schema=(
('target', 'TEXT'),
('module', 'TEXT'),
)),
))
def create_table(cursor, name, columns):
schema = ', '.join('%s %s' % column for column in columns)
cursor.execute('DROP TABLE IF EXISTS %s' % name)
cursor.execute('CREATE TABLE %s (%s)' % (name, schema))
def populate_table(cursor, rows, name, columns):
create_table(cursor, name, columns)
values = ', '.join([':%s' % column[0] for column in columns])
for row in rows:
cursor.execute('INSERT INTO %s VALUES (%s)' % (name, values), row)
def populate_data(data):
connection = sqlite3.connect(DATABASE_PATH)
cursor = connection.cursor()
for table in data:
populate_table(cursor, data[table]['rows'], table, data[table]['schema'])
connection.commit()
connection.close()
if __name__ == '__main__':
main()
| gpl-3.0 | 3,457,784,396,009,382,400 | 6,507,620,216,207,888,000 | 26.282787 | 98 | 0.553102 | false |
nohe427/developer-support | arcsde-sql/python/create-sdo-geometry-golden-ratio/golden.py | 10 | 2731 | """
In Oracle 12c, using the SDO Geometry type, this script will create the
polygons from the golden ratio spiral.
Authors:
Danny B
-- original concept and code
-- added and subtracted coordinates to generate new polygons
Ashley S
-- cleared up rough spots by using phi to create perfect squares
-- translated code to use SDO Geometry instead of arcpy geometry
Tested in Python 2.7 32 bit
"""
#Change these two parameters
uin ="golden" #Table name
connection ="connectionstring" #connection, i.e. "dataowner/dataowner@instance/sid
import cx_Oracle
print("Creating connection and cursor")
db = cx_Oracle.connect(connection)
cursor = db.cursor();
print("Creating table")
cursor.execute("""
CREATE TABLE {0}(
FID INTEGER GENERATED ALWAYS AS IDENTITY START WITH 1 INCREMENT BY 1,
SHAPE SDO_GEOMETRY)
""".format(uin))
m = 100
phi0 = (1 + 5 ** 0.5) / 2
feature_info = [[[0,0], [0,1*m], [1*m, 1*m], [1*m, 0]]]
count = 1
exponent = 1
print("Doing some math")
for i in range(24):
phi = (1.0/(phi0**exponent))*m
a, b = feature_info[-1][2]
a_plus = a + phi
a_minus = a - phi
b_plus = b + phi
b_minus = b - phi
if count == 1:
coord = [[a, b], [a_plus, b], [a_plus, b_minus], [a, b_minus]]
elif count == 2:
coord = [[a, b], [a, b_minus], [a_minus, b_minus], [a_minus, b]]
elif count == 3:
coord = [[a, b], [a_minus, b], [a_minus, b_plus], [a, b_plus]]
else:
coord = [[a, b], [a, b_plus], [a_plus, b_plus], [a_plus, b]]
feature_info.append(coord)
count += 1
exponent += 1
if count == 5: count = 1
print("Inserting coordinates")
for coord in feature_info:
coord2 = "{0},{1}, {2},{3}, {4},{5}, {6},{7}, {0},{1}".format(
coord[0][0], coord[0][1], coord[1][0], coord[1][1], coord[2][0],
coord[2][1], coord[3][0], coord[3][1])
sdogeometry = """SDO_GEOMETRY(2003,NULL,NULL,SDO_ELEM_INFO_ARRAY(1,1003,1),SDO_ORDINATE_ARRAY({0}))""".format(coord2)
statement = "INSERT INTO {0} ( SHAPE ) VALUES ( {1} )".format(uin, sdogeometry)
cursor.execute(statement)
db.commit()
print("Adding to user_sdo_geom_metadata")
cursor.execute("""
INSERT INTO user_sdo_geom_metadata
(TABLE_NAME,
COLUMN_NAME,
DIMINFO,
SRID)
VALUES ('{0}', 'shape',
SDO_DIM_ARRAY( -- 200x200 extent
SDO_DIM_ELEMENT('X', 0, 200, 0.0000001),
SDO_DIM_ELEMENT('Y', 0, 200, 0.0000001)
),
NULL)
""".format(uin))
print("Making the spatial index")
cursor.execute("""CREATE INDEX {0}_spatial_idx ON {0}(SHAPE) INDEXTYPE IS MDSYS.SPATIAL_INDEX""".format(uin))
db.commit()
cursor.close()
db.close()
print("Check the table!")
| apache-2.0 | -5,798,033,944,900,678,000 | -3,184,114,078,785,048,600 | 25.77451 | 121 | 0.596851 | false |
plotly/plotly.py | packages/python/plotly/plotly/validators/mesh3d/__init__.py | 1 | 6255 | import sys
if sys.version_info < (3, 7):
from ._zsrc import ZsrcValidator
from ._zhoverformat import ZhoverformatValidator
from ._zcalendar import ZcalendarValidator
from ._z import ZValidator
from ._ysrc import YsrcValidator
from ._yhoverformat import YhoverformatValidator
from ._ycalendar import YcalendarValidator
from ._y import YValidator
from ._xsrc import XsrcValidator
from ._xhoverformat import XhoverformatValidator
from ._xcalendar import XcalendarValidator
from ._x import XValidator
from ._visible import VisibleValidator
from ._vertexcolorsrc import VertexcolorsrcValidator
from ._vertexcolor import VertexcolorValidator
from ._uirevision import UirevisionValidator
from ._uid import UidValidator
from ._textsrc import TextsrcValidator
from ._text import TextValidator
from ._stream import StreamValidator
from ._showscale import ShowscaleValidator
from ._showlegend import ShowlegendValidator
from ._scene import SceneValidator
from ._reversescale import ReversescaleValidator
from ._opacity import OpacityValidator
from ._name import NameValidator
from ._metasrc import MetasrcValidator
from ._meta import MetaValidator
from ._lightposition import LightpositionValidator
from ._lighting import LightingValidator
from ._legendrank import LegendrankValidator
from ._legendgrouptitle import LegendgrouptitleValidator
from ._legendgroup import LegendgroupValidator
from ._ksrc import KsrcValidator
from ._k import KValidator
from ._jsrc import JsrcValidator
from ._j import JValidator
from ._isrc import IsrcValidator
from ._intensitysrc import IntensitysrcValidator
from ._intensitymode import IntensitymodeValidator
from ._intensity import IntensityValidator
from ._idssrc import IdssrcValidator
from ._ids import IdsValidator
from ._i import IValidator
from ._hovertextsrc import HovertextsrcValidator
from ._hovertext import HovertextValidator
from ._hovertemplatesrc import HovertemplatesrcValidator
from ._hovertemplate import HovertemplateValidator
from ._hoverlabel import HoverlabelValidator
from ._hoverinfosrc import HoverinfosrcValidator
from ._hoverinfo import HoverinfoValidator
from ._flatshading import FlatshadingValidator
from ._facecolorsrc import FacecolorsrcValidator
from ._facecolor import FacecolorValidator
from ._delaunayaxis import DelaunayaxisValidator
from ._customdatasrc import CustomdatasrcValidator
from ._customdata import CustomdataValidator
from ._contour import ContourValidator
from ._colorscale import ColorscaleValidator
from ._colorbar import ColorbarValidator
from ._coloraxis import ColoraxisValidator
from ._color import ColorValidator
from ._cmin import CminValidator
from ._cmid import CmidValidator
from ._cmax import CmaxValidator
from ._cauto import CautoValidator
from ._autocolorscale import AutocolorscaleValidator
from ._alphahull import AlphahullValidator
else:
from _plotly_utils.importers import relative_import
__all__, __getattr__, __dir__ = relative_import(
__name__,
[],
[
"._zsrc.ZsrcValidator",
"._zhoverformat.ZhoverformatValidator",
"._zcalendar.ZcalendarValidator",
"._z.ZValidator",
"._ysrc.YsrcValidator",
"._yhoverformat.YhoverformatValidator",
"._ycalendar.YcalendarValidator",
"._y.YValidator",
"._xsrc.XsrcValidator",
"._xhoverformat.XhoverformatValidator",
"._xcalendar.XcalendarValidator",
"._x.XValidator",
"._visible.VisibleValidator",
"._vertexcolorsrc.VertexcolorsrcValidator",
"._vertexcolor.VertexcolorValidator",
"._uirevision.UirevisionValidator",
"._uid.UidValidator",
"._textsrc.TextsrcValidator",
"._text.TextValidator",
"._stream.StreamValidator",
"._showscale.ShowscaleValidator",
"._showlegend.ShowlegendValidator",
"._scene.SceneValidator",
"._reversescale.ReversescaleValidator",
"._opacity.OpacityValidator",
"._name.NameValidator",
"._metasrc.MetasrcValidator",
"._meta.MetaValidator",
"._lightposition.LightpositionValidator",
"._lighting.LightingValidator",
"._legendrank.LegendrankValidator",
"._legendgrouptitle.LegendgrouptitleValidator",
"._legendgroup.LegendgroupValidator",
"._ksrc.KsrcValidator",
"._k.KValidator",
"._jsrc.JsrcValidator",
"._j.JValidator",
"._isrc.IsrcValidator",
"._intensitysrc.IntensitysrcValidator",
"._intensitymode.IntensitymodeValidator",
"._intensity.IntensityValidator",
"._idssrc.IdssrcValidator",
"._ids.IdsValidator",
"._i.IValidator",
"._hovertextsrc.HovertextsrcValidator",
"._hovertext.HovertextValidator",
"._hovertemplatesrc.HovertemplatesrcValidator",
"._hovertemplate.HovertemplateValidator",
"._hoverlabel.HoverlabelValidator",
"._hoverinfosrc.HoverinfosrcValidator",
"._hoverinfo.HoverinfoValidator",
"._flatshading.FlatshadingValidator",
"._facecolorsrc.FacecolorsrcValidator",
"._facecolor.FacecolorValidator",
"._delaunayaxis.DelaunayaxisValidator",
"._customdatasrc.CustomdatasrcValidator",
"._customdata.CustomdataValidator",
"._contour.ContourValidator",
"._colorscale.ColorscaleValidator",
"._colorbar.ColorbarValidator",
"._coloraxis.ColoraxisValidator",
"._color.ColorValidator",
"._cmin.CminValidator",
"._cmid.CmidValidator",
"._cmax.CmaxValidator",
"._cauto.CautoValidator",
"._autocolorscale.AutocolorscaleValidator",
"._alphahull.AlphahullValidator",
],
)
| mit | 4,557,576,266,053,295,000 | -7,207,701,649,580,732,000 | 41.263514 | 60 | 0.667306 | false |
lizardsystem/freq | freq/lizard_connector.py | 1 | 30266 | import copy
import datetime as dt
import json
import logging
from pprint import pprint # left here for debugging purposes
from time import time
from time import sleep
import urllib
import numpy as np
import django.core.exceptions
from freq import jsdatetime
try:
from django.conf import settings
USR, PWD = settings.USR, settings.PWD
except django.core.exceptions.ImproperlyConfigured:
    print('WARNING: no USR and PWD found in settings. USR and PWD should have '
          'been set beforehand')
USR = None
PWD = None
# When you use this script stand alone, please set your login information here:
# USR = ****** # Replace the stars with your user name.
# PWD = ****** # Replace the stars with your password.
logger = logging.getLogger(__name__)
def join_urls(*args):
return '/'.join(args)
class LizardApiError(Exception):
pass
class Base(object):
"""
Base class to connect to the different endpoints of the lizard-api.
:param data_type: endpoint of the lizard-api one wishes to connect to.
:param username: login username
:param password: login password
    :param use_header: when set to False, no login and password are sent
        with the query
    :param extra_queries: in case one wishes to set default queries for a
        certain data type, this is the place to do so.
    :param max_results: maximum number of results a query may return before
        a LizardApiError is raised.
    A short usage sketch follows this class definition.
"""
username = USR
password = PWD
max_results = 1000
@property
def extra_queries(self):
"""
Overwrite class to add queries
:return: dictionary with extra queries
"""
return {}
def organisation_query(self, organisation, add_query_string='location__'):
org_query = {}
if isinstance(organisation, str):
org_query.update({add_query_string + "organisation__unique_id":
organisation})
elif organisation:
org_query.update({
add_query_string + "organisation__unique_id": ','.join(
org for org in organisation)
})
if org_query:
return dict([urllib.parse.urlencode(org_query).split('=')])
else:
return {}
def __init__(self, base="https://ggmn.lizard.net", use_header=False,
data_type=None):
"""
:param base: the site one wishes to connect to. Defaults to the
Lizard ggmn production site.
"""
if data_type:
self.data_type = data_type
self.use_header = use_header
self.queries = {}
self.results = []
if base.startswith('http'):
self.base = base
else:
self.base = join_urls('https:/', base)
# without extra '/' ^^, this is added in join_urls
self.base_url = join_urls(self.base, 'api/v2', self.data_type) + '/'
def get(self, count=True, uuid=None, **queries):
"""
Query the api.
For possible queries see: https://nxt.staging.lizard.net/doc/api.html
Stores the api-response as a dict in the results attribute.
:param queries: all keyword arguments are used as queries.
:return: a dictionary of the api-response.
"""
if self.max_results:
queries.update({'page_size': self.max_results, 'format': 'json'})
queries.update(self.extra_queries)
queries.update(getattr(self, "queries", {}))
query = '?' + '&'.join(str(key) + '=' +
(('&' + str(key) + '=').join(value)
if isinstance(value, list) else str(value))
for key, value in queries.items())
url = urllib.parse.urljoin(self.base_url, str(uuid)) if uuid else \
self.base_url + query
try:
self.fetch(url)
except urllib.error.HTTPError: # TODO remove hack to prevent 420 error
self.json = {'results': [], 'count': 0}
try:
logger.debug('Number found %s : %s with URL: %s', self.data_type,
self.json.get('count', 0), url)
except (KeyError, AttributeError):
logger.debug('Got results from %s with URL: %s',
self.data_type, url)
self.parse()
return self.results
def fetch(self, url):
"""
GETs parameters from the api based on an url in a JSON format.
Stores the JSON response in the json attribute.
:param url: full query url: should be of the form:
[base_url]/api/v2/[endpoint]/?[query_key]=[query_value]&...
:return: the JSON from the response
"""
if self.use_header:
request_obj = urllib.request.Request(url, headers=self.header)
else:
request_obj = urllib.request.Request(url)
try:
with urllib.request.urlopen(request_obj) as resp:
encoding = resp.headers.get_content_charset()
encoding = encoding if encoding else 'UTF-8'
content = resp.read().decode(encoding)
self.json = json.loads(content)
except Exception:
logger.exception("got error from: %s", url)
raise
return self.json
def parse(self):
"""
Parse the json attribute and store it to the results attribute.
All pages of a query are parsed. If the max_results attribute is
exceeded an ApiError is raised.
"""
while True:
try:
if self.json['count'] > self.max_results:
raise LizardApiError(
'Too many results: {} found, while max {} are '
'accepted'.format(self.json['count'], self.max_results)
)
self.results += self.json['results']
next_url = self.json.get('next')
if next_url:
self.fetch(next_url)
else:
break
except KeyError:
self.results += [self.json]
break
except IndexError:
break
def parse_elements(self, element):
"""
Get a list of a certain element from the root of the results attribute.
:param element: the element you wish to get.
:return: A list of all elements in the root of the results attribute.
"""
self.parse()
return [x[element] for x in self.results]
@property
def header(self):
"""
The header with credentials for the api.
"""
if self.use_header:
return {
"username": self.username,
"password": self.password
}
return {}
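# Illustrative usage sketch (hypothetical; endpoint and query values are placeholders):
# how Base is typically driven for read-only queries against one endpoint.
def _example_base_usage():
    api = Base(data_type='timeseries', use_header=False)
    api.get(name='GWmMSL')              # keyword arguments become query parameters
    return api.parse_elements('uuid')   # e.g. collect the uuid of every result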
class Organisations(Base):
"""
Makes a connection to the organisations endpoint of the lizard api.
"""
data_type = 'organisations'
def all(self, organisation=None):
"""
:return: a list of organisations belonging one has access to
(with the credentials from the header attribute)
"""
if organisation:
self.get(unique_id=organisation)
else:
self.get()
self.parse()
return self.parse_elements('unique_id')
class Locations(Base):
"""
Makes a connection to the locations endpoint of the lizard api.
"""
def __init__(self, base="https://ggmn.lizard.net", use_header=False):
self.data_type = 'locations'
self.uuids = []
super().__init__(base, use_header)
def bbox(self, south_west, north_east, organisation=None):
"""
Find all locations within a certain bounding box.
returns records within bounding box using Bounding Box format (min Lon,
min Lat, max Lon, max Lat). Also returns features with overlapping
geometry.
        :param south_west: latitude and longitude of the south-western point
        :param north_east: latitude and longitude of the north-eastern point
:return: a dictionary of the api-response.
"""
min_lat, min_lon = south_west
max_lat, max_lon = north_east
coords = self.commaify(min_lon, min_lat, max_lon, max_lat)
org_query = self.organisation_query(organisation, '')
self.get(in_bbox=coords, **org_query)
def distance_to_point(self, distance, lat, lon, organisation=None):
"""
        Returns records within `distance` meters from a point. The distance in
        meters is converted to WGS84 degrees and is thus an approximation.
        :param distance: distance from the point in meters
        :param lon: longitude of the point
        :param lat: latitude of the point
:return: a dictionary of the api-response.
"""
coords = self.commaify(lon, lat)
org_query = self.organisation_query(organisation, '')
self.get(distance=distance, point=coords, **org_query)
def commaify(self, *args):
"""
:return: a comma-seperated string of the given arguments
"""
return ','.join(str(x) for x in args)
def coord_uuid_name(self):
"""
        Collects the coordinates, UUIDs and names of the locations in the results.
Use after a query is made.
:return: a dictionary with coordinates, UUIDs and names
"""
result = {}
for x in self.results:
if x['uuid'] not in self.uuids:
geom = x.get('geometry') or {}
result[x['uuid']] = {
'coordinates': geom.get(
'coordinates', ['','']),
'name': x['name']
}
self.uuids.append(x['uuid'])
return result
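# Illustrative usage sketch (hypothetical; coordinates are placeholders): fetch all
# locations inside a bounding box and read back their coordinates and names.
def _example_locations_bbox():
    locations = Locations(use_header=False)
    locations.bbox(south_west=[50.5, 3.2], north_east=[53.6, 7.3])
    return locations.coord_uuid_name()  # {uuid: {'coordinates': [...], 'name': ...}}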
class TaskAPI(Base):
data_type = 'tasks'
def poll(self, url=None):
if url is None or not url.startswith('http'):
return
self.fetch(url)
@property
def status(self):
try:
logger.debug('Task status: %s', self.json.get("task_status"))
status = self.json.get("task_status")
if status is None:
logger.debug('Task status: NONE')
return "NONE"
return status
except AttributeError:
logger.debug('Task status: NONE')
return "NONE"
def timeseries_csv(self, organisation, extra_queries_ts):
if self.status != "SUCCESS":
raise LizardApiError('Download not ready.')
url = self.json.get("result_url")
self.fetch(url)
self.results = []
self.parse()
csv = (
[result['name'], result['uuid'],
jsdatetime.js_to_datestring(event['timestamp']), event['max']]
for result in self.results for event in result['events']
)
loc = Locations(use_header=self.use_header)
extra_queries = {
key if not key.startswith("location__") else key[10:]: value
for key, value in extra_queries_ts.items()
}
org_query = self.organisation_query(organisation, '')
extra_queries.update(**org_query)
loc.get(**extra_queries)
coords = loc.coord_uuid_name()
headers = (
[
r['uuid'], r['name'], coords[r['location']['uuid']]['name'],
coords[r['location']['uuid']]['coordinates'][0],
coords[r['location']['uuid']]['coordinates'][1]
]
for r in self.results
)
return headers, csv
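# Illustrative usage sketch (hypothetical; the poll interval and arguments are
# placeholders): poll an asynchronous export until its CSV download is ready.
# `sleep` is imported at the top of this module.
def _example_task_poll(poll_url, organisation, extra_queries):
    task = TaskAPI(use_header=False)
    while task.status != "SUCCESS":
        sleep(5)
        task.poll(poll_url)
    return task.timeseries_csv(organisation, extra_queries)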
class TimeSeries(Base):
"""
Makes a connection to the timeseries endpoint of the lizard api.
"""
def __init__(self, base="https://ggmn.lizard.net", use_header=False):
self.data_type = 'timeseries'
self.uuids = []
self.statistic = None
super().__init__(base, use_header)
def location_name(self, name, organisation=None):
"""
Returns time series metadata for a location by name.
:param name: name of a location
        :return: a dictionary with nested location, aquo quantities and
            events.
"""
org_query = self.organisation_query(organisation)
return self.get(location__name=name, **org_query)
def location_uuid(self, loc_uuid, start='0001-01-01T00:00:00Z', end=None,
organisation=None):
"""
Returns time series for a location by location-UUID.
:param loc_uuid: name of a location
:param start: start timestamp in ISO 8601 format
:param end: end timestamp in ISO 8601 format, defaults to now
        :return: a dictionary with nested location, aquo quantities and
            events.
"""
org_query = self.organisation_query(organisation)
self.get(location__uuid=loc_uuid, **org_query)
timeseries_uuids = [x['uuid'] for x in self.results]
self.results = []
for ts_uuid in timeseries_uuids:
ts = TimeSeries(self.base, use_header=self.use_header)
ts.uuid(ts_uuid, start, end, organisation)
self.results += ts.results
return self.results
def uuid(self, ts_uuid, start='0001-01-01T00:00:00Z', end=None,
organisation=None):
"""
Returns time series for a timeseries by timeseries-UUID.
:param ts_uuid: uuid of a timeseries
:param start: start timestamp in ISO 8601 format
:param end: end timestamp in ISO 8601 format
        :return: a dictionary with nested location, aquo quantities and
            events.
"""
if not end:
end = jsdatetime.now_iso()
old_base_url = self.base_url
self.base_url += ts_uuid + "/"
org_query = self.organisation_query(organisation)
self.get(start=start, end=end, **org_query)
self.base_url = old_base_url
def start_csv_task(self, start='0001-01-01T00:00:00Z', end=None,
organisation=None):
if not end:
end = jsdatetime.now_iso()
if isinstance(start, int):
start -= 10000
if isinstance(end, int):
end += 10000
org_query = self.organisation_query(organisation)
        poll_url = self.get(
            start=start,
            end=end,
            format="json",
            # 'async' is a reserved word from Python 3.7 on, so pass it via unpacking
            **{"async": "true"},
            **org_query
        )[0]['url']
logger.debug("Async task url %s", poll_url)
return poll_url, self.extra_queries
def bbox(self, south_west, north_east, statistic=None,
start='0001-01-01T00:00:00Z', end=None, organisation=None):
"""
Find all timeseries within a certain bounding box.
Returns records within bounding box using Bounding Box format (min Lon,
min Lat, max Lon, max Lat). Also returns features with overlapping
geometry.
        :param south_west: latitude and longitude of the south-western point
        :param north_east: latitude and longitude of the north-eastern point
        :param start: start timestamp in ISO 8601 format
:param end: end timestamp in ISO 8601 format
:return: a dictionary of the api-response.
"""
if not end:
end = jsdatetime.now_iso()
if isinstance(start, int):
start -= 10000
if isinstance(end, int):
end += 10000
min_lat, min_lon = south_west
max_lat, max_lon = north_east
polygon_coordinates = [
[min_lon, min_lat],
[min_lon, max_lat],
[max_lon, max_lat],
[max_lon, min_lat],
[min_lon, min_lat],
]
points = [' '.join([str(x), str(y)]) for x, y in polygon_coordinates]
geom_within = {'a': 'POLYGON ((' + ', '.join(points) + '))'}
geom_within = urllib.parse.urlencode(geom_within).split('=')[1]
org_query = self.organisation_query(organisation)
self.statistic = statistic
if statistic == 'mean':
statistic = ['count', 'sum']
elif not statistic:
statistic = ['min', 'max', 'count', 'sum']
self.statistic = None
elif statistic == 'range (max - min)':
statistic = ['min', 'max']
elif statistic == 'difference (last - first)':
statistic = 'count'
elif statistic == 'difference (mean last - first year)':
year = dt.timedelta(days=366)
first_end = jsdatetime.datetime_to_js(jsdatetime.js_to_datetime(start) + year)
last_start = jsdatetime.datetime_to_js(jsdatetime.js_to_datetime(end) - year)
self.get(
start=start,
end=first_end,
min_points=1,
fields=['count', 'sum'],
location__geom_within=geom_within,
**org_query
)
first_year = {}
for r in self.results:
try:
first_year[r['location']['uuid']] = {
'first_value_timestamp': r['first_value_timestamp'],
'mean': r['events'][0]['sum'] / r['events'][0]['count']
}
except IndexError:
first_year[r['location']['uuid']] = {
'first_value_timestamp': np.nan,
'mean': np.nan
}
self.results = []
self.get(
start=last_start,
end=end,
min_points=1,
fields=['count', 'sum'],
location__geom_within=geom_within,
**org_query
)
for r in self.results:
try:
r['events'][0]['difference (mean last - first year)'] = \
r['events'][0]['sum'] / r['events'][0]['count'] - \
first_year[r['location']['uuid']]['mean']
r['first_value_timestamp'] = \
first_year[
r['location']['uuid']]['first_value_timestamp']
except IndexError:
r['events'] = [{
'difference (mean last - first year)': np.nan}]
r['first_value_timestamp'] = np.nan
r['last_value_timestamp'] = np.nan
return
self.get(
start=start,
end=end,
min_points=1,
fields=statistic,
location__geom_within=geom_within,
**org_query
)
def ts_to_dict(self, statistic=None, values=None,
start_date=None, end_date=None, date_time='js'):
"""
:param date_time: default: js. Several options:
'js': javascript integer datetime representation
'dt': python datetime object
'str': date in date format (dutch representation)
"""
if len(self.results) == 0:
self.response = {}
return self.response
if values:
values = values
else:
values = {}
if not statistic and self.statistic:
statistic = self.statistic
# np array with cols: 'min', 'max', 'sum', 'count', 'first', 'last'
if not statistic:
stats1 = ('min', 'max', 'sum', 'count')
stats2 = (
(0, 'min'),
(1, 'max'),
(2, 'mean'),
(3, 'range (max - min)'),
(4, 'difference (last - first)'),
(5, 'difference (mean last - first year)')
)
start_index = 6
else:
if statistic == 'mean':
stats1 = ('sum', 'count')
elif statistic == 'range (max - min)':
stats1 = ('min', 'max')
else:
stats1 = (statistic, )
stats2 = ((0, statistic), )
start_index = int(statistic == 'mean') + 1
ts = []
for result in self.results:
try:
timestamps = [int(result['first_value_timestamp']),
int(result['last_value_timestamp'])]
            except (ValueError, TypeError):
                # e.g. int(None) when a timestamp is missing
                timestamps = [np.nan, np.nan]
if not len(result['events']):
y = 2 if statistic == 'difference (mean last - first year)' \
else 0
ts.append(
[np.nan for _ in range(len(stats1) + y)] + timestamps)
else:
ts.append([float(result['events'][0][s]) for s in stats1] +
timestamps)
npts = np.array(ts)
if statistic:
if statistic == 'mean':
stat = (npts[:, 0] / npts[:, 1]).reshape(-1, 1)
elif statistic == 'range (max - min)':
stat = (npts[:, 1] - npts[:, 0]).reshape(-1, 1)
elif statistic == 'difference (last - first)':
stat = (npts[:, 1] - npts[:, 0]).reshape(-1, 1)
else:
stat = npts[:, 0].reshape(-1, 1)
npts_calculated = np.hstack(
(stat, npts[:, slice(start_index, -1)]))
else:
npts_calculated = np.hstack((
npts[:, 0:2],
(npts[:, 2] / npts[:, 3]).reshape(-1, 1),
(npts[:, 1] - npts[:, 0]).reshape(-1, 1),
npts[:, 4:]
))
for i, row in enumerate(npts_calculated):
location_uuid = self.results[i]['location']['uuid']
loc_dict = values.get(location_uuid, {})
loc_dict.update({stat: 'NaN' if np.isnan(row[i]) else row[i]
for i, stat in stats2})
loc_dict['timeseries_uuid'] = self.results[i]['uuid']
values[location_uuid] = loc_dict
npts_min = np.nanmin(npts_calculated, 0)
npts_max = np.nanmax(npts_calculated, 0)
extremes = {
stat: {
'min': npts_min[i] if not np.isnan(npts_min[i]) else 0,
'max': npts_max[i] if not np.isnan(npts_max[i]) else 0
} for i, stat in stats2
}
dt_conversion = {
'js': lambda x: x,
'dt': jsdatetime.js_to_datetime,
'str': jsdatetime.js_to_datestring
}[date_time]
if statistic != 'difference (mean last - first year)':
start = dt_conversion(max(jsdatetime.round_js_to_date(start_date),
jsdatetime.round_js_to_date(npts_min[-2])))
end = dt_conversion(min(jsdatetime.round_js_to_date(end_date),
jsdatetime.round_js_to_date(npts_max[-1])))
else:
start = dt_conversion(jsdatetime.round_js_to_date(start_date))
end = dt_conversion(jsdatetime.round_js_to_date(end_date))
self.response = {
"extremes": extremes,
"dates": {
"start": start,
"end": end
},
"values": values
}
return self.response
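# Illustrative usage sketch (hypothetical; coordinates are placeholders and start/end are
# millisecond timestamps like the ones in the __main__ block at the bottom of this
# module): mean value per location inside a bounding box, formatted via ts_to_dict.
def _example_timeseries_bbox(start, end):
    ts = TimeSeries(use_header=False)
    ts.bbox(south_west=[50.5, 3.2], north_east=[53.6, 7.3], statistic='mean',
            start=start, end=end)
    return ts.ts_to_dict(start_date=start, end_date=end, date_time='str')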
class GroundwaterLocations(Locations):
"""
Makes a connection to the locations endpoint of the lizard api.
Only selects GroundwaterStations.
"""
@property
def extra_queries(self):
return {
"object_type__model": 'filter'
}
class GroundwaterTimeSeries(TimeSeries):
"""
Makes a connection to the timeseries endpoint of the lizard api.
Only selects GroundwaterStations.
"""
@property
def extra_queries(self):
return {
"location__object_type__model": 'filter'
}
class GroundwaterTimeSeriesAndLocations(object):
def __init__(self):
self.locs = GroundwaterLocations()
self.ts = GroundwaterTimeSeries()
self.values = {}
def bbox(self, south_west, north_east, start='0001-01-01T00:00:00Z',
end=None, groundwater_type="GWmMSL"):
if not end:
self.end = jsdatetime.now_iso()
else:
self.end = end
self.start = start
self.ts.queries = {"name": groundwater_type}
self.locs.bbox(south_west, north_east)
self.ts.bbox(south_west=south_west, north_east=north_east,
start=start, end=self.end)
def locs_to_dict(self, values=None):
if values:
self.values = values
for loc in self.locs.results:
self.values.get(loc['uuid'], {}).update({
'coordinates': loc['geometry']['coordinates'],
'name': loc['name']
})
self.response = self.values
def results_to_dict(self):
self.locs_to_dict()
self.ts.ts_to_dict(values=self.values)
return self.ts.response
class RasterFeatureInfo(Base):
data_type = 'raster-aggregates'
def wms(self, lat, lng, layername, extra_params=None):
if 'igrac' in layername:
self.base_url = "https://raster.lizard.net/wms"
lat_f = float(lat)
lng_f = float(lng)
self.get(
request="getfeatureinfo",
layers=layername,
width=1,
height=1,
i=0,
j=0,
srs="epsg:4326",
bbox=','.join(
[lng, lat, str(lng_f+0.00001), str(lat_f+0.00001)]),
index="world"
)
try:
self.results = {"data": [self.results[1]]}
except IndexError:
self.results = {"data": ['null']}
elif layername == 'aquifers':
self.base_url = "https://ggis.un-igrac.org/geoserver/tbamap2015/wms"
extra_params.update({
'request': 'GetFeatureInfo',
'service': 'WMS',
'srs': 'EPSG:4326',
'info_format': 'application/json'
})
self.get(**extra_params)
self.results = {
'data': self.results['features'][0]['properties']['aq_name']}
else:
self.get(
agg='curve',
geom='POINT(' + lng + '+' + lat + ')',
srs='EPSG:4326',
raster_names=layername,
count=False
)
return self.results
def parse(self):
self.results = self.json
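# Illustrative usage sketch (hypothetical; layer name and coordinates are placeholders):
# a point query against a raster layer. Note that wms() expects lat/lng as strings
# because it concatenates them into the request.
def _example_raster_point_query():
    info = RasterFeatureInfo(use_header=False)
    return info.wms(lat='52.0', lng='4.3', layername='extern:some-raster-layer')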
class RasterLimits(Base):
data_type = 'wms'
def __init__(self, base="https://raster.lizard.net",
use_header=False):
super().__init__(base, use_header)
self.base_url = join_urls(base, self.data_type)
self.max_results = None
def get_limits(self, layername, bbox):
try:
return self.get(
request='getlimits',
layers=layername,
bbox=bbox,
width=16,
height=16,
srs='epsg:4326'
)
except urllib.error.HTTPError:
return [[-1000, 1000]]
def parse(self):
self.results = self.json
class Filters(Base):
data_type = "filters"
def from_timeseries_uuid(self, uuid):
# We know the timeseries uuid. Timeseries are connected to locations
# and the locations are connected to the filters that contain the
# relevant information.
# first get the location uuid from the timeseries.
ts = Base(use_header=self.use_header, data_type='timeseries')
location_data = ts.get(uuid=uuid)[0]['location']
location_uuid = location_data.get('uuid')
# surface_level is stored in the extra_metadata field of a location
try:
surface_level = str(location_data.get("extra_metadata")
.get("surface_level")) + " (m)"
except AttributeError:
surface_level = None
# next get the location for the filter id
location = Base(use_header=self.use_header, data_type='locations')
try:
filter_id = location.get(uuid=location_uuid)[0].get(
'object').get('id')
except TypeError:
# the location doesn't connect to a filter, return empty
return {}
if filter_id:
# next get and return the filter metadata
gw_filter = Base(use_header=self.use_header, data_type='filters')
result = gw_filter.get(uuid=filter_id)[0]
result.update({
"surface_level": surface_level
})
return result
return {}
class Users(Base):
data_type = "users"
def get_organisations(self, username):
self.get(username=username)
if len(self.results) > 1 or len(self.results) == 0:
if len(self.results):
raise LizardApiError("Username is not unique")
raise LizardApiError("Username not found")
organisations_url = self.results[0].get("organisations_url")
organisations = {
org['name']: org['unique_id'] for org in
self.fetch(organisations_url)
}
logger.debug('Found %d organisations for url: %s', len(organisations),
organisations_url)
if settings.DEFAULT_ORGANISATION_NAME in organisations.keys():
default_org = [(
settings.DEFAULT_ORGANISATION_NAME,
organisations[settings.DEFAULT_ORGANISATION_NAME])
]
del organisations[settings.DEFAULT_ORGANISATION_NAME]
return default_org + sorted(organisations.items())
return sorted(organisations.items())
if __name__ == '__main__':
end = "1452470400000"
start = "-2208988800000"
start_time = time()
GWinfo = GroundwaterTimeSeriesAndLocations()
GWinfo.bbox(south_west=[-65.80277639340238, -223.9453125], north_east=[
81.46626086056541, 187.3828125], start=start, end=end)
x = GWinfo.results_to_dict()
print(time() - start_time)
pprint(x)
| gpl-3.0 | 3,718,812,504,835,255,300 | -7,798,761,446,702,416,000 | 34.988109 | 90 | 0.52858 | false |
steventimberman/masterDebater | env/lib/python2.7/site-packages/django/db/backends/base/operations.py | 44 | 23686 | import datetime
import decimal
import warnings
from importlib import import_module
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends import utils
from django.utils import six, timezone
from django.utils.dateparse import parse_duration
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
class BaseDatabaseOperations(object):
"""
This class encapsulates all backend-specific differences, such as the way
a backend performs ordering or calculates the ID of a recently-inserted
row.
"""
compiler_module = "django.db.models.sql.compiler"
# Integer field safe ranges by `internal_type` as documented
# in docs/ref/models/fields.txt.
integer_field_ranges = {
'SmallIntegerField': (-32768, 32767),
'IntegerField': (-2147483648, 2147483647),
'BigIntegerField': (-9223372036854775808, 9223372036854775807),
'PositiveSmallIntegerField': (0, 32767),
'PositiveIntegerField': (0, 2147483647),
}
set_operators = {
'union': 'UNION',
'intersection': 'INTERSECT',
'difference': 'EXCEPT',
}
def __init__(self, connection):
self.connection = connection
self._cache = None
def autoinc_sql(self, table, column):
"""
Returns any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None
def bulk_batch_size(self, fields, objs):
"""
Returns the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
return len(objs)
def cache_key_culling_sql(self):
"""
Returns an SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"
def unification_cast_sql(self, output_field):
"""
Given a field instance, returns the SQL necessary to cast the result of
a union to that type. Note that the resulting string should contain a
'%s' placeholder for the expression being cast.
"""
return '%s'
def date_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
extracts a value from the given date field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method')
def date_interval_sql(self, timedelta):
"""
Implements the date interval functionality for expressions
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_interval_sql() method')
def date_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
truncates the given date field field_name to a date object with only
the given specificity.
"""
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method')
def datetime_cast_date_sql(self, field_name, tzname):
"""
Returns the SQL necessary to cast a datetime value to date value.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_date() method')
def datetime_cast_time_sql(self, field_name, tzname):
"""
Returns the SQL necessary to cast a datetime value to time value.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method')
def datetime_extract_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
'second', returns the SQL that extracts a value from the given
datetime field field_name, and a tuple of parameters.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method')
def datetime_trunc_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute' or
'second', returns the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity, and
a tuple of parameters.
"""
        raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method')
def time_trunc_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute' or 'second', returns the SQL
that truncates the given time field field_name to a time object with
only the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a time_trunc_sql() method')
def time_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute' or 'second', returns the SQL
that extracts a value from the given time field field_name.
"""
return self.date_extract_sql(lookup_type, field_name)
def deferrable_sql(self):
"""
Returns the SQL necessary to make a constraint "initially deferred"
during a CREATE TABLE statement.
"""
return ''
def distinct_sql(self, fields):
"""
Returns an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only the given fields are being
checked for duplicates.
"""
if fields:
raise NotImplementedError('DISTINCT ON fields is not supported by this database backend')
else:
return 'DISTINCT'
def fetch_returned_insert_id(self, cursor):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table that has an auto-incrementing ID, returns the
newly created ID.
"""
return cursor.fetchone()[0]
def field_cast_sql(self, db_type, internal_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR'), and an internal type
(e.g. 'GenericIPAddressField'), returns the SQL necessary to cast it
before using it in a WHERE statement. Note that the resulting string
should contain a '%s' placeholder for the column being searched against.
"""
return '%s'
def force_no_ordering(self):
"""
Returns a list used in the "ORDER BY" clause to force no ordering at
all. Returning an empty list means that nothing will be included in the
ordering.
"""
return []
def for_update_sql(self, nowait=False, skip_locked=False):
"""
Returns the FOR UPDATE SQL clause to lock rows for an update operation.
"""
if nowait:
return 'FOR UPDATE NOWAIT'
elif skip_locked:
return 'FOR UPDATE SKIP LOCKED'
else:
return 'FOR UPDATE'
def fulltext_search_sql(self, field_name):
"""
Returns the SQL WHERE clause to use in order to perform a full-text
search of the given field_name. Note that the resulting string should
contain a '%s' placeholder for the value being searched against.
"""
# RemovedInDjango20Warning
raise NotImplementedError('Full-text search is not implemented for this database backend')
def last_executed_query(self, cursor, sql, params):
"""
Returns a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders, and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
# Convert params to contain Unicode values.
def to_unicode(s):
return force_text(s, strings_only=True, errors='replace')
if isinstance(params, (list, tuple)):
u_params = tuple(to_unicode(val) for val in params)
elif params is None:
u_params = ()
else:
u_params = {to_unicode(k): to_unicode(v) for k, v in params.items()}
return six.text_type("QUERY = %r - PARAMS = %r") % (sql, u_params)
def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, returns the newly created ID.
This method also receives the table name and the name of the primary-key
column.
"""
return cursor.lastrowid
def lookup_cast(self, lookup_type, internal_type=None):
"""
Returns the string to use in a query when performing lookups
("contains", "like", etc.). The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return "%s"
def max_in_list_size(self):
"""
Returns the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None
def max_name_length(self):
"""
Returns the maximum length of table and column names, or None if there
is no limit.
"""
return None
def no_limit_value(self):
"""
Returns the value to use for the LIMIT when we are wanting "LIMIT
infinity". Returns None if the limit clause can be omitted in this case.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method')
def pk_default_value(self):
"""
Returns the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return 'DEFAULT'
def prepare_sql_script(self, sql):
"""
Takes an SQL script that may contain multiple lines and returns a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
"""
try:
import sqlparse
except ImportError:
raise ImproperlyConfigured(
"sqlparse is required if you don't split your SQL "
"statements manually."
)
else:
return [sqlparse.format(statement, strip_comments=True)
for statement in sqlparse.split(sql) if statement]
def process_clob(self, value):
"""
Returns the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value
def return_insert_id(self):
"""
For backends that support returning the last insert ID as part
of an insert query, this method returns the SQL and params to
append to the INSERT query. The returned fragment should
contain a format string to hold the appropriate column.
"""
pass
def compiler(self, compiler_name):
"""
Returns the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name)
def quote_name(self, name):
"""
Returns a quoted version of the given table, index or column name. Does
not quote the given name if it's already been quoted.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method')
def random_function_sql(self):
"""
Returns an SQL expression that returns a random value.
"""
return 'RANDOM()'
def regex_lookup(self, lookup_type):
"""
Returns the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). The resulting string should
contain a '%s' placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), a
NotImplementedError exception can be raised.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method')
def savepoint_create_sql(self, sid):
"""
Returns the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVEPOINT %s" % self.quote_name(sid)
def savepoint_commit_sql(self, sid):
"""
Returns the SQL for committing the given savepoint.
"""
return "RELEASE SAVEPOINT %s" % self.quote_name(sid)
def savepoint_rollback_sql(self, sid):
"""
Returns the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid)
def set_time_zone_sql(self):
"""
Returns the SQL that will set the connection's time zone.
Returns '' if the backend doesn't support time zones.
"""
return ''
def sql_flush(self, style, tables, sequences, allow_cascade=False):
"""
Returns a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The returned value also includes SQL statements required to reset DB
sequences passed in :param sequences:.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
The `allow_cascade` argument determines whether truncation may cascade
to tables with foreign keys pointing the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations must provide an sql_flush() method')
def sequence_reset_by_name_sql(self, style, sequences):
"""
Returns a list of the SQL statements required to reset sequences
passed in :param sequences:.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return []
def sequence_reset_sql(self, style, model_list):
"""
Returns a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] # No sequence reset required by default.
def start_transaction_sql(self):
"""
Returns the SQL statement required to start a transaction.
"""
return "BEGIN;"
def end_transaction_sql(self, success=True):
"""
Returns the SQL statement required to end a transaction.
"""
if not success:
return "ROLLBACK;"
return "COMMIT;"
def tablespace_sql(self, tablespace, inline=False):
"""
Returns the SQL that will be used in a query to define the tablespace.
Returns '' if the backend doesn't support tablespaces.
If inline is True, the SQL is appended to a row; otherwise it's appended
to the entire CREATE TABLE or CREATE INDEX statement.
"""
return ''
def prep_for_like_query(self, x):
"""Prepares a value for use in a LIKE query."""
return force_text(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
# Same as prep_for_like_query(), but called for "iexact" matches, which
# need not necessarily be implemented using "LIKE" in the backend.
prep_for_iexact_query = prep_for_like_query
def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). This method will raise a ValueError
if the value is invalid, otherwise returns validated value.
"""
return value
def adapt_unknown_value(self, value):
"""
Transforms a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
"""
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time):
return self.adapt_timefield_value(value)
elif isinstance(value, decimal.Decimal):
return self.adapt_decimalfield_value(value)
else:
return value
def adapt_datefield_value(self, value):
"""
Transforms a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return six.text_type(value)
def adapt_datetimefield_value(self, value):
"""
Transforms a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
return six.text_type(value)
def adapt_timefield_value(self, value):
"""
Transforms a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
if timezone.is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return six.text_type(value)
def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
"""
Transforms a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
return utils.format_number(value, max_digits, decimal_places)
def adapt_ipaddressfield_value(self, value):
"""
Transforms a string representation of an IP address into the expected
type for the backend driver.
"""
return value or None
def year_lookup_bounds_for_date_field(self, value):
"""
Returns a two-elements list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.date(value, 1, 1)
second = datetime.date(value, 12, 31)
first = self.adapt_datefield_value(first)
second = self.adapt_datefield_value(second)
return [first, second]
def year_lookup_bounds_for_datetime_field(self, value):
"""
Returns a two-elements list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.datetime(value, 1, 1)
second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
if settings.USE_TZ:
tz = timezone.get_current_timezone()
first = timezone.make_aware(first, tz)
second = timezone.make_aware(second, tz)
first = self.adapt_datetimefield_value(first)
second = self.adapt_datetimefield_value(second)
return [first, second]
def get_db_converters(self, expression):
"""
Get a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
format, this is the hook for converter functions.
"""
return []
def convert_durationfield_value(self, value, expression, connection, context):
if value is not None:
value = str(decimal.Decimal(value) / decimal.Decimal(1000000))
value = parse_duration(value)
return value
def check_aggregate_support(self, aggregate_func):
warnings.warn(
"check_aggregate_support has been deprecated. Use "
"check_expression_support instead.",
RemovedInDjango20Warning, stacklevel=2)
return self.check_expression_support(aggregate_func)
def check_expression_support(self, expression):
"""
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotImplementedError.
"""
pass
def combine_expression(self, connector, sub_expressions):
"""Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions)
"""
conn = ' %s ' % connector
return conn.join(sub_expressions)
def combine_duration_expression(self, connector, sub_expressions):
return self.combine_expression(connector, sub_expressions)
def binary_placeholder_sql(self, value):
"""
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
"""
return '%s'
def modify_insert_params(self, placeholder, params):
"""Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params
def integer_field_range(self, internal_type):
"""
Given an integer field internal type (e.g. 'PositiveIntegerField'),
returns a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
"""
return self.integer_field_ranges[internal_type]
def subtract_temporals(self, internal_type, lhs, rhs):
if self.connection.features.supports_temporal_subtraction:
lhs_sql, lhs_params = lhs
rhs_sql, rhs_params = rhs
return "(%s - %s)" % (lhs_sql, rhs_sql), lhs_params + rhs_params
raise NotImplementedError("This backend does not support %s subtraction." % internal_type)
| mit | -8,527,020,948,276,633,000 | 6,370,154,403,617,459,000 | 37.513821 | 119 | 0.636832 | false |
lucashmorais/x-Bench | mozmill-env/python/Lib/site-packages/mercurial/sshpeer.py | 90 | 7356 | # sshpeer.py - ssh repository proxy class for mercurial
#
# Copyright 2005, 2006 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import re
from i18n import _
import util, error, wireproto
class remotelock(object):
def __init__(self, repo):
self.repo = repo
def release(self):
self.repo.unlock()
self.repo = None
def __del__(self):
if self.repo:
self.release()
def _serverquote(s):
'''quote a string for the remote shell ... which we assume is sh'''
if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
return s
return "'%s'" % s.replace("'", "'\\''")
class sshpeer(wireproto.wirepeer):
def __init__(self, ui, path, create=False):
self._url = path
self.ui = ui
self.pipeo = self.pipei = self.pipee = None
u = util.url(path, parsequery=False, parsefragment=False)
if u.scheme != 'ssh' or not u.host or u.path is None:
self._abort(error.RepoError(_("couldn't parse location %s") % path))
self.user = u.user
if u.passwd is not None:
self._abort(error.RepoError(_("password in URL not supported")))
self.host = u.host
self.port = u.port
self.path = u.path or "."
sshcmd = self.ui.config("ui", "ssh", "ssh")
remotecmd = self.ui.config("ui", "remotecmd", "hg")
args = util.sshargs(sshcmd, self.host, self.user, self.port)
if create:
cmd = '%s %s %s' % (sshcmd, args,
util.shellquote("%s init %s" %
(_serverquote(remotecmd), _serverquote(self.path))))
ui.note(_('running %s\n') % cmd)
res = util.system(cmd)
if res != 0:
self._abort(error.RepoError(_("could not create remote repo")))
self.validate_repo(ui, sshcmd, args, remotecmd)
def url(self):
return self._url
def validate_repo(self, ui, sshcmd, args, remotecmd):
        # clean up previous run
self.cleanup()
cmd = '%s %s %s' % (sshcmd, args,
util.shellquote("%s -R %s serve --stdio" %
(_serverquote(remotecmd), _serverquote(self.path))))
ui.note(_('running %s\n') % cmd)
cmd = util.quotecommand(cmd)
# while self.subprocess isn't used, having it allows the subprocess to
# to clean up correctly later
self.pipeo, self.pipei, self.pipee, self.subprocess = util.popen4(cmd)
# skip any noise generated by remote shell
self._callstream("hello")
r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40)))
lines = ["", "dummy"]
max_noise = 500
while lines[-1] and max_noise:
l = r.readline()
self.readerr()
if lines[-1] == "1\n" and l == "\n":
break
if l:
ui.debug("remote: ", l)
lines.append(l)
max_noise -= 1
else:
self._abort(error.RepoError(_('no suitable response from '
'remote hg')))
self._caps = set()
for l in reversed(lines):
if l.startswith("capabilities:"):
self._caps.update(l[:-1].split(":")[1].split())
break
def _capabilities(self):
return self._caps
def readerr(self):
while True:
size = util.fstat(self.pipee).st_size
if size == 0:
break
s = self.pipee.read(size)
if not s:
break
for l in s.splitlines():
self.ui.status(_("remote: "), l, '\n')
def _abort(self, exception):
self.cleanup()
raise exception
def cleanup(self):
if self.pipeo is None:
return
self.pipeo.close()
self.pipei.close()
try:
# read the error descriptor until EOF
for l in self.pipee:
self.ui.status(_("remote: "), l)
except (IOError, ValueError):
pass
self.pipee.close()
__del__ = cleanup
def _callstream(self, cmd, **args):
self.ui.debug("sending %s command\n" % cmd)
self.pipeo.write("%s\n" % cmd)
_func, names = wireproto.commands[cmd]
keys = names.split()
wireargs = {}
for k in keys:
if k == '*':
wireargs['*'] = args
break
else:
wireargs[k] = args[k]
del args[k]
for k, v in sorted(wireargs.iteritems()):
self.pipeo.write("%s %d\n" % (k, len(v)))
if isinstance(v, dict):
for dk, dv in v.iteritems():
self.pipeo.write("%s %d\n" % (dk, len(dv)))
self.pipeo.write(dv)
else:
self.pipeo.write(v)
self.pipeo.flush()
return self.pipei
def _call(self, cmd, **args):
self._callstream(cmd, **args)
return self._recv()
def _callpush(self, cmd, fp, **args):
r = self._call(cmd, **args)
if r:
return '', r
while True:
d = fp.read(4096)
if not d:
break
self._send(d)
self._send("", flush=True)
r = self._recv()
if r:
return '', r
return self._recv(), ''
def _decompress(self, stream):
return stream
def _recv(self):
l = self.pipei.readline()
if l == '\n':
err = []
while True:
line = self.pipee.readline()
if line == '-\n':
break
err.extend([line])
if len(err) > 0:
# strip the trailing newline added to the last line server-side
err[-1] = err[-1][:-1]
self._abort(error.OutOfBandError(*err))
self.readerr()
try:
l = int(l)
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), l))
return self.pipei.read(l)
def _send(self, data, flush=False):
self.pipeo.write("%d\n" % len(data))
if data:
self.pipeo.write(data)
if flush:
self.pipeo.flush()
self.readerr()
def lock(self):
self._call("lock")
return remotelock(self)
def unlock(self):
self._call("unlock")
def addchangegroup(self, cg, source, url, lock=None):
'''Send a changegroup to the remote server. Return an integer
similar to unbundle(). DEPRECATED, since it requires locking the
remote.'''
d = self._call("addchangegroup")
if d:
self._abort(error.RepoError(_("push refused: %s") % d))
while True:
d = cg.read(4096)
if not d:
break
self.pipeo.write(d)
self.readerr()
self.pipeo.flush()
self.readerr()
r = self._recv()
if not r:
return 1
try:
return int(r)
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), r))
instance = sshpeer
| mit | 1,967,027,304,342,717,000 | -7,676,095,304,471,082,000 | 29.396694 | 80 | 0.498777 | false |
mne-tools/mne-python | mne/preprocessing/realign.py | 1 | 4237 | # -*- coding: utf-8 -*-
# Authors: Eric Larson <[email protected]>
# License: BSD (3-clause)
import numpy as np
from numpy.polynomial.polynomial import Polynomial
from ..io import BaseRaw
from ..utils import _validate_type, warn, logger, verbose
@verbose
def realign_raw(raw, other, t_raw, t_other, verbose=None):
"""Realign two simultaneous recordings.
Due to clock drift, recordings at a given same sample rate made by two
separate devices simultaneously can become out of sync over time. This
function uses event times captured by both acquisition devices to resample
``other`` to match ``raw``.
Parameters
----------
raw : instance of Raw
The first raw instance.
other : instance of Raw
The second raw instance. It will be resampled to match ``raw``.
t_raw : array-like, shape (n_events,)
The times of shared events in ``raw`` relative to ``raw.times[0]`` (0).
Typically these could be events on some TTL channel like
``find_events(raw)[:, 0] - raw.first_event``.
t_other : array-like, shape (n_events,)
The times of shared events in ``other`` relative to ``other.times[0]``.
%(verbose)s
Notes
-----
This function operates inplace. It will:
1. Estimate the zero-order (start offset) and first-order (clock drift)
correction.
2. Crop the start of ``raw`` or ``other``, depending on which started
recording first.
3. Resample ``other`` to match ``raw`` based on the clock drift.
4. Crop the end of ``raw`` or ``other``, depending on which stopped
recording first (and the clock drift rate).
This function is primarily designed to work on recordings made at the same
sample rate, but it can also operate on recordings made at different
sample rates to resample and deal with clock drift simultaneously.
.. versionadded:: 0.22
"""
from scipy import stats
_validate_type(raw, BaseRaw, 'raw')
_validate_type(other, BaseRaw, 'other')
t_raw = np.array(t_raw, float)
t_other = np.array(t_other, float)
if t_raw.ndim != 1 or t_raw.shape != t_other.shape:
raise ValueError('t_raw and t_other must be 1D with the same shape, '
f'got shapes {t_raw.shape} and {t_other.shape}')
if len(t_raw) < 20:
warn('Fewer than 20 times passed, results may be unreliable')
# 1. Compute correction factors
poly = Polynomial.fit(x=t_other, y=t_raw, deg=1)
converted = poly.convert(domain=(-1, 1))
[zero_ord, first_ord] = converted.coef
logger.info(f'Zero order coefficient: {zero_ord} \n'
f'First order coefficient: {first_ord}')
r, p = stats.pearsonr(t_other, t_raw)
msg = f'Linear correlation computed as R={r:0.3f} and p={p:0.2e}'
if p > 0.05 or r <= 0:
raise ValueError(msg + ', cannot resample safely')
if p > 1e-6:
warn(msg + ', results may be unreliable')
else:
logger.info(msg)
dr_ms_s = 1000 * abs(1 - first_ord)
logger.info(
f'Drift rate: {1000 * dr_ms_s:0.1f} μs/sec '
f'(total drift over {raw.times[-1]:0.1f} sec recording: '
f'{raw.times[-1] * dr_ms_s:0.1f} ms)')
# 2. Crop start of recordings to match using the zero-order term
msg = f'Cropping {zero_ord:0.3f} sec from the start of '
if zero_ord > 0: # need to crop start of raw to match other
logger.info(msg + 'raw')
raw.crop(zero_ord, None)
t_raw -= zero_ord
else: # need to crop start of other to match raw
logger.info(msg + 'other')
other.crop(-zero_ord, None)
t_other += zero_ord
# 3. Resample data using the first-order term
logger.info('Resampling other')
sfreq_new = raw.info['sfreq'] * first_ord
other.load_data().resample(sfreq_new, verbose=True)
other.info['sfreq'] = raw.info['sfreq']
# 4. Crop the end of one of the recordings if necessary
delta = raw.times[-1] - other.times[-1]
msg = f'Cropping {abs(delta):0.3f} sec from the end of '
if delta > 0:
logger.info(msg + 'raw')
raw.crop(0, other.times[-1])
elif delta < 0:
logger.info(msg + 'other')
other.crop(0, raw.times[-1])
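# Illustrative usage sketch (hypothetical; the event handling shown is an example):
# realign two recordings from shared TTL events. find_events, first_samp and
# info['sfreq'] are standard MNE attributes.
def _example_realign(raw, other):
    from mne import find_events
    t_raw = (find_events(raw)[:, 0] - raw.first_samp) / raw.info['sfreq']
    t_other = (find_events(other)[:, 0] - other.first_samp) / other.info['sfreq']
    realign_raw(raw, other, t_raw, t_other)  # operates in place
    return raw, other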
| bsd-3-clause | 6,564,557,355,319,484,000 | 8,894,050,561,187,564,000 | 37.509091 | 79 | 0.627007 | false |
asmacdo/pulp-automation | tests/general_tests/test_06_roles.py | 2 | 6147 | import unittest, json
from tests import pulp_test
from pulp_auto import Pulp
from pulp_auto.role import Role
from pulp_auto.user import User
from pulp_auto.repo import Repo
def setUpModule():
pass
class RoleTest(pulp_test.PulpTest):
@classmethod
def setUpClass(cls):
super(RoleTest, cls).setUpClass()
# create roles
with cls.pulp.asserting(True):
response = Role.create(cls.pulp, data={'role_id': cls.__name__ + "_role"})
cls.role = Role.from_response(response)
with cls.pulp.asserting(True):
response2 = Role.create(cls.pulp, data={'role_id': cls.__name__ + "_role2"})
cls.role2 = Role.from_response(response2)
with cls.pulp.asserting(True):
response3 = Role.create(cls.pulp, data={'role_id': cls.__name__ + "_role3"})
cls.role3 = Role.from_response(response3)
# users
cls.user = User(data={"login": cls.__name__ + "_user", "name": cls.__name__, "password": cls.__name__})
cls.user2 = User(data={"login": cls.__name__ + "_user2", "name": cls.__name__, "password": cls.__name__})
        # a new session has to be created for the user, as the admin's auth credentials are used by default
cls.user_pulp = Pulp(cls.pulp.url, auth=(cls.user.data['login'], cls.user.data['password']))
cls.user_pulp2 = Pulp(cls.pulp.url, auth=(cls.user2.data['login'], cls.user2.data['password']))
@classmethod
def tearDownClass(cls):
# delete users
with cls.pulp.asserting(True):
cls.user.delete(cls.pulp)
with cls.pulp.asserting(True):
cls.user2.delete(cls.pulp)
# delete roles
with cls.pulp.asserting(True):
cls.role2.delete(cls.pulp)
class SimpleRoleTest(RoleTest):
def test_01_no_dupl_role(self):
Role.create(self.pulp, data={'role_id': self.role.id})
self.assertPulp(code=409)
def test_02_get_role(self):
self.assertEqual(self.role, Role.get(self.pulp, self.role.id))
self.assertEqual(self.role2, Role.get(self.pulp, self.role2.id))
def test_03_get_unexistant_role(self):
with self.assertRaises(AssertionError):
Role.get(self.pulp, 'some_id')
self.assertPulp(code=404)
def test_04_list_roles(self):
self.assertIn(self.role, Role.list(self.pulp))
self.assertIn(self.role2, Role.list(self.pulp))
def test_05_update_role(self):
display_name = 'A %s role' % self.__class__.__name__
self.role |= {'display_name': display_name}
self.role.delta_update(self.pulp)
self.assertPulp(code=200)
self.assertEqual(Role.get(self.pulp, self.role.id).data['display_name'], display_name)
def test_05_update_role_permission_bz1066040(self):
# https://bugzilla.redhat.com/show_bug.cgi?id=1066040
self.role.data["permissions"] = {"/":["CREATE","DELETE"]}
self.role.delta_update(self.pulp)
self.assertPulp(code=400)
def test_06_update_unexistant_role(self):
self.role3.delete(self.pulp)
display_name = 'A %s role' % self.__class__.__name__
self.role3 |= {'display_name': display_name}
with self.assertRaises(AssertionError):
self.role3.delta_update(self.pulp)
self.assertPulp(code=404)
def test_07_add_user(self):
# create user
self.user.create(self.pulp)
self.assertPulpOK()
# add user to the role
self.role.add_user(
self.pulp,
self.user.id
)
self.assertPulp(code=200)
self.assertEqual(Role.get(self.pulp, self.role.id).data['users'], [self.user.id])
def test_08_add_unexistant_user_1116825(self):
# https://bugzilla.redhat.com/show_bug.cgi?id=1116825
# add user to the role
self.role.add_user(
self.pulp,
"Unexistant_user"
)
self.assertPulp(code=400)
def test_09_remove_user(self):
# remove user from the role
self.role.remove_user(
self.pulp,
self.user.id
)
self.assertPulp(code=200)
self.assertEqual(Role.get(self.pulp, self.role.id).data['users'], [])
def test_10_add_2_users(self):
# create second user
self.user2.create(self.pulp)
self.assertPulpOK()
# add users to the role
self.role.add_user(
self.pulp,
self.user.id
)
self.assertPulp(code=200)
self.role.add_user(
self.pulp,
self.user2.id
)
self.assertPulp(code=200)
self.assertEqual(Role.get(self.pulp, self.role.id).data['users'], [self.user.id, self.user2.id])
def test_11_add_role_perm(self):
self.role.grant_permission(self.pulp, self.role.id, "/", ["READ", "EXECUTE"])
self.role.grant_permission(self.pulp, self.role.id, "/repositories/", ["READ", "EXECUTE"])
self.assertPulpOK()
def test_12_check_user_perm(self):
with self.user_pulp.asserting(True):
Repo.list(self.user_pulp)
with self.user_pulp2.asserting(True):
Repo.list(self.user_pulp2)
def test_13_remove_user(self):
# remove user from the role
self.role.remove_user(
self.pulp,
self.user2.id
)
self.assertPulp(code=200)
def test_14_check_bindings_removed(self):
        # check that after user2 is removed from the role, the user's bindings are also removed
with self.assertRaises(AssertionError):
with self.user_pulp2.asserting(True):
Repo.list(self.user_pulp2)
def test_15_check_bindings_removed(self):
self.role.delete(self.pulp)
self.assertPulpOK()
        # check that after the role is deleted, the user's bindings are also removed
with self.assertRaises(AssertionError):
with self.user_pulp.asserting(True):
Repo.list(self.user_pulp)
def test_16_delete_unexistant_role(self):
        # check that you cannot delete a role twice
self.role.delete(self.pulp)
self.assertPulp(code=404)
| gpl-2.0 | 2,642,479,761,995,447,300 | -4,984,782,519,177,547,000 | 33.728814 | 113 | 0.603709 | false |
jamii/inkling | jottinks/src/NoteTree2.py | 1 | 4804 | """
Copyright 2008 Jamie Brandon, Mark Haines
This file is part of jottinKs.
JottinKs is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
JottinKs is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with jottinKs. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
from Note import *
import Utils
from Writing import *
from PyKDE4.kdecore import *
from PyKDE4.kdeui import *
from PyQt4 import uic
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import cPickle
import pickle
class NoteTree(QTreeWidget):
def __init__(self, root=None):
QTreeWidget.__init__(self)
self.header().hide()
self.setColumnCount(1)
if root:
self.root = root
else:
self.root = NoteTreeRoot()
self.addTopLevelItem(self.root)
self.root.setTitle()
self.connect(self,SIGNAL("itemClicked (QTreeWidgetItem *,int)"),self.treeItemClicked)
self.actionList = None
self.selectedItem = self.root.next()
def treeItemClicked(self,item,column):
print "Got click", item.noteData.title
self.clearSelection()
self.selectedItem = item
item.setSelected(True)
self.scrollToItem(item)
self.showNote(item.noteData)
item.setTitle()
def showNote(self,noteData):
self.emit(SIGNAL("showNote(PyQt_PyObject)"),noteData)
def click(self,item):
print "Sent click", item.noteData.title
self.emit(SIGNAL("itemClicked (QTreeWidgetItem *,int)"),item,0)
# !!! Do I need this?
def addNote(self,note):
self.root.addChild(NoteTreeItem(note))
def newNote(self):
item = NoteTreeItem(Writing())
self.selectedItem.parent().insertChild(self.selectedItem.index()+1,item)
item.setTitle()
self.click(item)
print "added" , item, item.parent()
def newSubNote(self):
item = NoteTreeItem(Writing())
self.selectedItem.addChild(item)
item.setTitle()
self.click(item)
def deleteNote(self):
print "Will delete:", self.selectedItem
print "Parent is:" , self.selectedItem.parent()
deletee = self.selectedItem
self.click(deletee.previousItem())
deletee.remove()
def actions(self):
if not self.actionList:
newNote = KAction(KIcon("new"),i18n("New note"), self)
self.connect(newNote,SIGNAL("triggered()"),self.newNote)
newSubNote = KAction(KIcon("new"),i18n("New subnote"), self)
self.connect(newSubNote,SIGNAL("triggered()"),self.newSubNote)
deleteNote = KAction(KIcon("delete"),i18n("Delete note"), self)
self.connect(deleteNote,SIGNAL("triggered()"),self.deleteNote)
self.actionList = [newNote, newSubNote, deleteNote]
return self.actionList
def topLevelItems(self):
i = 0
length = self.root.childCount()
while i<length:
yield self.root.child(i)
i += 1
	def __reduce__(self):
		# pickling support: return the (callable, args) pair
		return (NoteTree, (self.root,))
def __reduce_ex__(self,i):
return self.__reduce__()
class NoteTreeItem(QTreeWidgetItem):
def __init__(self, noteData=None, children = []):
QTreeWidgetItem.__init__(self)
self.noteData = noteData
for child in children:
self.addChild(child)
	# Can't call this until the item has been added to the tree
def setTitle(self):
self.treeWidget().setItemWidget(self,0,QLabel("Bugger"))
for child in self.children():
child.setTitle()
def children(self):
children = []
for i in range(0,self.childCount()):
children.append(self.child(i))
return children
def index(self):
return self.parent().indexOfChild(self)
def previousItem(self):
i = self.index()
if i==0:
return self.parent()
else:
return self.parent().child(i-1)
def nextItem(self):
i = self.index()
if i+1 == self.parent().childCount():
return self.parent().nextItem()
else:
return self.parent().child(i+1)
def remove(self):
self.parent().removeChild(self)
def __reduce__(self):
return (NoteTreeItem,(self.noteData,self.children()))
class NoteTreeRoot(NoteTreeItem):
def __init__(self,children=[]):
NoteTreeItem.__init__(self,Writing(),children)
self.setText(0,"Root")
def parent(self):
return self
# This makes the new note function work.
# If we use index anywhere else it may cause some pain
def index(self):
return self.childCount() - 1
def previous(self):
return self
def next(self):
if self.childCount():
return self.child(0)
else:
return self
def remove(self):
pass
def __reduce__(self):
return (NoteTreeRoot,(self.children(),)) | gpl-3.0 | -6,951,976,147,975,526,000 | -2,990,242,846,821,584,400 | 24.289474 | 87 | 0.696295 | false |
ovnicraft/edx-platform | lms/djangoapps/instructor_task/models.py | 24 | 16357 | """
WE'RE USING MIGRATIONS!
If you make changes to this model, be sure to create an appropriate migration
file and check it in at the same time as your model changes. To do that,
1. Go to the edx-platform dir
2. ./manage.py schemamigration instructor_task --auto description_of_your_change
3. Add the migration file created in edx-platform/lms/djangoapps/instructor_task/migrations/
ASSUMPTIONS: modules have unique IDs, even across different module_types
"""
from cStringIO import StringIO
from gzip import GzipFile
from uuid import uuid4
import csv
import json
import hashlib
import os.path
import urllib
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models, transaction
from xmodule_django.models import CourseKeyField
# define custom states used by InstructorTask
QUEUING = 'QUEUING'
PROGRESS = 'PROGRESS'
class InstructorTask(models.Model):
"""
Stores information about background tasks that have been submitted to
perform work by an instructor (or course staff).
Examples include grading and rescoring.
`task_type` identifies the kind of task being performed, e.g. rescoring.
`course_id` uses the course run's unique id to identify the course.
`task_key` stores relevant input arguments encoded into key value for testing to see
if the task is already running (together with task_type and course_id).
`task_input` stores input arguments as JSON-serialized dict, for reporting purposes.
Examples include url of problem being rescored, id of student if only one student being rescored.
`task_id` stores the id used by celery for the background task.
`task_state` stores the last known state of the celery task
`task_output` stores the output of the celery task.
Format is a JSON-serialized dict. Content varies by task_type and task_state.
`requester` stores id of user who submitted the task
`created` stores date that entry was first created
`updated` stores date that entry was last modified
"""
task_type = models.CharField(max_length=50, db_index=True)
course_id = CourseKeyField(max_length=255, db_index=True)
task_key = models.CharField(max_length=255, db_index=True)
task_input = models.CharField(max_length=255)
task_id = models.CharField(max_length=255, db_index=True) # max_length from celery_taskmeta
task_state = models.CharField(max_length=50, null=True, db_index=True) # max_length from celery_taskmeta
task_output = models.CharField(max_length=1024, null=True)
requester = models.ForeignKey(User, db_index=True)
created = models.DateTimeField(auto_now_add=True, null=True)
updated = models.DateTimeField(auto_now=True)
subtasks = models.TextField(blank=True) # JSON dictionary
def __repr__(self):
return 'InstructorTask<%r>' % ({
'task_type': self.task_type,
'course_id': self.course_id,
'task_input': self.task_input,
'task_id': self.task_id,
'task_state': self.task_state,
'task_output': self.task_output,
},)
def __unicode__(self):
return unicode(repr(self))
@classmethod
def create(cls, course_id, task_type, task_key, task_input, requester):
"""
Create an instance of InstructorTask.
"""
# create the task_id here, and pass it into celery:
task_id = str(uuid4())
json_task_input = json.dumps(task_input)
        # check length of task_input, and raise an exception if it's too long:
if len(json_task_input) > 255:
fmt = 'Task input longer than 255: "{input}" for "{task}" of "{course}"'
msg = fmt.format(input=json_task_input, task=task_type, course=course_id)
raise ValueError(msg)
# create the task, then save it:
instructor_task = cls(
course_id=course_id,
task_type=task_type,
task_id=task_id,
task_key=task_key,
task_input=json_task_input,
task_state=QUEUING,
requester=requester
)
instructor_task.save_now()
return instructor_task
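    # Illustrative, hypothetical call (e.g. from an instructor API view); the
    # task_type, task_key and task_input values are made up to show the
    # expected argument shapes only:
    #
    #     entry = InstructorTask.create(
    #         course_id=course_key,
    #         task_type='rescore_problem',
    #         task_key=hashlib.md5(problem_url).hexdigest(),
    #         task_input={'problem_url': problem_url},
    #         requester=request.user,
    #     )
    #     # entry.task_id is then passed along to the celery task doing the work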
@transaction.atomic
def save_now(self):
"""
Writes InstructorTask immediately, ensuring the transaction is committed.
"""
self.save()
@staticmethod
def create_output_for_success(returned_result):
"""
Converts successful result to output format.
Raises a ValueError exception if the output is too long.
"""
# In future, there should be a check here that the resulting JSON
        # will fit in the column. In the meantime, just raise an exception.
json_output = json.dumps(returned_result)
if len(json_output) > 1023:
raise ValueError("Length of task output is too long: {0}".format(json_output))
return json_output
@staticmethod
def create_output_for_failure(exception, traceback_string):
"""
Converts failed result information to output format.
Traceback information is truncated or not included if it would result in an output string
that would not fit in the database. If the output is still too long, then the
exception message is also truncated.
Truncation is indicated by adding "..." to the end of the value.
"""
tag = '...'
task_progress = {'exception': type(exception).__name__, 'message': unicode(exception.message)}
if traceback_string is not None:
# truncate any traceback that goes into the InstructorTask model:
task_progress['traceback'] = traceback_string
json_output = json.dumps(task_progress)
# if the resulting output is too long, then first shorten the
# traceback, and then the message, until it fits.
too_long = len(json_output) - 1023
if too_long > 0:
if traceback_string is not None:
if too_long >= len(traceback_string) - len(tag):
# remove the traceback entry entirely (so no key or value)
del task_progress['traceback']
too_long -= (len(traceback_string) + len('traceback'))
else:
# truncate the traceback:
task_progress['traceback'] = traceback_string[:-(too_long + len(tag))] + tag
too_long = 0
if too_long > 0:
# we need to shorten the message:
task_progress['message'] = task_progress['message'][:-(too_long + len(tag))] + tag
json_output = json.dumps(task_progress)
return json_output
@staticmethod
def create_output_for_revoked():
"""Creates standard message to store in output format for revoked tasks."""
return json.dumps({'message': 'Task revoked before running'})
class ReportStore(object):
"""
Simple abstraction layer that can fetch and store CSV files for reports
download. Should probably refactor later to create a ReportFile object that
can simply be appended to for the sake of memory efficiency, rather than
passing in the whole dataset. Doing that for now just because it's simpler.
"""
@classmethod
def from_config(cls, config_name):
"""
Return one of the ReportStore subclasses depending on django
configuration. Look at subclasses for expected configuration.
"""
storage_type = getattr(settings, config_name).get("STORAGE_TYPE")
if storage_type.lower() == "s3":
return S3ReportStore.from_config(config_name)
elif storage_type.lower() == "localfs":
return LocalFSReportStore.from_config(config_name)
def _get_utf8_encoded_rows(self, rows):
"""
Given a list of `rows` containing unicode strings, return a
new list of rows with those strings encoded as utf-8 for CSV
compatibility.
"""
for row in rows:
yield [unicode(item).encode('utf-8') for item in row]
class S3ReportStore(ReportStore):
"""
Reports store backed by S3. The directory structure we use to store things
is::
`{bucket}/{root_path}/{sha1 hash of course_id}/filename`
We might later use subdirectories or metadata to do more intelligent
grouping and querying, but right now it simply depends on its own
conventions on where files are stored to know what to display. Clients using
this class can name the final file whatever they want.
"""
def __init__(self, bucket_name, root_path):
self.root_path = root_path
conn = S3Connection(
settings.AWS_ACCESS_KEY_ID,
settings.AWS_SECRET_ACCESS_KEY
)
self.bucket = conn.get_bucket(bucket_name)
@classmethod
def from_config(cls, config_name):
"""
The expected configuration for an `S3ReportStore` is to have a
`GRADES_DOWNLOAD` dict in settings with the following fields::
STORAGE_TYPE : "s3"
BUCKET : Your bucket name, e.g. "reports-bucket"
ROOT_PATH : The path you want to store all course files under. Do not
use a leading or trailing slash. e.g. "staging" or
"staging/2013", not "/staging", or "/staging/"
Since S3 access relies on boto, you must also define `AWS_ACCESS_KEY_ID`
and `AWS_SECRET_ACCESS_KEY` in settings.
"""
return cls(
getattr(settings, config_name).get("BUCKET"),
getattr(settings, config_name).get("ROOT_PATH")
)
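    # Illustrative settings block matching the docstring above (bucket name and
    # root path are placeholder values):
    #
    #     GRADES_DOWNLOAD = {
    #         'STORAGE_TYPE': 's3',
    #         'BUCKET': 'reports-bucket',
    #         'ROOT_PATH': 'staging/2013',
    #     }
    #     report_store = ReportStore.from_config('GRADES_DOWNLOAD')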
def key_for(self, course_id, filename):
"""Return the S3 key we would use to store and retrieve the data for the
given filename."""
hashed_course_id = hashlib.sha1(course_id.to_deprecated_string())
key = Key(self.bucket)
key.key = "{}/{}/{}".format(
self.root_path,
hashed_course_id.hexdigest(),
filename
)
return key
def store(self, course_id, filename, buff, config=None):
"""
Store the contents of `buff` in a directory determined by hashing
`course_id`, and name the file `filename`. `buff` is typically a
`StringIO`, but can be anything that implements `.getvalue()`.
This method assumes that the contents of `buff` are gzip-encoded (it
will add the appropriate headers to S3 to make the decompression
transparent via the browser). Filenames should end in whatever
suffix makes sense for the original file, so `.txt` instead of `.gz`
"""
key = self.key_for(course_id, filename)
_config = config if config else {}
content_type = _config.get('content_type', 'text/csv')
content_encoding = _config.get('content_encoding', 'gzip')
data = buff.getvalue()
key.size = len(data)
key.content_encoding = content_encoding
key.content_type = content_type
# Just setting the content encoding and type above should work
# according to the docs, but when experimenting, this was necessary for
# it to actually take.
key.set_contents_from_string(
data,
headers={
"Content-Encoding": content_encoding,
"Content-Length": len(data),
"Content-Type": content_type,
}
)
def store_rows(self, course_id, filename, rows):
"""
Given a `course_id`, `filename`, and `rows` (each row is an iterable of
strings), create a buffer that is a gzip'd csv file, and then `store()`
that buffer.
Even though we store it in gzip format, browsers will transparently
download and decompress it. Filenames should end in `.csv`, not `.gz`.
"""
output_buffer = StringIO()
gzip_file = GzipFile(fileobj=output_buffer, mode="wb")
csvwriter = csv.writer(gzip_file)
csvwriter.writerows(self._get_utf8_encoded_rows(rows))
gzip_file.close()
self.store(course_id, filename, output_buffer)
def links_for(self, course_id):
"""
For a given `course_id`, return a list of `(filename, url)` tuples. `url`
can be plugged straight into an href
"""
course_dir = self.key_for(course_id, '')
return [
(key.key.split("/")[-1], key.generate_url(expires_in=300))
for key in sorted(self.bucket.list(prefix=course_dir.key), reverse=True, key=lambda k: k.last_modified)
]
class LocalFSReportStore(ReportStore):
"""
LocalFS implementation of a ReportStore. This is meant for debugging
purposes and is *absolutely not for production use*. Use S3ReportStore for
that. We use this in tests and for local development. When it generates
links, it will make file:/// style links. That means you actually have to
copy them and open them in a separate browser window, for security reasons.
This lets us do the cheap thing locally for debugging without having to open
up a separate URL that would only be used to send files in dev.
"""
def __init__(self, root_path):
"""
Initialize with root_path where we're going to store our files. We
will build a directory structure under this for each course.
"""
self.root_path = root_path
if not os.path.exists(root_path):
os.makedirs(root_path)
@classmethod
def from_config(cls, config_name):
"""
Generate an instance of this object from Django settings. It assumes
that there is a dict in settings named GRADES_DOWNLOAD and that it has
a ROOT_PATH that maps to an absolute file path that the web app has
write permissions to. `LocalFSReportStore` will create any intermediate
directories as needed. Example::
STORAGE_TYPE : "localfs"
ROOT_PATH : /tmp/edx/report-downloads/
"""
return cls(getattr(settings, config_name).get("ROOT_PATH"))
def path_to(self, course_id, filename):
"""Return the full path to a given file for a given course."""
return os.path.join(self.root_path, urllib.quote(course_id.to_deprecated_string(), safe=''), filename)
def store(self, course_id, filename, buff, config=None): # pylint: disable=unused-argument
"""
Given the `course_id` and `filename`, store the contents of `buff` in
that file. Overwrite anything that was there previously. `buff` is
        assumed to be a StringIO object (or anything that can flush its contents
to string using `.getvalue()`).
"""
full_path = self.path_to(course_id, filename)
directory = os.path.dirname(full_path)
if not os.path.exists(directory):
os.mkdir(directory)
with open(full_path, "wb") as f:
f.write(buff.getvalue())
def store_rows(self, course_id, filename, rows):
"""
Given a course_id, filename, and rows (each row is an iterable of strings),
write this data out.
"""
output_buffer = StringIO()
csvwriter = csv.writer(output_buffer)
csvwriter.writerows(self._get_utf8_encoded_rows(rows))
self.store(course_id, filename, output_buffer)
def links_for(self, course_id):
"""
For a given `course_id`, return a list of `(filename, url)` tuples. `url`
can be plugged straight into an href. Note that `LocalFSReportStore`
will generate `file://` type URLs, so you'll need to copy the URL and
open it in a new browser window. Again, this class is only meant for
local development.
"""
course_dir = self.path_to(course_id, '')
if not os.path.exists(course_dir):
return []
files = [(filename, os.path.join(course_dir, filename)) for filename in os.listdir(course_dir)]
files.sort(key=lambda (filename, full_path): os.path.getmtime(full_path), reverse=True)
return [
(filename, ("file://" + urllib.quote(full_path)))
for filename, full_path in files
]
| agpl-3.0 | 1,812,668,215,691,681,000 | -1,184,282,267,020,182,500 | 39.090686 | 115 | 0.634774 | false |
LuminateWireless/grpc | src/python/grpcio_tests/tests/unit/framework/interfaces/face/_future_invocation_asynchronous_event_service.py | 23 | 25112 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test code for the Face layer of RPC Framework."""
from __future__ import division
import abc
import contextlib
import itertools
import threading
import unittest
from concurrent import futures
import six
# test_interfaces is referenced from specification in this module.
from grpc.framework.foundation import future
from grpc.framework.foundation import logging_pool
from grpc.framework.interfaces.face import face
from tests.unit.framework.common import test_constants
from tests.unit.framework.common import test_control
from tests.unit.framework.common import test_coverage
from tests.unit.framework.interfaces.face import _3069_test_constant
from tests.unit.framework.interfaces.face import _digest
from tests.unit.framework.interfaces.face import _stock_service
from tests.unit.framework.interfaces.face import test_interfaces # pylint: disable=unused-import
class _PauseableIterator(object):
def __init__(self, upstream):
self._upstream = upstream
self._condition = threading.Condition()
self._paused = False
@contextlib.contextmanager
def pause(self):
with self._condition:
self._paused = True
yield
with self._condition:
self._paused = False
self._condition.notify_all()
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
with self._condition:
while self._paused:
self._condition.wait()
return next(self._upstream)
class _Callback(object):
def __init__(self):
self._condition = threading.Condition()
self._called = False
self._passed_future = None
self._passed_other_stuff = None
def __call__(self, *args, **kwargs):
with self._condition:
self._called = True
if args:
self._passed_future = args[0]
if 1 < len(args) or kwargs:
self._passed_other_stuff = tuple(args[1:]), dict(kwargs)
self._condition.notify_all()
def future(self):
with self._condition:
while True:
if self._passed_other_stuff is not None:
                    raise ValueError(
                        'Test callback passed unexpected values: %s' %
                        (self._passed_other_stuff,))
elif self._called:
return self._passed_future
else:
self._condition.wait()
class TestCase(
six.with_metaclass(abc.ABCMeta, test_coverage.Coverage,
unittest.TestCase)):
"""A test of the Face layer of RPC Framework.
Concrete subclasses must have an "implementation" attribute of type
test_interfaces.Implementation and an "invoker_constructor" attribute of type
_invocation.InvokerConstructor.
"""
NAME = 'FutureInvocationAsynchronousEventServiceTest'
def setUp(self):
"""See unittest.TestCase.setUp for full specification.
Overriding implementations must call this implementation.
"""
self._control = test_control.PauseFailControl()
self._digest_pool = logging_pool.pool(test_constants.POOL_SIZE)
self._digest = _digest.digest(_stock_service.STOCK_TEST_SERVICE,
self._control, self._digest_pool)
generic_stub, dynamic_stubs, self._memo = self.implementation.instantiate(
self._digest.methods, self._digest.event_method_implementations,
None)
self._invoker = self.invoker_constructor.construct_invoker(
generic_stub, dynamic_stubs, self._digest.methods)
def tearDown(self):
"""See unittest.TestCase.tearDown for full specification.
Overriding implementations must call this implementation.
"""
self._invoker = None
self.implementation.destantiate(self._memo)
self._digest_pool.shutdown(wait=True)
def testSuccessfulUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = _Callback()
response_future = self._invoker.future(group, method)(
request, test_constants.LONG_TIMEOUT)
response_future.add_done_callback(callback)
response = response_future.result()
test_messages.verify(request, response, self)
self.assertIs(callback.future(), response_future)
self.assertIsNone(response_future.exception())
self.assertIsNone(response_future.traceback())
def testSuccessfulUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
response_iterator = self._invoker.future(group, method)(
request, test_constants.LONG_TIMEOUT)
responses = list(response_iterator)
test_messages.verify(request, responses, self)
def testSuccessfulStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
request_iterator = _PauseableIterator(iter(requests))
callback = _Callback()
# Use of a paused iterator of requests allows us to test that control is
# returned to calling code before the iterator yields any requests.
with request_iterator.pause():
response_future = self._invoker.future(group, method)(
request_iterator, test_constants.LONG_TIMEOUT)
response_future.add_done_callback(callback)
future_passed_to_callback = callback.future()
response = future_passed_to_callback.result()
test_messages.verify(requests, response, self)
self.assertIs(future_passed_to_callback, response_future)
self.assertIsNone(response_future.exception())
self.assertIsNone(response_future.traceback())
def testSuccessfulStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
request_iterator = _PauseableIterator(iter(requests))
# Use of a paused iterator of requests allows us to test that control is
# returned to calling code before the iterator yields any requests.
with request_iterator.pause():
response_iterator = self._invoker.future(group, method)(
request_iterator, test_constants.LONG_TIMEOUT)
responses = list(response_iterator)
test_messages.verify(requests, responses, self)
def testSequentialInvocations(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
first_request = test_messages.request()
second_request = test_messages.request()
first_response_future = self._invoker.future(group, method)(
first_request, test_constants.LONG_TIMEOUT)
first_response = first_response_future.result()
test_messages.verify(first_request, first_response, self)
second_response_future = self._invoker.future(group, method)(
second_request, test_constants.LONG_TIMEOUT)
second_response = second_response_future.result()
test_messages.verify(second_request, second_response, self)
def testParallelInvocations(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
first_request = test_messages.request()
second_request = test_messages.request()
first_response_future = self._invoker.future(group, method)(
first_request, test_constants.LONG_TIMEOUT)
second_response_future = self._invoker.future(group, method)(
second_request, test_constants.LONG_TIMEOUT)
first_response = first_response_future.result()
second_response = second_response_future.result()
test_messages.verify(first_request, first_response, self)
test_messages.verify(second_request, second_response, self)
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = []
response_futures = []
for _ in range(test_constants.THREAD_CONCURRENCY):
request = test_messages.request()
response_future = self._invoker.future(group, method)(
request, test_constants.LONG_TIMEOUT)
requests.append(request)
response_futures.append(response_future)
responses = [
response_future.result()
for response_future in response_futures
]
for request, response in zip(requests, responses):
test_messages.verify(request, response, self)
def testWaitingForSomeButNotAllParallelInvocations(self):
pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = []
response_futures_to_indices = {}
for index in range(test_constants.THREAD_CONCURRENCY):
request = test_messages.request()
inner_response_future = self._invoker.future(group, method)(
request, test_constants.LONG_TIMEOUT)
outer_response_future = pool.submit(
inner_response_future.result)
requests.append(request)
response_futures_to_indices[outer_response_future] = index
some_completed_response_futures_iterator = itertools.islice(
futures.as_completed(response_futures_to_indices),
test_constants.THREAD_CONCURRENCY // 2)
for response_future in some_completed_response_futures_iterator:
index = response_futures_to_indices[response_future]
test_messages.verify(requests[index],
response_future.result(), self)
pool.shutdown(wait=True)
def testCancelledUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = _Callback()
with self._control.pause():
response_future = self._invoker.future(group, method)(
request, test_constants.LONG_TIMEOUT)
response_future.add_done_callback(callback)
cancel_method_return_value = response_future.cancel()
self.assertIs(callback.future(), response_future)
self.assertFalse(cancel_method_return_value)
self.assertTrue(response_future.cancelled())
with self.assertRaises(future.CancelledError):
response_future.result()
with self.assertRaises(future.CancelledError):
response_future.exception()
with self.assertRaises(future.CancelledError):
response_future.traceback()
def testCancelledUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
with self._control.pause():
response_iterator = self._invoker.future(group, method)(
request, test_constants.LONG_TIMEOUT)
response_iterator.cancel()
with self.assertRaises(face.CancellationError):
next(response_iterator)
def testCancelledStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = _Callback()
with self._control.pause():
response_future = self._invoker.future(group, method)(
iter(requests), test_constants.LONG_TIMEOUT)
response_future.add_done_callback(callback)
cancel_method_return_value = response_future.cancel()
self.assertIs(callback.future(), response_future)
self.assertFalse(cancel_method_return_value)
self.assertTrue(response_future.cancelled())
with self.assertRaises(future.CancelledError):
response_future.result()
with self.assertRaises(future.CancelledError):
response_future.exception()
with self.assertRaises(future.CancelledError):
response_future.traceback()
def testCancelledStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
with self._control.pause():
response_iterator = self._invoker.future(group, method)(
iter(requests), test_constants.LONG_TIMEOUT)
response_iterator.cancel()
with self.assertRaises(face.CancellationError):
next(response_iterator)
def testExpiredUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = _Callback()
with self._control.pause():
response_future = self._invoker.future(group, method)(
request, _3069_test_constant.REALLY_SHORT_TIMEOUT)
response_future.add_done_callback(callback)
self.assertIs(callback.future(), response_future)
self.assertIsInstance(response_future.exception(),
face.ExpirationError)
with self.assertRaises(face.ExpirationError):
response_future.result()
self.assertIsInstance(response_future.exception(),
face.AbortionError)
self.assertIsNotNone(response_future.traceback())
def testExpiredUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
with self._control.pause():
response_iterator = self._invoker.future(group, method)(
request, _3069_test_constant.REALLY_SHORT_TIMEOUT)
with self.assertRaises(face.ExpirationError):
list(response_iterator)
def testExpiredStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = _Callback()
with self._control.pause():
response_future = self._invoker.future(group, method)(
iter(requests),
_3069_test_constant.REALLY_SHORT_TIMEOUT)
response_future.add_done_callback(callback)
self.assertIs(callback.future(), response_future)
self.assertIsInstance(response_future.exception(),
face.ExpirationError)
with self.assertRaises(face.ExpirationError):
response_future.result()
self.assertIsInstance(response_future.exception(),
face.AbortionError)
self.assertIsNotNone(response_future.traceback())
def testExpiredStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
with self._control.pause():
response_iterator = self._invoker.future(group, method)(
iter(requests),
_3069_test_constant.REALLY_SHORT_TIMEOUT)
with self.assertRaises(face.ExpirationError):
list(response_iterator)
def testFailedUnaryRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_unary_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
callback = _Callback()
abortion_callback = _Callback()
with self._control.fail():
response_future = self._invoker.future(group, method)(
request, _3069_test_constant.REALLY_SHORT_TIMEOUT)
response_future.add_done_callback(callback)
response_future.add_abortion_callback(abortion_callback)
self.assertIs(callback.future(), response_future)
# Because the servicer fails outside of the thread from which the
                # servicer-side runtime called into it, its failure is
# indistinguishable from simply not having called its
# response_callback before the expiration of the RPC.
self.assertIsInstance(response_future.exception(),
face.ExpirationError)
with self.assertRaises(face.ExpirationError):
response_future.result()
self.assertIsNotNone(response_future.traceback())
self.assertIsNotNone(abortion_callback.future())
def testFailedUnaryRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.unary_stream_messages_sequences)):
for test_messages in test_messages_sequence:
request = test_messages.request()
# Because the servicer fails outside of the thread from which the
                # servicer-side runtime called into it, its failure is indistinguishable
# from simply not having called its response_consumer before the
# expiration of the RPC.
with self._control.fail(), self.assertRaises(
face.ExpirationError):
response_iterator = self._invoker.future(group, method)(
request, _3069_test_constant.REALLY_SHORT_TIMEOUT)
list(response_iterator)
def testFailedStreamRequestUnaryResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_unary_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
callback = _Callback()
abortion_callback = _Callback()
with self._control.fail():
response_future = self._invoker.future(group, method)(
iter(requests),
_3069_test_constant.REALLY_SHORT_TIMEOUT)
response_future.add_done_callback(callback)
response_future.add_abortion_callback(abortion_callback)
self.assertIs(callback.future(), response_future)
# Because the servicer fails outside of the thread from which the
                # servicer-side runtime called into it, its failure is
# indistinguishable from simply not having called its
# response_callback before the expiration of the RPC.
self.assertIsInstance(response_future.exception(),
face.ExpirationError)
with self.assertRaises(face.ExpirationError):
response_future.result()
self.assertIsNotNone(response_future.traceback())
self.assertIsNotNone(abortion_callback.future())
def testFailedStreamRequestStreamResponse(self):
for (group, method), test_messages_sequence in (
six.iteritems(self._digest.stream_stream_messages_sequences)):
for test_messages in test_messages_sequence:
requests = test_messages.requests()
# Because the servicer fails outside of the thread from which the
                # servicer-side runtime called into it, its failure is indistinguishable
# from simply not having called its response_consumer before the
# expiration of the RPC.
with self._control.fail(), self.assertRaises(
face.ExpirationError):
response_iterator = self._invoker.future(group, method)(
iter(requests),
_3069_test_constant.REALLY_SHORT_TIMEOUT)
list(response_iterator)
| bsd-3-clause | -4,295,427,254,600,255,000 | 2,987,466,988,907,170,300 | 47.015296 | 97 | 0.603058 | false |
raphaelmerx/django | tests/view_tests/tests/test_debug.py | 99 | 40145 | # -*- coding: utf-8 -*-
# This coding header is significant for tests, as the debug view is parsing
# files to search for such a header to decode the source file content
from __future__ import unicode_literals
import importlib
import inspect
import os
import re
import sys
import tempfile
from unittest import skipIf
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.db import DatabaseError, connection
from django.template import TemplateDoesNotExist
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import LoggingCaptureMixin
from django.utils import six
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import SimpleLazyObject
from django.views.debug import (
CallableSettingWrapper, ExceptionReporter, technical_500_response,
)
from .. import BrokenException, except_args
from ..views import (
custom_exception_reporter_filter_view, multivalue_dict_key_error,
non_sensitive_view, paranoid_view, sensitive_args_function_caller,
sensitive_kwargs_function_caller, sensitive_method_view, sensitive_view,
)
if six.PY3:
from .py3_test_debug import Py3ExceptionReporterTests # NOQA
class CallableSettingWrapperTests(SimpleTestCase):
""" Unittests for CallableSettingWrapper
"""
def test_repr(self):
class WrappedCallable(object):
def __repr__(self):
return "repr from the wrapped callable"
def __call__(self):
pass
actual = repr(CallableSettingWrapper(WrappedCallable()))
self.assertEqual(actual, "repr from the wrapped callable")
@override_settings(DEBUG=True, ROOT_URLCONF="view_tests.urls")
class DebugViewTests(LoggingCaptureMixin, SimpleTestCase):
def test_files(self):
response = self.client.get('/raises/')
self.assertEqual(response.status_code, 500)
data = {
'file_data.txt': SimpleUploadedFile('file_data.txt', b'haha'),
}
response = self.client.post('/raises/', data)
self.assertContains(response, 'file_data.txt', status_code=500)
self.assertNotContains(response, 'haha', status_code=500)
def test_400(self):
# Ensure that when DEBUG=True, technical_500_template() is called.
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
# Ensure no 403.html template exists to test the default case.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
}])
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
# Set up a test 403.html template.
@override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
('django.template.loaders.locmem.Loader', {
'403.html': 'This is a test template for a 403 error ({{ exception }}).',
}),
],
},
}])
def test_403_template(self):
response = self.client.get('/raises403/')
self.assertContains(response, 'test template', status_code=403)
self.assertContains(response, '(Insufficient Permissions).', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_raised_404(self):
response = self.client.get('/views/raises404/')
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
def test_404_not_in_urls(self):
response = self.client.get('/not-in-urls')
self.assertNotContains(response, "Raised by:", status_code=404)
self.assertContains(response, "<code>not-in-urls</code>, didn't match", status_code=404)
def test_technical_404(self):
response = self.client.get('/views/technical404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.technical404", status_code=404)
def test_classbased_technical_404(self):
response = self.client.get('/views/classbased404/')
self.assertContains(response, "Raised by:", status_code=404)
self.assertContains(response, "view_tests.views.Http404View", status_code=404)
def test_view_exceptions(self):
for n in range(len(except_args)):
self.assertRaises(BrokenException, self.client.get,
reverse('view_exception', args=(n,)))
def test_non_l10ned_numeric_ids(self):
"""
        Numeric IDs and the line numbers in fancy traceback context blocks shouldn't be localized.
"""
with self.settings(DEBUG=True, USE_L10N=True):
response = self.client.get('/raises500/')
# We look for a HTML fragment of the form
# '<div class="context" id="c38123208">', not '<div class="context" id="c38,123,208"'
self.assertContains(response, '<div class="context" id="', status_code=500)
match = re.search(b'<div class="context" id="(?P<id>[^"]+)">', response.content)
self.assertIsNotNone(match)
id_repr = match.group('id')
self.assertFalse(re.search(b'[^c0-9]', id_repr),
"Numeric IDs in debug response HTML page shouldn't be localized (value: %s)." % id_repr)
def test_template_exceptions(self):
for n in range(len(except_args)):
try:
self.client.get(reverse('template_exception', args=(n,)))
except Exception:
raising_loc = inspect.trace()[-1][-2][0].strip()
self.assertNotEqual(raising_loc.find('raise BrokenException'), -1,
"Failed to find 'raise BrokenException' in last frame of traceback, instead found: %s" %
raising_loc)
def test_template_loader_postmortem(self):
"""Tests for not existing file"""
template_name = "notfound.html"
with tempfile.NamedTemporaryFile(prefix=template_name) as tmpfile:
tempdir = os.path.dirname(tmpfile.name)
template_path = os.path.join(tempdir, template_name)
with override_settings(TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [tempdir],
}]):
response = self.client.get(reverse('raises_template_does_not_exist', kwargs={"path": template_name}))
self.assertContains(response, "%s (Source does not exist)" % template_path, status_code=500, count=2)
def test_no_template_source_loaders(self):
"""
Make sure if you don't specify a template, the debug view doesn't blow up.
"""
self.assertRaises(TemplateDoesNotExist, self.client.get, '/render_no_template/')
@override_settings(ROOT_URLCONF='view_tests.default_urls')
def test_default_urlconf_template(self):
"""
        Make sure that the default urlconf template is shown instead
of the technical 404 page, if the user has not altered their
url conf yet.
"""
response = self.client.get('/')
self.assertContains(
response,
"<h2>Congratulations on your first Django-powered page.</h2>"
)
@override_settings(ROOT_URLCONF='view_tests.regression_21530_urls')
def test_regression_21530(self):
"""
Regression test for bug #21530.
If the admin app include is replaced with exactly one url
pattern, then the technical 404 template should be displayed.
The bug here was that an AttributeError caused a 500 response.
"""
response = self.client.get('/')
self.assertContains(
response,
"Page not found <span>(404)</span>",
status_code=404
)
class DebugViewQueriesAllowedTests(SimpleTestCase):
# May need a query to initialize MySQL connection
allow_database_queries = True
def test_handle_db_exception(self):
"""
Ensure the debug view works when a database exception is raised by
performing an invalid query and passing the exception to the debug view.
"""
with connection.cursor() as cursor:
try:
cursor.execute('INVALID SQL')
except DatabaseError:
exc_info = sys.exc_info()
rf = RequestFactory()
response = technical_500_response(rf.get('/'), *exc_info)
self.assertContains(response, 'OperationalError at /', status_code=500)
@override_settings(
DEBUG=True,
ROOT_URLCONF="view_tests.urls",
# No template directories are configured, so no templates will be found.
TEMPLATES=[{
'BACKEND': 'django.template.backends.dummy.TemplateStrings',
}],
)
class NonDjangoTemplatesDebugViewTests(SimpleTestCase):
def test_400(self):
# Ensure that when DEBUG=True, technical_500_template() is called.
response = self.client.get('/raises400/')
self.assertContains(response, '<div class="context" id="', status_code=400)
def test_403(self):
response = self.client.get('/raises403/')
self.assertContains(response, '<h1>403 Forbidden</h1>', status_code=403)
def test_404(self):
response = self.client.get('/raises404/')
self.assertEqual(response.status_code, 404)
def test_template_not_found_error(self):
# Raises a TemplateDoesNotExist exception and shows the debug view.
url = reverse('raises_template_does_not_exist', kwargs={"path": "notfound.html"})
response = self.client.get(url)
self.assertContains(response, '<div class="context" id="', status_code=500)
class ExceptionReporterTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ValueError at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ValueError</h1>', html)
self.assertIn('<pre class="exception_value">Can't find my keys</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertIn('<th>Exception Type:</th>', html)
self.assertIn('<th>Exception Value:</th>', html)
self.assertIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_eol_support(self):
"""Test that the ExceptionReporter supports Unix, Windows and Macintosh EOL markers"""
LINES = list('print %d' % i for i in range(1, 6))
reporter = ExceptionReporter(None, None, None, None)
for newline in ['\n', '\r\n', '\r']:
fd, filename = tempfile.mkstemp(text=False)
os.write(fd, force_bytes(newline.join(LINES) + newline))
os.close(fd)
try:
self.assertEqual(
reporter._get_lines_from_file(filename, 3, 2),
(1, LINES[1:3], LINES[3], LINES[4:])
)
finally:
os.unlink(filename)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">No exception message supplied</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report at /test_view/</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertIn('<th>Request Method:</th>', html)
self.assertIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertNotIn('<p>Request data not supplied</p>', html)
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
html = reporter.get_traceback_html()
self.assertIn('<h1>Report</h1>', html)
self.assertIn('<pre class="exception_value">I'm a little teapot</pre>', html)
self.assertNotIn('<th>Request Method:</th>', html)
self.assertNotIn('<th>Request URL:</th>', html)
self.assertNotIn('<th>Exception Type:</th>', html)
self.assertNotIn('<th>Exception Value:</th>', html)
self.assertNotIn('<h2>Traceback ', html)
self.assertIn('<h2>Request information</h2>', html)
self.assertIn('<p>Request data not supplied</p>', html)
def test_non_utf8_values_handling(self):
"Non-UTF-8 exceptions/values should not make the output generation choke."
try:
class NonUtf8Output(Exception):
def __repr__(self):
return b'EXC\xe9EXC'
somevar = b'VAL\xe9VAL' # NOQA
raise NonUtf8Output()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('VAL\\xe9VAL', html)
self.assertIn('EXC\\xe9EXC', html)
def test_unprintable_values_handling(self):
"Unprintable values should not make the output generation choke."
try:
class OomOutput(object):
def __repr__(self):
raise MemoryError('OOM')
oomvalue = OomOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<td class="code"><pre>Error in formatting', html)
def test_too_large_values_handling(self):
"Large values should not create a large HTML."
large = 256 * 1024
repr_of_str_adds = len(repr(''))
try:
class LargeOutput(object):
def __repr__(self):
return repr('A' * large)
largevalue = LargeOutput() # NOQA
raise ValueError()
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertEqual(len(html) // 1024 // 128, 0) # still fit in 128Kb
self.assertIn('<trimmed %d bytes string>' % (large + repr_of_str_adds,), html)
@skipIf(six.PY2, 'Bug manifests on PY3 only')
def test_unfrozen_importlib(self):
"""
importlib is not a frozen app, but its loader thinks it's frozen which
results in an ImportError on Python 3. Refs #21443.
"""
try:
request = self.rf.get('/test_view/')
importlib.import_module('abc.def.invalid.name')
except Exception:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
html = reporter.get_traceback_html()
self.assertIn('<h1>ImportError at /test_view/</h1>', html)
def test_ignore_traceback_evaluation_exceptions(self):
"""
Don't trip over exceptions generated by crafted objects when
evaluating them while cleansing (#24455).
"""
class BrokenEvaluation(Exception):
pass
def broken_setup():
raise BrokenEvaluation
request = self.rf.get('/test_view/')
broken_lazy = SimpleLazyObject(broken_setup)
try:
bool(broken_lazy)
except BrokenEvaluation:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
try:
html = reporter.get_traceback_html()
except BrokenEvaluation:
self.fail("Broken evaluation in traceback is not caught.")
self.assertIn(
"BrokenEvaluation",
html,
"Evaluation exception reason not mentioned in traceback"
)
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
html = reporter.get_traceback_html()
self.assertIn("http://evil.com/", html)
class PlainTextReportTests(SimpleTestCase):
rf = RequestFactory()
def test_request_and_exception(self):
"A simple exception report can be generated"
try:
request = self.rf.get('/test_view/')
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(request, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError at /test_view/', text)
self.assertIn("Can't find my keys", text)
self.assertIn('Request Method:', text)
self.assertIn('Request URL:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request information:', text)
self.assertNotIn('Request data not supplied', text)
def test_no_request(self):
"An exception report can be generated without request"
try:
raise ValueError("Can't find my keys")
except ValueError:
exc_type, exc_value, tb = sys.exc_info()
reporter = ExceptionReporter(None, exc_type, exc_value, tb)
text = reporter.get_traceback_text()
self.assertIn('ValueError', text)
self.assertIn("Can't find my keys", text)
self.assertNotIn('Request Method:', text)
self.assertNotIn('Request URL:', text)
self.assertIn('Exception Type:', text)
self.assertIn('Exception Value:', text)
self.assertIn('Traceback:', text)
self.assertIn('Request data not supplied', text)
def test_no_exception(self):
"An exception report can be generated for just a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, None, None)
reporter.get_traceback_text()
def test_request_and_message(self):
"A message can be provided in addition to a request"
request = self.rf.get('/test_view/')
reporter = ExceptionReporter(request, None, "I'm a little teapot", None)
reporter.get_traceback_text()
def test_message_only(self):
reporter = ExceptionReporter(None, None, "I'm a little teapot", None)
reporter.get_traceback_text()
@override_settings(ALLOWED_HOSTS='example.com')
def test_disallowed_host(self):
"An exception report can be generated even for a disallowed host."
request = self.rf.get('/', HTTP_HOST='evil.com')
reporter = ExceptionReporter(request, None, None, None)
text = reporter.get_traceback_text()
self.assertIn("http://evil.com/", text)
class ExceptionReportTestMixin(object):
# Mixin used in the ExceptionReporterFilterTests and
# AjaxResponseExceptionReporterFilter tests below
breakfast_data = {'sausage-key': 'sausage-value',
'baked-beans-key': 'baked-beans-value',
'hash-brown-key': 'hash-brown-value',
'bacon-key': 'bacon-value'}
def verify_unsafe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# All variables are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertContains(response, k, status_code=500)
self.assertContains(response, v, status_code=500)
def verify_safe_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Non-sensitive variable's name and value are shown.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertContains(response, 'scrambled', status_code=500)
# Sensitive variable's name is shown but not its value.
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# Non-sensitive POST parameters' values are shown.
self.assertContains(response, 'baked-beans-value', status_code=500)
self.assertContains(response, 'hash-brown-value', status_code=500)
# Sensitive POST parameters' values are not shown.
self.assertNotContains(response, 'sausage-value', status_code=500)
self.assertNotContains(response, 'bacon-value', status_code=500)
def verify_paranoid_response(self, view, check_for_vars=True,
check_for_POST_params=True):
"""
Asserts that no variables or POST parameters are displayed in the response.
"""
request = self.rf.post('/some_url/', self.breakfast_data)
response = view(request)
if check_for_vars:
# Show variable names but not their values.
self.assertContains(response, 'cooked_eggs', status_code=500)
self.assertNotContains(response, 'scrambled', status_code=500)
self.assertContains(response, 'sauce', status_code=500)
self.assertNotContains(response, 'worcestershire', status_code=500)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertContains(response, k, status_code=500)
# No POST parameters' values are shown.
self.assertNotContains(response, v, status_code=500)
def verify_unsafe_email(self, view, check_for_POST_params=True):
"""
        Asserts that potentially sensitive info is displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = force_text(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = force_text(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters are shown.
self.assertIn(k, body_plain)
self.assertIn(v, body_plain)
self.assertIn(k, body_html)
self.assertIn(v, body_html)
def verify_safe_email(self, view, check_for_POST_params=True):
"""
        Asserts that certain sensitive info is not displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body_plain = force_text(email.body)
self.assertNotIn('cooked_eggs', body_plain)
self.assertNotIn('scrambled', body_plain)
self.assertNotIn('sauce', body_plain)
self.assertNotIn('worcestershire', body_plain)
# Frames vars are shown in html email reports.
body_html = force_text(email.alternatives[0][0])
self.assertIn('cooked_eggs', body_html)
self.assertIn('scrambled', body_html)
self.assertIn('sauce', body_html)
self.assertNotIn('worcestershire', body_html)
if check_for_POST_params:
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body_plain)
# Non-sensitive POST parameters' values are shown.
self.assertIn('baked-beans-value', body_plain)
self.assertIn('hash-brown-value', body_plain)
self.assertIn('baked-beans-value', body_html)
self.assertIn('hash-brown-value', body_html)
# Sensitive POST parameters' values are not shown.
self.assertNotIn('sausage-value', body_plain)
self.assertNotIn('bacon-value', body_plain)
self.assertNotIn('sausage-value', body_html)
self.assertNotIn('bacon-value', body_html)
def verify_paranoid_email(self, view):
"""
Asserts that no variables or POST parameters are displayed in the email report.
"""
with self.settings(ADMINS=[('Admin', '[email protected]')]):
mail.outbox = [] # Empty outbox
request = self.rf.post('/some_url/', self.breakfast_data)
view(request)
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
# Frames vars are never shown in plain text email reports.
body = force_text(email.body)
self.assertNotIn('cooked_eggs', body)
self.assertNotIn('scrambled', body)
self.assertNotIn('sauce', body)
self.assertNotIn('worcestershire', body)
for k, v in self.breakfast_data.items():
# All POST parameters' names are shown.
self.assertIn(k, body)
# No POST parameters' values are shown.
self.assertNotIn(v, body)
@override_settings(ROOT_URLCONF='view_tests.urls')
class ExceptionReporterFilterTests(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Ensure that sensitive information can be filtered out of error reports.
Refs #14614.
"""
rf = RequestFactory()
def test_non_sensitive_request(self):
"""
        Ensure that everything (request info and frame variables) can be seen
in the default error reports for non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view)
self.verify_unsafe_email(non_sensitive_view)
def test_sensitive_request(self):
"""
Ensure that sensitive POST parameters and frame variables cannot be
seen in the default error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view)
self.verify_unsafe_email(sensitive_view)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view)
self.verify_safe_email(sensitive_view)
def test_paranoid_request(self):
"""
Ensure that no POST parameters and frame variables can be seen in the
default error reports for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view)
self.verify_unsafe_email(paranoid_view)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view)
self.verify_paranoid_email(paranoid_view)
def test_multivalue_dict_key_error(self):
"""
#21098 -- Ensure that sensitive POST parameters cannot be seen in the
        error reports if request.POST['nonexistent_key'] throws an error.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(multivalue_dict_key_error)
self.verify_unsafe_email(multivalue_dict_key_error)
with self.settings(DEBUG=False):
self.verify_safe_response(multivalue_dict_key_error)
self.verify_safe_email(multivalue_dict_key_error)
def test_custom_exception_reporter_filter(self):
"""
Ensure that it's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view)
self.verify_unsafe_email(custom_exception_reporter_filter_view)
def test_sensitive_method(self):
"""
Ensure that the sensitive_variables decorator works with object
methods.
Refs #18379.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_method_view,
check_for_POST_params=False)
self.verify_unsafe_email(sensitive_method_view,
check_for_POST_params=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_method_view,
check_for_POST_params=False)
self.verify_safe_email(sensitive_method_view,
check_for_POST_params=False)
def test_sensitive_function_arguments(self):
"""
Ensure that sensitive variables don't leak in the sensitive_variables
decorator's frame, when those variables are passed as arguments to the
decorated function.
Refs #19453.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_args_function_caller)
self.verify_unsafe_email(sensitive_args_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_args_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_args_function_caller, check_for_POST_params=False)
def test_sensitive_function_keyword_arguments(self):
"""
Ensure that sensitive variables don't leak in the sensitive_variables
decorator's frame, when those variables are passed as keyword arguments
to the decorated function.
Refs #19453.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_kwargs_function_caller)
self.verify_unsafe_email(sensitive_kwargs_function_caller)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_kwargs_function_caller, check_for_POST_params=False)
self.verify_safe_email(sensitive_kwargs_function_caller, check_for_POST_params=False)
def test_callable_settings(self):
"""
Callable settings should not be evaluated in the debug page (#21345).
"""
def callable_setting():
return "This should not be displayed"
with self.settings(DEBUG=True, FOOBAR=callable_setting):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_callable_settings_forbidding_to_set_attributes(self):
"""
        Callable settings which forbid setting attributes should not break
the debug page (#23070).
"""
class CallableSettingWithSlots(object):
__slots__ = []
def __call__(self):
return "This should not be displayed"
with self.settings(DEBUG=True, WITH_SLOTS=CallableSettingWithSlots()):
response = self.client.get('/raises500/')
self.assertNotContains(response, "This should not be displayed", status_code=500)
def test_dict_setting_with_non_str_key(self):
"""
A dict setting containing a non-string key should not break the
debug page (#12744).
"""
with self.settings(DEBUG=True, FOOBAR={42: None}):
response = self.client.get('/raises500/')
self.assertContains(response, 'FOOBAR', status_code=500)
def test_sensitive_settings(self):
"""
The debug page should not show some sensitive settings
(password, secret key, ...).
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
with self.settings(DEBUG=True, **{setting: "should not be displayed"}):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
def test_settings_with_sensitive_keys(self):
"""
The debug page should filter out some sensitive information found in
dict settings.
"""
sensitive_settings = [
'SECRET_KEY',
'PASSWORD',
'API_KEY',
'AUTH_TOKEN',
]
for setting in sensitive_settings:
FOOBAR = {
setting: "should not be displayed",
'recursive': {setting: "should not be displayed"},
}
with self.settings(DEBUG=True, FOOBAR=FOOBAR):
response = self.client.get('/raises500/')
self.assertNotContains(response, 'should not be displayed', status_code=500)
class AjaxResponseExceptionReporterFilter(ExceptionReportTestMixin, LoggingCaptureMixin, SimpleTestCase):
"""
Ensure that sensitive information can be filtered out of error reports.
Here we specifically test the plain text 500 debug-only error page served
when it has been detected the request was sent by JS code. We don't check
for (non)existence of frames vars in the traceback information section of
the response content because we don't include them in these error pages.
Refs #14614.
"""
rf = RequestFactory(HTTP_X_REQUESTED_WITH='XMLHttpRequest')
def test_non_sensitive_request(self):
"""
        Ensure that request info can be seen in the default error reports for
non-sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(non_sensitive_view, check_for_vars=False)
def test_sensitive_request(self):
"""
Ensure that sensitive POST parameters cannot be seen in the default
error reports for sensitive requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(sensitive_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_safe_response(sensitive_view, check_for_vars=False)
def test_paranoid_request(self):
"""
Ensure that no POST parameters can be seen in the default error reports
for "paranoid" requests.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(paranoid_view, check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_paranoid_response(paranoid_view, check_for_vars=False)
def test_custom_exception_reporter_filter(self):
"""
Ensure that it's possible to assign an exception reporter filter to
the request to bypass the one set in DEFAULT_EXCEPTION_REPORTER_FILTER.
"""
with self.settings(DEBUG=True):
self.verify_unsafe_response(custom_exception_reporter_filter_view,
check_for_vars=False)
with self.settings(DEBUG=False):
self.verify_unsafe_response(custom_exception_reporter_filter_view,
check_for_vars=False)
| bsd-3-clause | -3,679,555,455,563,020,000 | -6,681,140,780,323,797,000 | 42.027867 | 117 | 0.619131 | false |
sharad/calibre | src/calibre/ebooks/metadata/sources/big_book_search.py | 8 | 2177 | #!/usr/bin/env python
# vim:fileencoding=UTF-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
from calibre.ebooks.metadata.sources.base import Source, Option
def get_urls(br, tokens):
from urllib import quote_plus
from mechanize import Request
from lxml import html
escaped = [quote_plus(x.encode('utf-8')) for x in tokens if x and x.strip()]
q = b'+'.join(escaped)
url = 'http://bigbooksearch.com/books/'+q
br.open(url).read()
req = Request('http://bigbooksearch.com/query.php?SearchIndex=books&Keywords=%s&ItemPage=1'%q)
req.add_header('X-Requested-With', 'XMLHttpRequest')
req.add_header('Referer', url)
raw = br.open(req).read()
root = html.fromstring(raw.decode('utf-8'))
urls = [i.get('src') for i in root.xpath('//img[@src]')]
return urls
class BigBookSearch(Source):
name = 'Big Book Search'
description = _('Downloads multiple book covers from Amazon. Useful to find alternate covers.')
capabilities = frozenset(['cover'])
config_help_message = _('Configure the Big Book Search plugin')
can_get_multiple_covers = True
options = (Option('max_covers', 'number', 5, _('Maximum number of covers to get'),
_('The maximum number of covers to process from the search result')),
)
supports_gzip_transfer_encoding = True
def download_cover(self, log, result_queue, abort,
title=None, authors=None, identifiers={}, timeout=30, get_best_cover=False):
if not title:
return
br = self.browser
tokens = tuple(self.get_title_tokens(title)) + tuple(self.get_author_tokens(authors))
urls = get_urls(br, tokens)
self.download_multiple_covers(title, authors, urls, get_best_cover, timeout, result_queue, abort, log)
def test():
from calibre import browser
import pprint
br = browser()
urls = get_urls(br, ['consider', 'phlebas', 'banks'])
pprint.pprint(urls)
if __name__ == '__main__':
test()
| gpl-3.0 | -1,086,927,575,637,389,400 | 7,778,114,437,024,421,000 | 36.534483 | 110 | 0.646762 | false |
brianjimenez/lightdock | lightdock/scoring/dfire2/driver.py | 1 | 7814 | """DFIRE2 potential scoring function
Yuedong Yang, Yaoqi Zhou. Ab initio folding of terminal segments with secondary structures
reveals the fine difference between two closely related all-atom statistical energy functions.
Protein Science, 17:1212-1219 (2008)
"""
import os
import numpy as np
from lightdock.structure.model import DockingModel
from lightdock.scoring.functions import ModelAdapter, ScoringFunction
from lightdock.structure.space import SpacePoints
from lightdock.scoring.dfire2.c.cdfire2 import calculate_dfire2
from lightdock.constants import DEFAULT_CONTACT_RESTRAINTS_CUTOFF
# Potential constants
atom_type_number = 167
bin_number = 30
DFIRE2_ATOM_TYPES = {'GLY CA': 40, 'HIS C': 45, 'VAL O': 137, 'GLY O': 42, 'GLY N': 39, 'HIS O': 46, 'HIS N': 43,
'TRP CE3': 151, 'GLY C': 41, 'TRP CE2': 150, 'LYS NZ': 69, 'MET C': 80, 'VAL N': 134, 'PRO CA': 95,
'MET O': 81, 'MET N': 78, 'SER OG': 126, 'ARG NH2': 120, 'VAL C': 136, 'THR CG2': 133, 'ALA CB': 4,
'ALA CA': 1, 'TRP CG': 146, 'TRP CA': 142, 'TRP CB': 145, 'ALA N': 0, 'ILE CB': 57, 'ILE CA': 54,
'TRP CH2': 154, 'GLU CA': 20, 'GLU CB': 23, 'GLU CD': 25, 'GLU CG': 24, 'HIS CG': 48,
'ASP OD1': 17, 'HIS CA': 44, 'CYS N': 5, 'CYS O': 8, 'HIS CE1': 51, 'TYR CG': 160, 'TYR CA': 156,
'TYR CB': 159, 'CYS C': 7, 'ARG CB': 114, 'LYS C': 63, 'ARG CG': 115, 'ARG CD': 116,
'THR OG1': 132, 'LYS O': 64, 'LYS N': 61, 'SER C': 123, 'ILE CD1': 60, 'PRO CB': 98, 'PRO CD': 100,
'PRO CG': 99, 'ARG CZ': 118, 'SER O': 124, 'SER N': 121, 'PHE CD1': 34, 'PHE CD2': 35,
'THR CA': 128, 'HIS CD2': 50, 'THR CB': 131, 'PRO C': 96, 'PRO N': 94, 'PRO O': 97, 'PHE CA': 29,
'MET CE': 85, 'MET CG': 83, 'MET CA': 79, 'ILE C': 55, 'MET CB': 82, 'TRP CD2': 148,
'TRP CD1': 147, 'GLN CD': 107, 'ILE CG1': 58, 'ILE CG2': 59, 'PHE CE2': 37, 'PHE CE1': 36,
'GLU OE1': 26, 'GLU OE2': 27, 'ASP CG': 16, 'ASP CB': 15, 'ASP CA': 12, 'THR O': 130, 'THR N': 127,
'SER CA': 122, 'SER CB': 125, 'PHE CG': 33, 'GLU O': 22, 'GLU N': 19, 'PHE CB': 32, 'VAL CG1': 139,
'GLU C': 21, 'ILE O': 56, 'ILE N': 53, 'GLN CA': 102, 'GLN CB': 105, 'ASN C': 88, 'VAL CG2': 140,
'TRP CZ2': 152, 'TRP CZ3': 153, 'PHE CZ': 38, 'TRP O': 144, 'TRP N': 141, 'LEU CB': 74,
'GLN N': 101, 'GLN O': 104, 'LEU O': 73, 'GLN C': 103, 'TRP C': 143, 'HIS CB': 47, 'GLN NE2': 109,
'LEU CD2': 77, 'ASP OD2': 18, 'LEU CD1': 76, 'VAL CA': 135, 'ASN OD1': 92, 'ALA O': 3,
'MET SD': 84, 'ALA C': 2, 'THR C': 129, 'TYR CD1': 161, 'ARG NH1': 119, 'TYR CD2': 162,
'ASN ND2': 93, 'TRP NE1': 149, 'HIS ND1': 49, 'LEU C': 72, 'ASN O': 89, 'ASN N': 86, 'ASP C': 13,
'LEU CA': 71, 'ASP O': 14, 'ASP N': 11, 'CYS CB': 9, 'LEU N': 70, 'LEU CG': 75, 'CYS CA': 6,
'TYR OH': 166, 'ASN CA': 87, 'ASN CB': 90, 'ASN CG': 91, 'TYR CE2': 164, 'ARG C': 112,
'TYR CE1': 163, 'HIS NE2': 52, 'ARG O': 113, 'ARG N': 110, 'TYR C': 157, 'GLN CG': 106,
'ARG CA': 111, 'TYR N': 155, 'TYR O': 158, 'CYS SG': 10, 'TYR CZ': 165, 'ARG NE': 117,
'VAL CB': 138, 'LYS CB': 65, 'LYS CA': 62, 'PHE C': 30, 'LYS CG': 66, 'LYS CE': 68, 'LYS CD': 67,
'GLN OE1': 108, 'PHE N': 28, 'PHE O': 31}
class DFIRE2Potential(object):
"""Loads DFIRE2 potentials information"""
def __init__(self):
data_path = os.path.dirname(os.path.realpath(__file__)) + '/data/'
self.energy = np.load(data_path + 'dfire2_energies.npy').ravel()
class DFIRE2Object(object):
def __init__(self, residue_index, atom_index):
self.residue_index = residue_index
self.atom_index = atom_index
class DFIRE2Adapter(ModelAdapter, DFIRE2Potential):
"""Adapts a given Complex to a DockingModel object suitable for this
DFIRE2 scoring function.
"""
def _get_docking_model(self, molecule, restraints):
"""Builds a suitable docking model for this scoring function"""
objects = []
coordinates = []
parsed_restraints = {}
atom_index = 0
for residue in molecule.residues:
for rec_atom in residue.atoms:
rec_atom_type = rec_atom.residue_name + ' ' + rec_atom.name
if rec_atom_type in DFIRE2_ATOM_TYPES:
objects.append(DFIRE2Object(residue.number, DFIRE2_ATOM_TYPES[rec_atom_type]))
coordinates.append([rec_atom.x, rec_atom.y, rec_atom.z])
# Restraints support
res_id = "%s.%s.%s" % (rec_atom.chain_id, residue.name, str(residue.number))
if restraints and res_id in restraints:
try:
parsed_restraints[res_id].append(atom_index)
except:
parsed_restraints[res_id] = [atom_index]
atom_index += 1
try:
return DockingModel(objects, SpacePoints(coordinates), parsed_restraints, n_modes=molecule.n_modes.copy())
except AttributeError:
return DockingModel(objects, SpacePoints(coordinates), parsed_restraints)
class DFIRE2(ScoringFunction):
"""Implements DFIRE2 potential"""
def __init__(self, weight=1.0):
super(DFIRE2, self).__init__(weight)
self.cached = False
self.potential = DFIRE2Potential()
def __call__(self, receptor, receptor_coordinates, ligand, ligand_coordinates):
if not self.cached:
self.res_index = []
self.atom_index = []
for o in receptor.objects:
self.res_index.append(o.residue_index)
self.atom_index.append(o.atom_index)
last = self.res_index[-1]
for o in ligand.objects:
self.res_index.append(o.residue_index + last)
self.atom_index.append(o.atom_index)
self.res_index = np.array(self.res_index, dtype=np.int32)
self.atom_index = np.array(self.atom_index, dtype=np.int32)
self.molecule_length = len(self.res_index)
self.cached = True
return self.evaluate_energy(receptor, receptor_coordinates, ligand, ligand_coordinates)
def evaluate_energy(self, receptor, receptor_coordinates, ligand, ligand_coordinates):
coordinates = np.append(receptor_coordinates.coordinates, ligand_coordinates.coordinates).reshape((-1, 3))
energy, interface_receptor, interface_ligand = calculate_dfire2(self.res_index,
self.atom_index,
coordinates,
self.potential.energy,
self.molecule_length,
DEFAULT_CONTACT_RESTRAINTS_CUTOFF)
# Code to consider contacts in the interface
perc_receptor_restraints = ScoringFunction.restraints_satisfied(receptor.restraints, set(interface_receptor))
perc_ligand_restraints = ScoringFunction.restraints_satisfied(ligand.restraints, set(interface_ligand))
return energy + perc_receptor_restraints * energy + perc_ligand_restraints * energy
# Needed to dynamically load the scoring functions from command line
DefinedScoringFunction = DFIRE2
DefinedModelAdapter = DFIRE2Adapter
| gpl-3.0 | 6,132,850,966,868,575,000 | -3,935,294,673,033,120,000 | 57.75188 | 120 | 0.538905 | false |
MediaBrowser/MediaBrowser.Kodi | default.py | 2 | 1454 | '''
@document : default.py
@package : XBMB3C add-on
@authors : xnappo, null_pointer, im85288
@copyleft : 2013, xnappo
@license : Gnu General Public License - see LICENSE.TXT
@description: XBMB3C XBMC add-on
This file is part of the XBMC XBMB3C Plugin.
XBMB3C Plugin is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
XBMB3C Plugin is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with XBMB3C Plugin. If not, see <http://www.gnu.org/licenses/>.
Thanks to Hippojay for the PleXBMC plugin this is derived from
'''
import xbmcgui
import xbmcaddon
import xbmc
import os
import sys
__settings__ = xbmcaddon.Addon(id='plugin.video.xbmb3c')
__cwd__ = __settings__.getAddonInfo('path')
BASE_RESOURCE_PATH = xbmc.translatePath( os.path.join( __cwd__, 'resources', 'lib' ) )
sys.path.append(BASE_RESOURCE_PATH)
import MainModule
try:
MainModule.MainEntryPoint()
except Exception, msg:
xbmcgui.Dialog().ok("Error", str(msg))
raise
| gpl-2.0 | -7,787,512,765,094,220,000 | 2,582,582,950,167,224,300 | 29.957447 | 86 | 0.696011 | false |
moraesnicol/scrapy | scrapy/settings/deprecated.py | 160 | 1383 | import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
DEPRECATED_SETTINGS = [
('TRACK_REFS', 'no longer needed (trackref is always enabled)'),
('RESPONSE_CLASSES', 'no longer supported'),
('DEFAULT_RESPONSE_ENCODING', 'no longer supported'),
('BOT_VERSION', 'no longer used (user agent defaults to Scrapy now)'),
('ENCODING_ALIASES', 'no longer needed (encoding discovery uses w3lib now)'),
('STATS_ENABLED', 'no longer supported (change STATS_CLASS instead)'),
('SQLITE_DB', 'no longer supported'),
('SELECTORS_BACKEND', 'use SCRAPY_SELECTORS_BACKEND environment variable instead'),
('AUTOTHROTTLE_MIN_DOWNLOAD_DELAY', 'use DOWNLOAD_DELAY instead'),
    ('AUTOTHROTTLE_MAX_CONCURRENCY', 'use CONCURRENT_REQUESTS_PER_DOMAIN instead'),
('REDIRECT_MAX_METAREFRESH_DELAY', 'use METAREFRESH_MAXDELAY instead'),
]
def check_deprecated_settings(settings):
deprecated = [x for x in DEPRECATED_SETTINGS if settings[x[0]] is not None]
if deprecated:
msg = "You are using the following settings which are deprecated or obsolete"
msg += " (ask [email protected] for alternatives):"
msg = msg + "\n " + "\n ".join("%s: %s" % x for x in deprecated)
warnings.warn(msg, ScrapyDeprecationWarning)
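# Minimal sketch of how this check is exercised. A plain dict pre-filled with
# every deprecated key stands in for the Settings object, which would return
# None for unset keys on its own:
def _example_check_deprecated_settings():
    settings = dict.fromkeys(name for name, _ in DEPRECATED_SETTINGS)
    settings['SQLITE_DB'] = 'scrapy.db'  # still carried over from an old project
    # Emits a single ScrapyDeprecationWarning that lists SQLITE_DB.
    check_deprecated_settings(settings)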
| bsd-3-clause | -6,443,114,631,840,156,000 | 8,083,612,889,058,133,000 | 52.192308 | 87 | 0.698482 | false |
Tesora/tesora-tempest | tempest/tests/test_list_tests.py | 34 | 1824 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
import six
import subprocess
from tempest.tests import base
class TestTestList(base.TestCase):
def test_testr_list_tests_no_errors(self):
# Remove unit test discover path from env to test tempest tests
test_env = os.environ.copy()
test_env.pop('OS_TEST_PATH')
import_failures = []
p = subprocess.Popen(['testr', 'list-tests'], stdout=subprocess.PIPE,
env=test_env)
ids, err = p.communicate()
self.assertEqual(0, p.returncode,
"test discovery failed, one or more files cause an "
"error on import %s" % ids)
ids = six.text_type(ids).split('\n')
for test_id in ids:
if re.match('(\w+\.){3}\w+', test_id):
if not test_id.startswith('tempest.'):
parts = test_id.partition('tempest')
fail_id = parts[1] + parts[2]
import_failures.append(fail_id)
error_message = ("The following tests have import failures and aren't"
" being run with test filters %s" % import_failures)
self.assertFalse(import_failures, error_message)
| apache-2.0 | -5,112,530,380,596,708,000 | -7,324,209,273,513,850,000 | 39.533333 | 78 | 0.614035 | false |
mvaled/sentry | src/sentry/message_filters.py | 1 | 16944 | # TODO RaduW 8.06.2019 remove the sentry.filters package and rename this module to filters
from __future__ import absolute_import
import collections
from collections import namedtuple
import re
from sentry.models.projectoption import ProjectOption
from sentry.utils.data_filters import FilterStatKeys
from rest_framework import serializers
from sentry.api.fields.multiplechoice import MultipleChoiceField
from six.moves.urllib.parse import urlparse
from sentry.utils.safe import get_path
from ua_parser.user_agent_parser import Parse
from sentry.signals import inbound_filter_toggled
EventFilteredRet = namedtuple("EventFilteredRet", "should_filter reason")
def should_filter_event(project_config, data):
"""
Checks if an event should be filtered
:param project_config: relay config for the request (for the project really)
:param data: the event data
:return: an EventFilteredRet explaining if the event should be filtered and, if it should the reason
for filtering
"""
for event_filter in get_all_filters():
if _is_filter_enabled(project_config, event_filter) and event_filter(project_config, data):
return EventFilteredRet(should_filter=True, reason=event_filter.spec.id)
return EventFilteredRet(should_filter=False, reason=None)
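# A minimal sketch of how this entry point is driven. The bare class below only
# mimics the single attribute of the real relay project config that the filters
# read (``config["filter_settings"]``); everything else about project_config is
# assumed away here.
def _example_should_filter_event():
    # Enable only the localhost filter and leave the remaining filters disabled.
    filter_settings = {get_filter_key(flt): {"is_enabled": flt is _localhost_filter}
                       for flt in get_all_filters()}
    class _ProjectConfig(object):
        config = {"filter_settings": filter_settings}
    event = {"user": {"ip_address": "127.0.0.1"}}
    # Returns EventFilteredRet(should_filter=True, reason=FilterStatKeys.LOCALHOST).
    return should_filter_event(_ProjectConfig(), event)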
def get_all_filters():
"""
Returns a list of the existing event filters
    An event filter is a function that receives a project_config and an event data payload and returns a tuple
    (should_filter: bool, filter_reason: string | None) indicating whether the event should be filtered and,
    if so, the reason for filtering.
:return: list of registered event filters
"""
return (
_localhost_filter,
_browser_extensions_filter,
_legacy_browsers_filter,
_web_crawlers_filter,
)
def set_filter_state(filter_id, project, state):
flt = _filter_from_filter_id(filter_id)
if flt is None:
raise FilterNotRegistered(filter_id)
if flt == _legacy_browsers_filter:
if state is None:
state = {}
option_val = "0"
if "active" in state:
if state["active"]:
option_val = "1"
elif "subfilters" in state and len(state["subfilters"]) > 0:
option_val = set(state["subfilters"])
ProjectOption.objects.set_value(
project=project, key=u"filters:{}".format(filter_id), value=option_val
)
return option_val
else:
# all boolean filters
if state is None:
state = {"active": True}
ProjectOption.objects.set_value(
project=project,
key=u"filters:{}".format(filter_id),
value="1" if state.get("active", False) else "0",
)
if state:
inbound_filter_toggled.send(project=project, sender=flt)
return state.get("active", False)
def get_filter_state(filter_id, project):
"""
Returns the filter state
    IMPORTANT: this function accesses the database; it should NEVER be used by the ingestion pipeline.
This api is used by the ProjectFilterDetails and ProjectFilters endpoints
:param filter_id: the filter Id
:param project: the project for which we want the filter state
:return: True if the filter is enabled False otherwise
:raises: ValueError if filter id not registered
"""
flt = _filter_from_filter_id(filter_id)
if flt is None:
raise FilterNotRegistered(filter_id)
filter_state = ProjectOption.objects.get_value(
project=project, key=u"filters:{}".format(flt.spec.id)
)
if filter_state is None:
raise ValueError(
"Could not find filter state for filter {0}."
" You need to register default filter state in projectoptions.defaults.".format(
filter_id
)
)
if flt == _legacy_browsers_filter:
# special handling for legacy browser state
if filter_state == "1":
return True
if filter_state == "0":
return False
return filter_state
else:
return filter_state == "1"
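# Typical round trip through the two helpers above, e.g. from the project
# filters API. ``project`` stands for a Project record, so this is a sketch of
# the call pattern rather than something runnable outside the ORM:
#
#     set_filter_state(FilterStatKeys.BROWSER_EXTENSION, project, {"active": True})
#     get_filter_state(FilterStatKeys.BROWSER_EXTENSION, project)   # -> True
#     set_filter_state(FilterStatKeys.LEGACY_BROWSER, project,
#                      {"subfilters": ["ie_pre_9", "ie9"]})
#     get_filter_state(FilterStatKeys.LEGACY_BROWSER, project)      # -> {"ie9", "ie_pre_9"}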
class FilterNotRegistered(Exception):
pass
def _filter_from_filter_id(filter_id):
"""
Returns the corresponding filter for a filter id or None if no filter with the given id found
"""
for flt in get_all_filters():
if flt.spec.id == filter_id:
return flt
return None
class _FilterSerializer(serializers.Serializer):
active = serializers.BooleanField()
class _FilterSpec(object):
"""
Data associated with a filter, it defines its name, id, default enable state and how its state is serialized
in the database
"""
def __init__(self, id, name, description, serializer_cls=None):
self.id = id
self.name = name
self.description = description
if serializer_cls is None:
self.serializer_cls = _FilterSerializer
else:
self.serializer_cls = serializer_cls
def _get_filter_settings(project_config, flt):
"""
Gets the filter options from the relay config or the default option if not specified in the relay config
:param project_config: the relay config for the request
:param flt: the filter
:return: the options for the filter
"""
filter_settings = project_config.config.get("filter_settings", {})
return filter_settings.get(get_filter_key(flt), None)
def _is_filter_enabled(project_config, flt):
filter_options = _get_filter_settings(project_config, flt)
if filter_options is None:
raise ValueError("unknown filter", flt.spec.id)
return filter_options["is_enabled"]
def get_filter_key(flt):
return flt.spec.id.replace("-", "_")
# ************* local host filter *************
_LOCAL_IPS = frozenset(["127.0.0.1", "::1"])
_LOCAL_DOMAINS = frozenset(["127.0.0.1", "localhost"])
def _localhost_filter(project_config, data):
ip_address = get_path(data, "user", "ip_address") or ""
url = get_path(data, "request", "url") or ""
domain = urlparse(url).hostname
return ip_address in _LOCAL_IPS or domain in _LOCAL_DOMAINS
_localhost_filter.spec = _FilterSpec(
id=FilterStatKeys.LOCALHOST,
name="Filter out events coming from localhost",
description="This applies to both IPv4 (``127.0.0.1``) and IPv6 (``::1``) addresses.",
)
# ************* browser extensions filter *************
_EXTENSION_EXC_VALUES = re.compile(
"|".join(
(
re.escape(x)
for x in (
# Random plugins/extensions
"top.GLOBALS",
# See: http://blog.errorception.com/2012/03/tale-of-unfindable-js-error.html
"originalCreateNotification",
"canvas.contentDocument",
"MyApp_RemoveAllHighlights",
"http://tt.epicplay.com",
"Can't find variable: ZiteReader",
"jigsaw is not defined",
"ComboSearch is not defined",
"http://loading.retry.widdit.com/",
"atomicFindClose",
# Facebook borked
"fb_xd_fragment",
# ISP "optimizing" proxy - `Cache-Control: no-transform` seems to
# reduce this. (thanks @acdha)
# See http://stackoverflow.com/questions/4113268
"bmi_SafeAddOnload",
"EBCallBackMessageReceived",
# See
# https://groups.google.com/a/chromium.org/forum/#!topic/chromium-discuss/7VU0_VvC7mE
"_gCrWeb",
# See http://toolbar.conduit.com/Debveloper/HtmlAndGadget/Methods/JSInjection.aspx
"conduitPage",
# Google Search app (iOS)
# See: https://github.com/getsentry/raven-js/issues/756
"null is not an object (evaluating 'elt.parentNode')",
# Dragon Web Extension from Nuance Communications
# See: https://forum.sentry.io/t/error-in-raven-js-plugin-setsuspendstate/481/
"plugin.setSuspendState is not a function",
# lastpass
"should_do_lastpass_here",
# google translate
# see https://medium.com/@amir.harel/a-b-target-classname-indexof-is-not-a-function-at-least-not-mine-8e52f7be64ca
"a[b].target.className.indexOf is not a function",
)
)
),
re.I,
)
_EXTENSION_EXC_SOURCES = re.compile(
"|".join(
(
# Facebook flakiness
r"graph\.facebook\.com",
# Facebook blocked
r"connect\.facebook\.net",
# Woopra flakiness
r"eatdifferent\.com\.woopra-ns\.com",
r"static\.woopra\.com\/js\/woopra\.js",
# Chrome extensions
r"^chrome(?:-extension)?:\/\/",
# Cacaoweb
r"127\.0\.0\.1:4001\/isrunning",
# Other
r"webappstoolbarba\.texthelp\.com\/",
r"metrics\.itunes\.apple\.com\.edgesuite\.net\/",
# Kaspersky Protection browser extension
r"kaspersky-labs\.com",
# Google ad server (see http://whois.domaintools.com/2mdn.net)
r"2mdn\.net",
)
),
re.I,
)
def _browser_extensions_filter(project_config, data):
if data.get("platform") != "javascript":
return False
# get exception value
try:
exc_value = data["exception"]["values"][0]["value"]
except (LookupError, TypeError):
exc_value = ""
if exc_value:
if _EXTENSION_EXC_VALUES.search(exc_value):
return True
# get exception source
try:
exc_source = data["exception"]["values"][0]["stacktrace"]["frames"][-1]["abs_path"]
except (LookupError, TypeError):
exc_source = ""
if exc_source:
if _EXTENSION_EXC_SOURCES.search(exc_source):
return True
return False
_browser_extensions_filter.spec = _FilterSpec(
id=FilterStatKeys.BROWSER_EXTENSION,
name="Filter out errors known to be caused by browser extensions",
description="Certain browser extensions will inject inline scripts and are known to cause errors.",
)
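# For instance, a javascript event whose innermost frame points at a browser
# extension URL is matched through _EXTENSION_EXC_SOURCES; the payload below is
# made up and trimmed to just the fields this filter reads:
#
#     _browser_extensions_filter(None, {
#         "platform": "javascript",
#         "exception": {"values": [{
#             "value": "Script error.",
#             "stacktrace": {"frames": [{"abs_path": "chrome-extension://abcdef/content.js"}]},
#         }]},
#     })
#     # -> True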
# ************* legacy browsers filter *************
MIN_VERSIONS = {
"Chrome": 0,
"IE": 10,
"Firefox": 0,
"Safari": 6,
"Edge": 0,
"Opera": 15,
"Android": 4,
"Opera Mini": 8,
}
def _legacy_browsers_filter(project_config, data):
def get_user_agent(data):
try:
for key, value in get_path(data, "request", "headers", filter=True) or ():
if key.lower() == "user-agent":
return value
except LookupError:
return ""
if data.get("platform") != "javascript":
return False
value = get_user_agent(data)
if not value:
return False
ua = Parse(value)
if not ua:
return False
browser = ua["user_agent"]
if not browser["family"]:
return False
# IE Desktop and IE Mobile use the same engines, therefore we can treat them as one
if browser["family"] == "IE Mobile":
browser["family"] = "IE"
filter_settings = _get_filter_settings(project_config, _legacy_browsers_filter)
# handle old style config
if filter_settings is None:
return _filter_default(browser)
enabled_sub_filters = filter_settings.get("options")
if isinstance(enabled_sub_filters, collections.Sequence):
for sub_filter_name in enabled_sub_filters:
sub_filter = _legacy_browsers_sub_filters.get(sub_filter_name)
if sub_filter is not None and sub_filter(browser):
return True
return False
class _LegacyBrowserFilterSerializer(serializers.Serializer):
active = serializers.BooleanField()
subfilters = MultipleChoiceField(
choices=[
"ie_pre_9",
"ie9",
"ie10",
"opera_pre_15",
"android_pre_4",
"safari_pre_6",
"opera_mini_pre_8",
]
)
_legacy_browsers_filter.spec = _FilterSpec(
id=FilterStatKeys.LEGACY_BROWSER,
name="Filter out known errors from legacy browsers",
description="Older browsers often give less accurate information, and while they may report valid issues, "
"the context to understand them is incorrect or missing.",
serializer_cls=_LegacyBrowserFilterSerializer,
)
def _filter_default(browser):
"""
Legacy filter - new users specify individual filters
"""
try:
minimum_version = MIN_VERSIONS[browser["family"]]
except KeyError:
return False
try:
major_browser_version = int(browser["major"])
except (TypeError, ValueError):
return False
if minimum_version > major_browser_version:
return True
return False
def _filter_opera_pre_15(browser):
if not browser["family"] == "Opera":
return False
try:
major_browser_version = int(browser["major"])
except (TypeError, ValueError):
return False
if major_browser_version < 15:
return True
return False
def _filter_safari_pre_6(browser):
if not browser["family"] == "Safari":
return False
try:
major_browser_version = int(browser["major"])
except (TypeError, ValueError):
return False
if major_browser_version < 6:
return True
return False
def _filter_android_pre_4(browser):
if not browser["family"] == "Android":
return False
try:
major_browser_version = int(browser["major"])
except (TypeError, ValueError):
return False
if major_browser_version < 4:
return True
return False
def _filter_opera_mini_pre_8(browser):
if not browser["family"] == "Opera Mini":
return False
try:
major_browser_version = int(browser["major"])
except (TypeError, ValueError):
return False
if major_browser_version < 8:
return True
return False
def _filter_ie10(browser):
return _filter_ie_internal(browser, lambda major_ver: major_ver == 10)
def _filter_ie9(browser):
return _filter_ie_internal(browser, lambda major_ver: major_ver == 9)
def _filter_ie_pre_9(browser):
return _filter_ie_internal(browser, lambda major_ver: major_ver <= 8)
def _filter_ie_internal(browser, compare_version):
if not browser["family"] == "IE":
return False
try:
major_browser_version = int(browser["major"])
except (TypeError, ValueError):
return False
return compare_version(major_browser_version)
# list all browser specific sub filters that should be called
_legacy_browsers_sub_filters = {
"default": _filter_default,
"opera_pre_15": _filter_opera_pre_15,
"safari_pre_6": _filter_safari_pre_6,
"android_pre_4": _filter_android_pre_4,
"opera_mini_pre_8": _filter_opera_mini_pre_8,
"ie9": _filter_ie9,
"ie10": _filter_ie10,
"ie_pre_9": _filter_ie_pre_9,
}
# ************* web crawler filter *************
# not all of these agents are guaranteed to execute JavaScript, but to avoid
# overhead of identifying which ones do, and which ones will over time we simply
# target all of the major ones
_CRAWLERS = re.compile(
r"|".join(
(
# Google spiders (Adsense and others)
# https://support.google.com/webmasters/answer/1061943?hl=en
r"Mediapartners\-Google",
r"AdsBot\-Google",
r"Googlebot",
r"FeedFetcher\-Google",
# Bing search
r"BingBot",
r"BingPreview",
# Baidu search
r"Baiduspider",
# Yahoo
r"Slurp",
# Sogou
r"Sogou",
# facebook
r"facebook",
# Alexa
r"ia_archiver",
# Generic bot
r"bots?[\/\s\)\;]",
# Generic spider
r"spider[\/\s\)\;]",
# Slack - see https://api.slack.com/robots
r"Slack",
# Google indexing bot
r"Calypso AppCrawler",
# Pingdom
r"pingdom",
# Lytics
r"lyticsbot",
)
),
re.I,
)
def _web_crawlers_filter(project_config, data):
try:
for key, value in get_path(data, "request", "headers", filter=True) or ():
if key.lower() == "user-agent":
if not value:
return False
return bool(_CRAWLERS.search(value))
return False
except LookupError:
return False
_web_crawlers_filter.spec = _FilterSpec(
id=FilterStatKeys.WEB_CRAWLER,
name="Filter out known web crawlers",
description="Some crawlers may execute pages in incompatible ways which then cause errors that"
" are unlikely to be seen by a normal user.",
)
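# A crawler hit is the simplest case: a Googlebot user agent matches _CRAWLERS
# directly, so the event is dropped regardless of its other contents:
#
#     _web_crawlers_filter(None, {"request": {"headers": [
#         ["User-Agent",
#          "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"],
#     ]}})
#     # -> True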
| bsd-3-clause | -2,643,370,731,254,507,500 | -7,615,703,112,004,092,000 | 28.519164 | 130 | 0.602632 | false |
eResearchSA/reporting-storage-hcp | ersa_storage_hcp/__init__.py | 1 | 5549 | #!/usr/bin/python3
"""Application and persistence management."""
# pylint: disable=no-member, import-error, no-init, too-few-public-methods
# pylint: disable=cyclic-import, no-name-in-module, invalid-name
import os
from flask import Flask
from flask.ext import restful
from flask.ext.cors import CORS
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy.sql import text
from sqlalchemy.dialects.postgresql import UUID
app = Flask("storage-hcp")
cors = CORS(app)
restapi = restful.Api(app)
app.config["ERSA_STORAGE_HCP_TOKEN"] = os.getenv("ERSA_STORAGE_HCP_TOKEN")
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("ERSA_STORAGE_HCP_DATABASE")
db = SQLAlchemy(app)
def _id_column():
"""Generate a UUID column."""
return db.Column(UUID,
server_default=text("uuid_generate_v4()"),
primary_key=True)
class Allocation(db.Model):
"""Storage Allocation"""
id = _id_column()
allocation = db.Column(db.Integer, unique=True, nullable=False)
tenants = db.relationship("Tenant", backref="allocation")
namespaces = db.relationship("Namespace", backref="allocation")
def json(self):
"""Jsonify"""
return {"id": self.id, "allocation": self.allocation}
class Snapshot(db.Model):
"""Storage Snapshot"""
id = _id_column()
ts = db.Column(db.Integer, nullable=False)
usage = db.relationship("Usage", backref="snapshot")
def json(self):
"""Jsonify"""
return {"id": self.id, "ts": self.ts}
class Tenant(db.Model):
"""HCP Tenant"""
id = _id_column()
name = db.Column(db.String(256), unique=True, nullable=False)
namespaces = db.relationship("Namespace", backref="tenant")
allocation_id = db.Column(None, db.ForeignKey("allocation.id"))
def json(self, namespaces=True):
"""Jsonify"""
result = {"id": self.id, "name": self.name}
if self.allocation:
result["allocation"] = self.allocation.json()
if namespaces:
result["namespaces"] = [namespace.json(tenants=False)
for namespace in self.namespaces]
return result
class Namespace(db.Model):
"""HCP Namespace"""
id = _id_column()
name = db.Column(db.String(256), nullable=False)
usage = db.relationship("Usage", backref="namespace")
tenant_id = db.Column(None,
db.ForeignKey("tenant.id"),
index=True,
nullable=False)
allocation_id = db.Column(None, db.ForeignKey("allocation.id"))
def json(self, tenants=True):
"""Jsonify"""
result = {"id": self.id, "name": self.name}
if self.allocation:
result["allocation"] = self.allocation.json()
if tenants:
result["tenant"] = self.tenant.json(namespaces=False)
return result
class Usage(db.Model):
"""HCP Usage"""
id = _id_column()
start_time = db.Column(db.Integer, index=True, nullable=False)
end_time = db.Column(db.Integer, index=True, nullable=False)
ingested_bytes = db.Column(db.BigInteger, nullable=False)
raw_bytes = db.Column(db.BigInteger, nullable=False)
reads = db.Column(db.BigInteger, nullable=False)
writes = db.Column(db.BigInteger, nullable=False)
deletes = db.Column(db.BigInteger, nullable=False)
objects = db.Column(db.BigInteger, nullable=False)
bytes_in = db.Column(db.BigInteger, nullable=False)
bytes_out = db.Column(db.BigInteger, nullable=False)
metadata_only_objects = db.Column(db.BigInteger, nullable=False)
metadata_only_bytes = db.Column(db.BigInteger, nullable=False)
tiered_objects = db.Column(db.BigInteger, nullable=False)
tiered_bytes = db.Column(db.BigInteger, nullable=False)
snapshot_id = db.Column(None,
db.ForeignKey("snapshot.id"),
index=True,
nullable=False)
namespace_id = db.Column(None,
db.ForeignKey("namespace.id"),
index=True,
nullable=False)
def json(self):
"""Jsonify"""
return {
"start_time": self.start_time,
"end_time": self.end_time,
"ingested_bytes": self.ingested_bytes,
"raw_bytes": self.raw_bytes,
"reads": self.reads,
"writes": self.writes,
"deletes": self.deletes,
"objects": self.objects,
"bytes_in": self.bytes_in,
"bytes_out": self.bytes_out,
"metadata_only_objects": self.metadata_only_objects,
"metadata_only_bytes": self.metadata_only_bytes,
"tiered_objects": self.tiered_objects,
"tiered_bytes": self.tiered_bytes,
"snapshot": self.snapshot.json(),
"namespace": {
"id": self.namespace.id,
"name": self.namespace.name
}
}
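# Sketch of the kind of read the usage endpoint might perform once the tables
# exist; plain Flask-SQLAlchemy querying, nothing HCP-specific, with placeholder
# epoch values for the window:
#
#     start, end = 1420070400, 1422748800
#     rows = Usage.query.filter(Usage.start_time >= start,
#                               Usage.end_time <= end).all()
#     payload = [row.json() for row in rows]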
def run():
"""Let's roll."""
db.engine.execute("create extension if not exists \"uuid-ossp\";")
db.create_all()
from ersa_storage_hcp import api
restapi.add_resource(api.PingResource, "/ping")
restapi.add_resource(api.AllocationResource, "/allocation")
restapi.add_resource(api.StorageResource, "/storage")
restapi.add_resource(api.SnapshotResource, "/snapshot")
restapi.add_resource(api.UsageResource, "/usage")
app.run(host="127.0.0.1", port=int(os.getenv("ERSA_STORAGE_HCP_PORT")))
| apache-2.0 | 3,103,691,278,539,025,000 | -4,416,800,244,741,473,000 | 31.83432 | 78 | 0.605515 | false |
Elico-Corp/odoo_OCB | addons/l10n_multilang/l10n_multilang.py | 10 | 5981 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import api, models, SUPERUSER_ID
import logging
_logger = logging.getLogger(__name__)
class AccountChartTemplate(models.Model):
_inherit = 'account.chart.template'
@api.multi
def process_translations(self, langs, in_field, in_ids, out_ids):
"""
        This method copies translation values of templates into new Accounts/Taxes/Journals for the selected languages
:param langs: List of languages to load for new records
:param in_field: Name of the translatable field of source templates
:param in_ids: Recordset of ids of source object
:param out_ids: Recordset of ids of destination object
:return: True
"""
xlat_obj = self.env['ir.translation']
#find the source from Account Template
for lang in langs:
#find the value from Translation
value = xlat_obj._get_ids(in_ids._name + ',' + in_field, 'model', lang, in_ids.ids)
counter = 0
for element in in_ids.with_context(lang=None):
if value[element.id]:
#copy Translation from Source to Destination object
xlat_obj.create({
'name': out_ids._name + ',' + in_field,
'type': 'model',
'res_id': out_ids[counter].id,
'lang': lang,
'src': element.name,
'value': value[element.id],
})
else:
_logger.info('Language: %s. Translation from template: there is no translation available for %s!' %(lang, element.name))
counter += 1
return True
@api.multi
def process_coa_translations(self):
installed_lang_ids = self.env['res.lang'].search([])
installed_langs = [x.code for x in installed_lang_ids]
company_obj = self.env['res.company']
for chart_template_id in self:
langs = []
if chart_template_id.spoken_languages:
for lang in chart_template_id.spoken_languages.split(';'):
if lang not in installed_langs:
# the language is not installed, so we don't need to load its translations
continue
else:
langs.append(lang)
if langs:
company_ids = company_obj.search([('chart_template_id', '=', chart_template_id.id)])
for company in company_ids:
# write account.account translations in the real COA
chart_template_id._process_accounts_translations(company.id, langs, 'name')
# copy account.tax name translations
chart_template_id._process_taxes_translations(company.id, langs, 'name')
# copy account.tax description translations
chart_template_id._process_taxes_translations(company.id, langs, 'description')
# copy account.fiscal.position translations
chart_template_id._process_fiscal_pos_translations(company.id, langs, 'name')
return True
@api.multi
def _process_accounts_translations(self, company_id, langs, field):
in_ids = self.env['account.account.template'].search([('chart_template_id', '=', self.id)], order='id')
out_ids = self.env['account.account'].search([('company_id', '=', company_id)], order='id')
return self.process_translations(langs, field, in_ids, out_ids)
@api.multi
def _process_taxes_translations(self, company_id, langs, field):
in_ids = self.env['account.tax.template'].search([('chart_template_id', '=', self.id)], order='id')
out_ids = self.env['account.tax'].search([('company_id', '=', company_id)], order='id')
return self.process_translations(langs, field, in_ids, out_ids)
@api.multi
def _process_fiscal_pos_translations(self, company_id, langs, field):
in_ids = self.env['account.fiscal.position.template'].search([('chart_template_id', '=', self.id)], order='id')
out_ids = self.env['account.fiscal.position'].search([('company_id', '=', company_id)], order='id')
return self.process_translations(langs, field, in_ids, out_ids)
class base_language_install(models.TransientModel):
""" Install Language"""
_inherit = "base.language.install"
@api.multi
def lang_install(self):
self.ensure_one()
already_installed = self.env['res.lang'].search_count([('code', '=', self.lang)])
res = super(base_language_install, self).lang_install()
if already_installed:
# update of translations instead of new installation
# skip to avoid duplicating the translations
return res
# CoA in multilang mode
for coa in self.env['account.chart.template'].search([('spoken_languages', '!=', False)]):
if self.lang in coa.spoken_languages.split(';'):
# companies on which it is installed
for company in self.env['res.company'].search([('chart_template_id', '=', coa.id)]):
# write account.account translations in the real COA
coa._process_accounts_translations(company.id, [self.lang], 'name')
# copy account.tax name translations
coa._process_taxes_translations(company.id, [self.lang], 'name')
# copy account.tax description translations
coa._process_taxes_translations(company.id, [self.lang], 'description')
# copy account.fiscal.position translations
coa._process_fiscal_pos_translations(company.id, [self.lang], 'name')
return res
| agpl-3.0 | 4,898,024,317,555,354,000 | 8,864,289,179,700,901,000 | 49.260504 | 141 | 0.579167 | false |
analurandis/Tur | backend/venv/Lib/site-packages/Cheetah/Tools/CGITemplate.py | 15 | 2200 | # $Id: CGITemplate.py,v 1.6 2006/01/29 02:09:59 tavis_rudd Exp $
"""A subclass of Cheetah.Template for use in CGI scripts.
Usage in a template:
#extends Cheetah.Tools.CGITemplate
#implements respond
$cgiHeaders#slurp
Usage in a template inheriting a Python class:
1. The template
#extends MyPythonClass
#implements respond
$cgiHeaders#slurp
2. The Python class
from Cheetah.Tools import CGITemplate
class MyPythonClass(CGITemplate):
def cgiHeadersHook(self):
return "Content-Type: text/html; charset=koi8-r\n\n"
To read GET/POST variables, use the .webInput method defined in
Cheetah.Utils.WebInputMixin (available in all templates without importing
anything), use Python's 'cgi' module, or make your own arrangements.
This class inherits from Cheetah.Template to make it usable in Cheetah's
single-inheritance model.
Meta-Data
================================================================================
Author: Mike Orr <[email protected]>
License: This software is released for unlimited distribution under the
terms of the MIT license. See the LICENSE file.
Version: $Revision: 1.6 $
Start Date: 2001/10/03
Last Revision Date: $Date: 2006/01/29 02:09:59 $
"""
__author__ = "Mike Orr <[email protected]>"
__revision__ = "$Revision: 1.6 $"[11:-2]
import os
from Cheetah.Template import Template
class CGITemplate(Template):
"""Methods useful in CGI scripts.
Any class that inherits this mixin must also inherit Cheetah.Servlet.
"""
def cgiHeaders(self):
"""Outputs the CGI headers if this is a CGI script.
Usage: $cgiHeaders#slurp
Override .cgiHeadersHook() if you want to customize the headers.
"""
if self.isCgi():
return self.cgiHeadersHook()
def cgiHeadersHook(self):
"""Override if you want to customize the CGI headers.
"""
return "Content-type: text/html\n\n"
def isCgi(self):
"""Is this a CGI script?
"""
env = 'REQUEST_METHOD' in os.environ
wk = self._CHEETAH__isControlledByWebKit
return env and not wk
# vim: shiftwidth=4 tabstop=4 expandtab
| mit | -1,179,801,104,857,992,400 | 158,282,149,067,095,140 | 27.571429 | 80 | 0.651818 | false |
TraurigeNarr/ThirdParties | assimp-3.2/test/regression/gen_db.py | 16 | 8088 | #!/usr/bin/env python3
# -*- Coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# Open Asset Import Library (ASSIMP)
# ---------------------------------------------------------------------------
#
# Copyright (c) 2006-2010, ASSIMP Development Team
#
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# * Neither the name of the ASSIMP team, nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior
# written permission of the ASSIMP Development Team.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ---------------------------------------------------------------------------
"""
Generate the regression database db.zip from the files in the <root>/test/models
directory. Older databases are overwritten with no prompt but can be restored
using Git as needed.
Use --help for usage.
On Windows, use ``py run.py <arguments>`` to make sure command line parameters
are forwarded to the script.
"""
import sys
import os
import subprocess
import zipfile
import settings
import utils
usage = """gen_db [assimp_binary] [-i=...] [-e=...] [-p] [-n]
The assimp_cmd (or assimp) binary to use is specified by the first
command line argument and defaults to ``assimp``.
To build, set ``ASSIMP_BUILD_ASSIMP_TOOLS=ON`` in CMake. If generating
configs for an IDE, make sure to build the assimp_cmd project.
-i,--include: List of file extensions to update dumps for. If omitted,
all file extensions are updated except those in `exclude`.
Example: -ixyz,abc
-i.xyz,.abc
--include=xyz,abc
-e,--exclude: Merged with settings.exclude_extensions to produce a
list of all file extensions to ignore. If dumps exist,
              they are not altered. If not, they are not created.
-p,--preview: Preview list of file extensions touched by the update.
              Don't change anything.
-n,--nozip: Don't pack to ZIP archive. Keep all dumps in individual files.
"""
# -------------------------------------------------------------------------------
def process_dir(d, outfile, file_filter):
""" Generate small dump records for all files in 'd' """
print("Processing directory " + d)
num = 0
for f in os.listdir(d):
fullp = os.path.join(d, f)
if os.path.isdir(fullp) and not f == ".svn":
num += process_dir(fullp, outfile, file_filter)
continue
if file_filter(f):
for pp in settings.pp_configs_to_test:
num += 1
print("DUMP " + fullp + "\n post-processing: " + pp)
outf = os.path.join(os.getcwd(), settings.database_name,
utils.hashing(fullp, pp))
cmd = [ assimp_bin_path, "dump", fullp, outf, "-b", "-s", "-l" ] + pp.split()
outfile.write("assimp dump "+"-"*80+"\n")
outfile.flush()
if subprocess.call(cmd, stdout=outfile, stderr=outfile, shell=False):
print("Failure processing " + fullp)
# spit out an empty file to indicate that this failure is expected
with open(outf,'wb') as f:
pass
return num
# -------------------------------------------------------------------------------
def make_zip():
"""Zip the contents of ./<settings.database_name>
to <settings.database_name>.zip using DEFLATE
compression to minimize the file size. """
num = 0
zipout = zipfile.ZipFile(settings.database_name + ".zip", "w", zipfile.ZIP_DEFLATED)
for f in os.listdir(settings.database_name):
p = os.path.join(settings.database_name, f)
zipout.write(p, f)
if settings.remove_old:
os.remove(p)
num += 1
if settings.remove_old:
os.rmdir(settings.database_name)
bad = zipout.testzip()
assert bad is None
print("="*60)
print("Database contains {0} entries".format(num))
# -------------------------------------------------------------------------------
def extract_zip():
"""Unzip <settings.database_name>.zip to
./<settings.database_name>"""
try:
zipout = zipfile.ZipFile(settings.database_name + ".zip", "r", 0)
zipout.extractall(path=settings.database_name)
except (RuntimeError,IOError) as r:
print(r)
print("failed to extract previous ZIP contents. "\
"DB is generated from scratch.")
# -------------------------------------------------------------------------------
def gen_db(ext_list,outfile):
"""Generate the crash dump database in
./<settings.database_name>"""
try:
os.mkdir(settings.database_name)
except OSError:
pass
num = 0
for tp in settings.model_directories:
num += process_dir(tp, outfile,
lambda x: os.path.splitext(x)[1].lower() in ext_list and not x in settings.files_to_ignore)
print("="*60)
print("Updated {0} entries".format(num))
# -------------------------------------------------------------------------------
if __name__ == "__main__":
def clean(f):
f = f.strip("* \'")
return "."+f if f[:1] != '.' else f
if len(sys.argv) <= 1 or sys.argv[1] == "--help" or sys.argv[1] == "-h":
print(usage)
sys.exit(0)
assimp_bin_path = sys.argv[1]
ext_list, preview, nozip = None, False, False
for m in sys.argv[2:]:
if m[:10]=="--exclude=":
settings.exclude_extensions += map(clean, m[10:].split(","))
elif m[:2]=="-e":
settings.exclude_extensions += map(clean, m[2:].split(","))
elif m[:10]=="--include=":
ext_list = m[10:].split(",")
elif m[:2]=="-i":
ext_list = m[2:].split(",")
elif m=="-p" or m == "--preview":
preview = True
elif m=="-n" or m == "--nozip":
nozip = True
else:
print("Unrecognized parameter: " + m)
sys.exit(-1)
outfile = open(os.path.join("..", "results", "gen_regression_db_output.txt"), "w")
if ext_list is None:
(ext_list, err) = subprocess.Popen([assimp_bin_path, "listext"],
stdout=subprocess.PIPE).communicate()
ext_list = str(ext_list.strip()).lower().split(";")
# todo: Fix for multi dot extensions like .skeleton.xml
ext_list = list(filter(lambda f: not f in settings.exclude_extensions,
map(clean, ext_list)))
print('File extensions processed: ' + ', '.join(ext_list))
if preview:
sys.exit(1)
extract_zip()
gen_db(ext_list,outfile)
make_zip()
print("="*60)
input("Press any key to continue")
sys.exit(0)
# vim: ai ts=4 sts=4 et sw=4
| gpl-2.0 | 1,179,068,034,535,424,000 | -3,103,186,027,322,189,300 | 34.946667 | 103 | 0.569486 | false |
varunarya10/oslo.serialization | oslo_serialization/jsonutils.py | 1 | 8936 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
#. A handy function for getting an object down to something that can be
JSON serialized. See :func:`.to_primitive`.
#. Wrappers around :func:`.loads` and :func:`.dumps`. The :func:`.dumps`
wrapper will automatically use :func:`.to_primitive` for you if needed.
#. This sets up ``anyjson`` to use the :func:`.loads` and :func:`.dumps`
wrappers if ``anyjson`` is available.
'''
import codecs
import datetime
import functools
import inspect
import itertools
import sys
import uuid
is_simplejson = False
if sys.version_info < (2, 7):
# On Python <= 2.6, json module is not C boosted, so try to use
# simplejson module if available
try:
import simplejson as json
# NOTE(mriedem): Make sure we have a new enough version of simplejson
        # to support the namedtuple_as_object argument. This can be removed
# in the Kilo release when python 2.6 support is dropped.
if 'namedtuple_as_object' in inspect.getargspec(json.dumps).args:
is_simplejson = True
else:
import json
except ImportError:
import json
else:
import json
from oslo_utils import encodeutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
import six.moves.xmlrpc_client as xmlrpclib
netaddr = importutils.try_import("netaddr")
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
_simple_types = (six.string_types + six.integer_types
+ (type(None), bool, float))
def to_primitive(value, convert_instances=False, convert_datetime=True,
level=0, max_depth=3):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, ``convert_instances=True`` is lossy ... be aware.
"""
# handle obvious types first - order of basic types determined by running
# full tests on nova project, resulting in the following counts:
# 572754 <type 'NoneType'>
# 460353 <type 'int'>
# 379632 <type 'unicode'>
# 274610 <type 'str'>
# 199918 <type 'dict'>
# 114200 <type 'datetime.datetime'>
# 51817 <type 'bool'>
# 26164 <type 'list'>
# 6491 <type 'float'>
# 283 <type 'tuple'>
# 19 <type 'long'>
if isinstance(value, _simple_types):
return value
if isinstance(value, datetime.datetime):
if convert_datetime:
return timeutils.strtime(value)
else:
return value
if isinstance(value, uuid.UUID):
return six.text_type(value)
# value of itertools.count doesn't get caught by nasty_type_tests
# and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return six.text_type(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > max_depth:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
recursive = functools.partial(to_primitive,
convert_instances=convert_instances,
convert_datetime=convert_datetime,
level=level,
max_depth=max_depth)
if isinstance(value, dict):
return dict((k, recursive(v)) for k, v in six.iteritems(value))
# It's not clear why xmlrpclib created their own DateTime type, but
# for our purposes, make it a datetime type which is explicitly
# handled
if isinstance(value, xmlrpclib.DateTime):
value = datetime.datetime(*tuple(value.timetuple())[:6])
if convert_datetime and isinstance(value, datetime.datetime):
return timeutils.strtime(value)
elif hasattr(value, 'iteritems'):
return recursive(dict(value.iteritems()), level=level + 1)
elif hasattr(value, '__iter__'):
return list(map(recursive, value))
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return recursive(value.__dict__, level=level + 1)
elif netaddr and isinstance(value, netaddr.IPAddress):
return six.text_type(value)
elif any(test(value) for test in _nasty_type_tests):
return six.text_type(value)
return value
except TypeError:
# Class objects are tricky since they may define something like
# __iter__ defined but it isn't callable as list().
return six.text_type(value)
JSONEncoder = json.JSONEncoder
JSONDecoder = json.JSONDecoder
def dumps(obj, default=to_primitive, **kwargs):
"""Serialize ``obj`` to a JSON formatted ``str``.
:param obj: object to be serialized
:param default: function that returns a serializable version of an object
:param kwargs: extra named parameters, please see documentation \
of `json.dumps <https://docs.python.org/2/library/json.html#basic-usage>`_
:returns: json formatted string
"""
if is_simplejson:
kwargs['namedtuple_as_object'] = False
return json.dumps(obj, default=default, **kwargs)
def dump(obj, fp, *args, **kwargs):
"""Serialize ``obj`` as a JSON formatted stream to ``fp``
:param obj: object to be serialized
:param fp: a ``.write()``-supporting file-like object
:param default: function that returns a serializable version of an object
:param args: extra arguments, please see documentation \
of `json.dump <https://docs.python.org/2/library/json.html#basic-usage>`_
:param kwargs: extra named parameters, please see documentation \
of `json.dump <https://docs.python.org/2/library/json.html#basic-usage>`_
"""
default = kwargs.get('default', to_primitive)
if is_simplejson:
kwargs['namedtuple_as_object'] = False
return json.dump(obj, fp, default=default, *args, **kwargs)
def loads(s, encoding='utf-8', **kwargs):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.
:param s: string to deserialize
:param encoding: encoding used to interpret the string
:param kwargs: extra named parameters, please see documentation \
of `json.loads <https://docs.python.org/2/library/json.html#basic-usage>`_
:returns: python object
"""
return json.loads(encodeutils.safe_decode(s, encoding), **kwargs)
def load(fp, encoding='utf-8', **kwargs):
"""Deserialize ``fp`` to a Python object.
:param fp: a ``.read()`` -supporting file-like object
:param encoding: encoding used to interpret the string
:param kwargs: extra named parameters, please see documentation \
of `json.loads <https://docs.python.org/2/library/json.html#basic-usage>`_
:returns: python object
"""
return json.load(codecs.getreader(encoding)(fp), **kwargs)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append((__name__, 'dumps', TypeError,
'loads', ValueError, 'load'))
anyjson.force_implementation(__name__)
| apache-2.0 | 2,524,845,387,671,525,400 | 3,652,109,918,999,738,000 | 37.025532 | 79 | 0.652865 | false |
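A minimal usage sketch of the helpers defined above: dumps() falls back to to_primitive() for non-JSON types, and loads() decodes byte strings before parsing. The sample payload is an assumption.

import datetime

from oslo_serialization import jsonutils

# Hypothetical payload: the datetime is handled by to_primitive/timeutils.strtime before encoding.
payload = {"name": "instance-1", "created_at": datetime.datetime(2015, 7, 4, 14, 4, 10)}
text = jsonutils.dumps(payload)

# loads() accepts str or bytes and decodes with the given encoding before parsing.
data = jsonutils.loads(text.encode("utf-8"))
assert data["name"] == "instance-1"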
aavanian/bokeh | bokeh/sampledata/tests/test_world_cities.py | 2 | 1963 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
import pandas as pd
# Bokeh imports
from bokeh.util.testing import verify_all
# Module under test
#import bokeh.sampledata.world_cities as bsw
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
ALL = (
'data',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
Test___all__ = pytest.mark.sampledata(verify_all("bokeh.sampledata.world_cities", ALL))
@pytest.mark.sampledata
def test_data():
import bokeh.sampledata.world_cities as bsw
assert isinstance(bsw.data, pd.DataFrame)
# don't check detail for external data
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
| bsd-3-clause | 7,986,296,991,859,412,000 | 282,364,090,075,556,380 | 32.844828 | 87 | 0.320428 | false |
strk/QGIS | tests/src/python/test_processing_alg_decorator.py | 23 | 5963 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for the @alg processing algorithm.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nathan Woodrow'
__date__ = '10.12.2018'
__copyright__ = 'Copyright 2018, The QGIS Project'
import sys
import os
import qgis # NOQA
from qgis.testing import unittest, start_app
from qgis.processing import alg
from qgis.core import QgsSettings
from qgis.PyQt.QtCore import QCoreApplication
start_app()
ARGNAME = "TEST_ALG{0}"
HELPSTRING = "TEST_HELP STRING{0}"
def define_new_no_inputs(newid=1):
@alg(name="noinputs", label=alg.tr("Test func"), group="unittest",
group_label=alg.tr("Test label"))
@alg.output(type=str, name="DISTANCE_OUT", label="Distance out")
def testalg(instance, parameters, context, feedback, inputs):
"""
Test doc string text
"""
def define_new_no_outputs_but_sink_instead(newid=1):
@alg(name=ARGNAME.format(newid), label=alg.tr("Test func"), group="unittest",
group_label=alg.tr("Test label"))
@alg.help(HELPSTRING.format(newid))
@alg.input(type=alg.SOURCE, name="INPUT", label="Input layer")
@alg.input(type=alg.DISTANCE, name="DISTANCE", label="Distance", default=30)
@alg.input(type=alg.SINK, name="SINK", label="Output layer")
def testalg(instance, parameters, context, feedback, inputs):
"""
Given a distance will split a line layer into segments of the distance
"""
def define_new_doc_string(newid=1):
@alg(name=ARGNAME.format(newid), label=alg.tr("Test func"), group="unittest",
group_label=alg.tr("Test label"))
@alg.input(type=alg.SOURCE, name="INPUT", label="Input layer")
@alg.output(type=str, name="DISTANCE_OUT", label="Distance out")
def testalg(instance, parameters, context, feedback, inputs):
"""
Test doc string text
"""
def define_new(newid=1):
@alg(name=ARGNAME.format(newid), label=alg.tr("Test func"), group="unittest",
group_label=alg.tr("Test label"))
@alg.help(HELPSTRING.format(newid))
@alg.input(type=alg.SOURCE, name="INPUT", label="Input layer")
@alg.input(type=alg.DISTANCE, name="DISTANCE", label="Distance", default=30)
@alg.input(type=alg.SINK, name="SINK", label="Output layer")
@alg.output(type=str, name="DISTANCE_OUT", label="Distance out")
def testalg(instance, parameters, context, feedback, inputs):
"""
Given a distance will split a line layer into segments of the distance
"""
def cleanup():
alg.instances.clear()
class AlgNoInputs(unittest.TestCase):
def setUp(self):
cleanup()
def test_can_have_no_inputs(self):
define_new_no_inputs()
class AlgNoOutputsButSinkInstead(unittest.TestCase):
def setUp(self):
cleanup()
def test_can_have_no_outputs_if_there_is_destination(self):
define_new_no_outputs_but_sink_instead()
class AlgInstanceTests(unittest.TestCase):
"""
Tests to check the createInstance method will work as expected.
"""
def setUp(self):
cleanup()
define_new()
self.current = alg.instances.pop().createInstance()
def test_correct_number_of_inputs_and_outputs(self):
self.assertEqual(3, len(self.current.inputs))
self.assertEqual(1, len(self.current.outputs))
def test_correct_number_of_inputs_and_outputs_after_init(self):
self.current.initAlgorithm()
defs = self.current.parameterDefinitions()
self.assertEqual(3, len(defs))
inputs = [
("INPUT", "Input layer"),
("DISTANCE", "Distance"),
("SINK", "Output layer"),
]
for count, data in enumerate(inputs):
parmdef = defs[count]
self.assertEqual(data[0], parmdef.name())
self.assertEqual(data[1], parmdef.description())
def test_func_is_set(self):
self.assertIsNotNone(self.current._func)
def test_has_help_from_help_decorator(self):
self.assertEqual(HELPSTRING.format(1), self.current.shortHelpString())
def test_name_and_label(self):
self.assertEqual(ARGNAME.format(1), self.current.name())
self.assertEqual("Test func", self.current.displayName())
def test_group(self):
self.assertEqual("Test label", self.current.group())
self.assertEqual("unittest", self.current.groupId())
class AlgHelpTests(unittest.TestCase):
def test_has_help_from_help_decorator(self):
cleanup()
define_new()
current = alg.instances.pop()
self.assertEqual(HELPSTRING.format(1), current.shortHelpString())
def test_has_help_from_docstring(self):
define_new_doc_string()
current = alg.instances.pop()
self.assertEqual("Test doc string text", current.shortHelpString())
class TestAlg(unittest.TestCase):
def setUp(self):
cleanup()
define_new()
def test_correct_number_of_inputs_and_outputs(self):
current = alg.instances.pop()
self.assertEqual(3, len(current.inputs))
self.assertEqual(1, len(current.outputs))
self.assertTrue(current.has_inputs)
self.assertTrue(current.has_outputs)
def test_correct_number_defined_in_stack_before_and_after(self):
self.assertEqual(1, len(alg.instances))
alg.instances.pop()
self.assertEqual(0, len(alg.instances))
def test_current_has_correct_name(self):
alg.instances.pop()
for i in range(3):
define_new(i)
self.assertEqual(3, len(alg.instances))
        for i in range(2, -1, -1):
current = alg.instances.pop()
self.assertEqual(ARGNAME.format(i), current.name())
if __name__ == "__main__":
unittest.main()
| gpl-2.0 | 5,207,415,009,265,192,000 | 4,587,174,987,908,323,300 | 31.232432 | 81 | 0.650176 | false |
LockScreen/Backend | venv/lib/python2.7/site-packages/boxsdk/object/item.py | 6 | 9391 | # coding: utf-8
from __future__ import unicode_literals
import json
from .base_object import BaseObject
from boxsdk.config import API
from boxsdk.exception import BoxAPIException
class Item(BaseObject):
"""Box API endpoint for interacting with files and folders."""
def _get_accelerator_upload_url(self, file_id=None):
"""
Make an API call to get the Accelerator upload url for either upload a new file or updating an existing file.
:param file_id:
Box id of the file to be uploaded. Not required for new file uploads.
:type file_id:
`unicode` or None
:return:
The Accelerator upload url or None if cannot get the Accelerator upload url.
:rtype:
`unicode` or None
"""
endpoint = '{0}/content'.format(file_id) if file_id else 'content'
url = '{0}/files/{1}'.format(API.BASE_API_URL, endpoint)
try:
response_json = self._session.options(
url=url,
expect_json_response=True,
).json()
return response_json.get('upload_url', None)
except BoxAPIException:
return None
def _preflight_check(self, size, name=None, file_id=None, parent_id=None):
"""
Make an API call to check if certain file can be uploaded to Box or not.
(https://developers.box.com/docs/#files-preflight-check)
:param size:
The size of the file to be uploaded in bytes. Specify 0 for unknown file sizes.
:type size:
`int`
:param name:
The name of the file to be uploaded. This is optional if `file_id` is specified,
but required for new file uploads.
:type name:
`unicode`
:param file_id:
Box id of the file to be uploaded. Not required for new file uploads.
:type file_id:
`unicode`
:param parent_id:
The ID of the parent folder. Required only for new file uploads.
:type parent_id:
`unicode`
:raises:
:class:`BoxAPIException` when preflight check fails.
"""
endpoint = '{0}/content'.format(file_id) if file_id else 'content'
url = '{0}/files/{1}'.format(API.BASE_API_URL, endpoint)
data = {'size': size}
if name:
data['name'] = name
if parent_id:
data['parent'] = {'id': parent_id}
self._session.options(
url=url,
expect_json_response=False,
data=json.dumps(data),
)
def update_info(self, data, etag=None):
"""Baseclass override.
:param etag:
If specified, instruct the Box API to perform the update only if
the current version's etag matches.
:type etag:
`unicode` or None
:return:
The updated object.
Return a new object of the same type, without modifying the original object passed as self.
Construct the new object with all the default attributes that are returned from the endpoint.
:rtype:
:class:`BaseObject`
"""
# pylint:disable=arguments-differ
headers = {'If-Match': etag} if etag is not None else None
return super(Item, self).update_info(data, headers=headers)
def rename(self, name):
"""
Rename the item to a new name.
:param name:
The new name, you want the item to be renamed to.
:type name:
`unicode`
"""
data = {
'name': name,
}
return self.update_info(data)
def get(self, fields=None, etag=None):
"""Base class override.
:param etag:
If specified, instruct the Box API to get the info only if the current version's etag doesn't match.
:type etag:
`unicode` or None
:returns:
Information about the file or folder.
:rtype:
`dict`
:raises: :class:`BoxAPIException` if the specified etag matches the latest version of the item.
"""
# pylint:disable=arguments-differ
headers = {'If-None-Match': etag} if etag is not None else None
return super(Item, self).get(fields=fields, headers=headers)
def copy(self, parent_folder):
"""Copy the item to the given folder.
:param parent_folder:
The folder to which the item should be copied.
:type parent_folder:
:class:`Folder`
"""
url = self.get_url('copy')
data = {
'parent': {'id': parent_folder.object_id}
}
box_response = self._session.post(url, data=json.dumps(data))
response = box_response.json()
return self.__class__(
session=self._session,
object_id=response['id'],
response_object=response,
)
def move(self, parent_folder):
"""
Move the item to the given folder.
:param parent_folder:
The parent `Folder` object, where the item will be moved to.
:type parent_folder:
`Folder`
"""
data = {
'parent': {'id': parent_folder.object_id}
}
return self.update_info(data)
def get_shared_link(self, access=None, etag=None, unshared_at=None, allow_download=None, allow_preview=None, password=None):
"""Get a shared link for the item with the given access permissions.
:param access:
Determines who can access the shared link. May be open, company, or collaborators. If no access is
specified, the default access will be used.
:type access:
`unicode` or None
:param etag:
If specified, instruct the Box API to create the link only if the current version's etag matches.
:type etag:
`unicode` or None
:param unshared_at:
The date on which this link should be disabled. May only be set if the current user is not a free user
and has permission to set expiration dates.
:type unshared_at:
:class:`datetime.date` or None
:param allow_download:
Whether or not the item being shared can be downloaded when accessed via the shared link.
If this parameter is None, the default setting will be used.
:type allow_download:
`bool` or None
:param allow_preview:
Whether or not the item being shared can be previewed when accessed via the shared link.
If this parameter is None, the default setting will be used.
:type allow_preview:
`bool` or None
:param password:
The password required to view this link. If no password is specified then no password will be set.
Please notice that this is a premium feature, which might not be available to your app.
:type password:
`unicode` or None
:returns:
The URL of the shared link.
:rtype:
`unicode`
:raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
"""
data = {
'shared_link': {} if not access else {
'access': access
}
}
if unshared_at is not None:
data['shared_link']['unshared_at'] = unshared_at.isoformat()
if allow_download is not None or allow_preview is not None:
data['shared_link']['permissions'] = permissions = {}
if allow_download is not None:
permissions['can_download'] = allow_download
if allow_preview is not None:
permissions['can_preview'] = allow_preview
if password is not None:
data['shared_link']['password'] = password
item = self.update_info(data, etag=etag)
return item.shared_link['url']
def remove_shared_link(self, etag=None):
"""Delete the shared link for the item.
:param etag:
If specified, instruct the Box API to delete the link only if the current version's etag matches.
:type etag:
`unicode` or None
:returns:
Whether or not the update was successful.
:rtype:
`bool`
:raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
"""
data = {'shared_link': None}
item = self.update_info(data, etag=etag)
return item.shared_link is None
def delete(self, params=None, etag=None):
"""Delete the item.
:param params:
Additional parameters to send with the request.
:type params:
`dict`
:param etag:
If specified, instruct the Box API to delete the item only if the current version's etag matches.
:type etag:
`unicode` or None
:returns:
Whether or not the delete was successful.
:rtype:
`bool`
:raises: :class:`BoxAPIException` if the specified etag doesn't match the latest version of the item.
"""
headers = {'If-Match': etag} if etag is not None else None
return super(Item, self).delete(params, headers)
| mit | -7,510,206,384,788,581,000 | 8,376,173,094,621,021,000 | 35.540856 | 128 | 0.578 | false |
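A short, hypothetical sketch of driving the Item methods above; the authenticated client and the object ids are assumptions not defined in this file, only the Item methods shown above are relied on.

# 'client' is an assumed, already-authenticated boxsdk Client; '12345' and '0' are made-up ids.
file_item = client.file(file_id='12345')

renamed = file_item.rename('report-final.pdf')                  # update_info() with {'name': ...}
copied = file_item.copy(parent_folder=client.folder(folder_id='0'))

# get_shared_link() updates the item via update_info() and returns just the URL string.
url = file_item.get_shared_link(access='open', allow_download=True)
file_item.remove_shared_link()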
schrd/django-crispy-forms | crispy_forms/helper.py | 14 | 14012 | # -*- coding: utf-8 -*-
import re
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.safestring import mark_safe
from crispy_forms.compatibility import string_types
from crispy_forms.layout import Layout
from crispy_forms.layout_slice import LayoutSlice
from crispy_forms.utils import render_field, flatatt, TEMPLATE_PACK
from crispy_forms.exceptions import FormHelpersException
class DynamicLayoutHandler(object):
def _check_layout(self):
if self.layout is None:
raise FormHelpersException("You need to set a layout in your FormHelper")
def _check_layout_and_form(self):
self._check_layout()
if self.form is None:
raise FormHelpersException("You need to pass a form instance to your FormHelper")
def all(self):
"""
Returns all layout objects of first level of depth
"""
self._check_layout()
return LayoutSlice(self.layout, slice(0, len(self.layout.fields), 1))
def filter(self, *LayoutClasses, **kwargs):
"""
Returns a LayoutSlice pointing to layout objects of type `LayoutClass`
"""
self._check_layout()
max_level = kwargs.pop('max_level', 0)
greedy = kwargs.pop('greedy', False)
filtered_layout_objects = self.layout.get_layout_objects(LayoutClasses, max_level=max_level, greedy=greedy)
return LayoutSlice(self.layout, filtered_layout_objects)
def filter_by_widget(self, widget_type):
"""
Returns a LayoutSlice pointing to fields with widgets of `widget_type`
"""
self._check_layout_and_form()
layout_field_names = self.layout.get_field_names()
# Let's filter all fields with widgets like widget_type
filtered_fields = []
for pointer in layout_field_names:
if isinstance(self.form.fields[pointer[1]].widget, widget_type):
filtered_fields.append(pointer)
return LayoutSlice(self.layout, filtered_fields)
def exclude_by_widget(self, widget_type):
"""
Returns a LayoutSlice pointing to fields with widgets NOT matching `widget_type`
"""
self._check_layout_and_form()
layout_field_names = self.layout.get_field_names()
# Let's exclude all fields with widgets like widget_type
filtered_fields = []
for pointer in layout_field_names:
if not isinstance(self.form.fields[pointer[1]].widget, widget_type):
filtered_fields.append(pointer)
return LayoutSlice(self.layout, filtered_fields)
def __getitem__(self, key):
"""
Return a LayoutSlice that makes changes affect the current instance of the layout
and not a copy.
"""
# when key is a string containing the field name
if isinstance(key, string_types):
# Django templates access FormHelper attributes using dictionary [] operator
# This could be a helper['form_id'] access, not looking for a field
if hasattr(self, key):
return getattr(self, key)
self._check_layout()
layout_field_names = self.layout.get_field_names()
filtered_field = []
for pointer in layout_field_names:
# There can be an empty pointer
if len(pointer) == 2 and pointer[1] == key:
filtered_field.append(pointer)
return LayoutSlice(self.layout, filtered_field)
return LayoutSlice(self.layout, key)
def __setitem__(self, key, value):
self.layout[key] = value
def __delitem__(self, key):
del self.layout.fields[key]
def __len__(self):
if self.layout is not None:
return len(self.layout.fields)
else:
return 0
class FormHelper(DynamicLayoutHandler):
"""
This class controls the form rendering behavior of the form passed to
the `{% crispy %}` tag. For doing so you will need to set its attributes
and pass the corresponding helper object to the tag::
{% crispy form form.helper %}
Let's see what attributes you can set and what form behaviors they apply to:
**form_method**: Specifies form method attribute.
        You can set it to 'POST' or 'GET'. Defaults to 'POST'
**form_action**: Applied to the form action attribute:
- Can be a named url in your URLconf that can be executed via the `{% url %}` template tag. \
Example: 'show_my_profile'. In your URLconf you could have something like::
url(r'^show/profile/$', 'show_my_profile_view', name = 'show_my_profile')
- It can simply point to a URL '/whatever/blabla/'.
**form_id**: Generates a form id for dom identification.
        If no id is provided, then no id attribute is created on the form.
    **form_class**: String containing separated CSS classes to be applied
to form class attribute. The form will always have by default
'uniForm' class.
**form_tag**: It specifies if <form></form> tags should be rendered when using a Layout.
If set to False it renders the form without the <form></form> tags. Defaults to True.
**form_error_title**: If a form has `non_field_errors` to display, they
        are rendered in a div. You can set the title of that div with this attribute.
Example: "Oooops!" or "Form Errors"
**formset_error_title**: If a formset has `non_form_errors` to display, they
        are rendered in a div. You can set the title of that div with this attribute.
    **form_style**: Uni-form has two different built-in form styles. You can choose
your favorite. This can be set to "default" or "inline". Defaults to "default".
Public Methods:
**add_input(input)**: You can add input buttons using this method. Inputs
added using this method will be rendered at the end of the form/formset.
**add_layout(layout)**: You can add a `Layout` object to `FormHelper`. The Layout
specifies in a simple, clean and DRY way how the form fields should be rendered.
You can wrap fields, order them, customize pretty much anything in the form.
Best way to add a helper to a form is adding a property named helper to the form
that returns customized `FormHelper` object::
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
class MyForm(forms.Form):
title = forms.CharField(_("Title"))
@property
def helper(self):
helper = FormHelper()
helper.form_id = 'this-form-rocks'
helper.form_class = 'search'
helper.add_input(Submit('save', 'save'))
[...]
return helper
You can use it in a template doing::
{% load crispy_forms_tags %}
{% crispy form %}
"""
_form_method = 'post'
_form_action = ''
_form_style = 'default'
form = None
form_id = ''
form_class = ''
layout = None
form_tag = True
form_error_title = None
formset_error_title = None
form_show_errors = True
render_unmentioned_fields = False
render_hidden_fields = False
render_required_fields = False
_help_text_inline = False
_error_text_inline = True
html5_required = False
form_show_labels = True
template = None
field_template = None
disable_csrf = False
label_class = ''
field_class = ''
def __init__(self, form=None):
self.attrs = {}
self.inputs = []
if form is not None:
self.form = form
self.layout = self.build_default_layout(form)
def build_default_layout(self, form):
return Layout(*form.fields.keys())
@property
def form_method(self):
return self._form_method
@form_method.setter
def form_method(self, method):
if method.lower() not in ('get', 'post'):
raise FormHelpersException('Only GET and POST are valid in the \
form_method helper attribute')
self._form_method = method.lower()
@property
def form_action(self):
try:
return reverse(self._form_action)
except NoReverseMatch:
return self._form_action
@form_action.setter
def form_action(self, action):
self._form_action = action
@property
def form_style(self):
if self._form_style == "default":
return ''
if self._form_style == "inline":
return 'inlineLabels'
@form_style.setter
def form_style(self, style):
if style.lower() not in ('default', 'inline'):
raise FormHelpersException('Only default and inline are valid in the \
form_style helper attribute')
self._form_style = style.lower()
@property
def help_text_inline(self):
return self._help_text_inline
@help_text_inline.setter
def help_text_inline(self, flag):
self._help_text_inline = flag
self._error_text_inline = not flag
@property
def error_text_inline(self):
return self._error_text_inline
@error_text_inline.setter
def error_text_inline(self, flag):
self._error_text_inline = flag
self._help_text_inline = not flag
def add_input(self, input_object):
self.inputs.append(input_object)
def add_layout(self, layout):
self.layout = layout
def render_layout(self, form, context, template_pack=TEMPLATE_PACK):
"""
Returns safe html of the rendering of the layout
"""
form.rendered_fields = set()
form.crispy_field_template = self.field_template
# This renders the specified Layout strictly
html = self.layout.render(
form,
self.form_style,
context,
template_pack=template_pack
)
# Rendering some extra fields if specified
if self.render_unmentioned_fields or self.render_hidden_fields or self.render_required_fields:
fields = set(form.fields.keys())
left_fields_to_render = fields - form.rendered_fields
for field in left_fields_to_render:
if (
self.render_unmentioned_fields or
self.render_hidden_fields and form.fields[field].widget.is_hidden or
self.render_required_fields and form.fields[field].widget.is_required
):
html += render_field(
field,
form,
self.form_style,
context,
template_pack=template_pack
)
# If the user has Meta.fields defined, not included in the layout,
# we suppose they need to be rendered
if hasattr(form, 'Meta'):
if hasattr(form.Meta, 'fields'):
current_fields = set(getattr(form, 'fields', []))
meta_fields = set(getattr(form.Meta, 'fields'))
fields_to_render = current_fields & meta_fields
left_fields_to_render = fields_to_render - form.rendered_fields
for field in left_fields_to_render:
html += render_field(field, form, self.form_style, context)
return mark_safe(html)
def get_attributes(self, template_pack=TEMPLATE_PACK):
"""
Used by crispy_forms_tags to get helper attributes
"""
items = {
'form_method': self.form_method.strip(),
'form_tag': self.form_tag,
'form_style': self.form_style.strip(),
'form_show_errors': self.form_show_errors,
'help_text_inline': self.help_text_inline,
'error_text_inline': self.error_text_inline,
'html5_required': self.html5_required,
'form_show_labels': self.form_show_labels,
'disable_csrf': self.disable_csrf,
'label_class': self.label_class,
'field_class': self.field_class
}
# col-[lg|md|sm|xs]-<number>
label_size_match = re.search('(\d+)', self.label_class)
device_type_match = re.search('(lg|md|sm|xs)', self.label_class)
if label_size_match and device_type_match:
try:
items['label_size'] = int(label_size_match.groups()[0])
items['bootstrap_device_type'] = device_type_match.groups()[0]
except:
pass
items['attrs'] = {}
if self.attrs:
items['attrs'] = self.attrs.copy()
if self.form_action:
items['attrs']['action'] = self.form_action.strip()
if self.form_id:
items['attrs']['id'] = self.form_id.strip()
if self.form_class:
# uni_form TEMPLATE PACK has a uniForm class by default
if template_pack == 'uni_form':
items['attrs']['class'] = "uniForm %s" % self.form_class.strip()
else:
items['attrs']['class'] = self.form_class.strip()
else:
if template_pack == 'uni_form':
items['attrs']['class'] = self.attrs.get('class', '') + " uniForm"
items['flat_attrs'] = flatatt(items['attrs'])
if self.inputs:
items['inputs'] = self.inputs
if self.form_error_title:
items['form_error_title'] = self.form_error_title.strip()
if self.formset_error_title:
items['formset_error_title'] = self.formset_error_title.strip()
for attribute_name, value in self.__dict__.items():
if attribute_name not in items and attribute_name not in ['layout', 'inputs'] and not attribute_name.startswith('_'):
items[attribute_name] = value
return items
| mit | -4,172,455,794,540,598,300 | 7,270,075,038,863,433,000 | 35.300518 | 129 | 0.593206 | false |
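A small sketch of the helper and dynamic-layout API implemented above, attached to a hypothetical Django form; the form class and field names are illustrative assumptions.

from django import forms

from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit

class SearchForm(forms.Form):
    # Hypothetical form used only to exercise the helper API defined above.
    query = forms.CharField()
    token = forms.CharField(widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        super(SearchForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper(self)       # builds a default Layout from the form's fields
        self.helper.form_method = 'post'     # validated by the form_method property setter
        self.helper.form_class = 'search'
        self.helper.add_input(Submit('search', 'Search'))
        # DynamicLayoutHandler access: slice the layout by field name or by widget type.
        query_slice = self.helper['query']
        hidden_slice = self.helper.filter_by_widget(forms.HiddenInput)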
OWASP/django-DefectDojo | dojo/tools/appspider/parser.py | 2 | 3838 |
from datetime import datetime
from xml.dom import NamespaceErr
from defusedxml import ElementTree
from dojo.models import Endpoint, Finding
import html2text
import urllib.parse
__author__ = "Jay Paz"
class AppSpiderXMLParser(object):
def __init__(self, filename, test):
if "VulnerabilitiesSummary.xml" not in str(filename):
raise NamespaceErr('Please ensure that you are uploading AppSpider\'s VulnerabilitiesSummary.xml file.'
                               ' At this time it is the only file that is consumable by DefectDojo.')
vscan = ElementTree.parse(filename)
root = vscan.getroot()
if "VulnSummary" not in str(root.tag):
raise NamespaceErr('Please ensure that you are uploading AppSpider\'s VulnerabilitiesSummary.xml file.'
                               ' At this time it is the only file that is consumable by DefectDojo.')
dupes = dict()
for finding in root.iter('Vuln'):
severity = finding.find("AttackScore").text
if severity == "0-Safe":
severity = "Info"
elif severity == "1-Informational":
severity = "Low"
elif severity == "2-Low":
severity = "Medium"
elif severity == "3-Medium":
severity = "High"
elif severity == "4-High":
severity = "Critical"
else:
severity = "Info"
title = finding.find("VulnType").text
description = finding.find("Description").text
mitigation = finding.find("Recommendation").text
vuln_url = finding.find("VulnUrl").text
parts = urllib.parse.urlparse(vuln_url)
cwe = int(finding.find("CweId").text)
dupe_key = severity + title
unsaved_endpoints = list()
unsaved_req_resp = list()
if title is None:
title = ''
if description is None:
description = ''
if mitigation is None:
mitigation = ''
if dupe_key in dupes:
find = dupes[dupe_key]
unsaved_endpoints.append(find.unsaved_endpoints)
unsaved_req_resp.append(find.unsaved_req_resp)
else:
find = Finding(title=title,
test=test,
active=False,
verified=False,
description=html2text.html2text(description),
severity=severity,
numerical_severity=Finding.get_numerical_severity(severity),
mitigation=html2text.html2text(mitigation),
impact="N/A",
references=None,
cwe=cwe)
find.unsaved_endpoints = unsaved_endpoints
find.unsaved_req_resp = unsaved_req_resp
dupes[dupe_key] = find
for attack in finding.iter("AttackRequest"):
req = attack.find("Request").text
resp = attack.find("Response").text
find.unsaved_req_resp.append({"req": req, "resp": resp})
find.unsaved_endpoints.append(Endpoint(protocol=parts.scheme,
host=parts.netloc,
path=parts.path,
query=parts.query,
fragment=parts.fragment,
product=test.engagement.product))
self.items = list(dupes.values())
| bsd-3-clause | -6,336,344,524,479,093,000 | -263,598,179,446,615,680 | 36.627451 | 115 | 0.494007 | false |
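A minimal sketch of how the parser above is typically invoked from DefectDojo's import flow; the 'test' object is an assumed dojo Test instance.

# 'test' is assumed to be an existing DefectDojo Test instance.
parser = AppSpiderXMLParser("VulnerabilitiesSummary.xml", test)
for finding in parser.items:
    # Endpoints and request/response pairs are attached but not yet saved to the database.
    print(finding.title, finding.severity, len(finding.unsaved_endpoints))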
wteiken/letsencrypt | certbot/tests/account_test.py | 4 | 6573 | """Tests for certbot.account."""
import datetime
import os
import shutil
import stat
import tempfile
import unittest
import mock
import pytz
from acme import jose
from acme import messages
from certbot import errors
from certbot.tests import test_util
KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key_2.pem"))
class AccountTest(unittest.TestCase):
"""Tests for certbot.account.Account."""
def setUp(self):
from certbot.account import Account
self.regr = mock.MagicMock()
self.meta = Account.Meta(
creation_host="test.certbot.org",
creation_dt=datetime.datetime(
2015, 7, 4, 14, 4, 10, tzinfo=pytz.UTC))
self.acc = Account(self.regr, KEY, self.meta)
with mock.patch("certbot.account.socket") as mock_socket:
mock_socket.getfqdn.return_value = "test.certbot.org"
with mock.patch("certbot.account.datetime") as mock_dt:
mock_dt.datetime.now.return_value = self.meta.creation_dt
self.acc_no_meta = Account(self.regr, KEY)
def test_init(self):
self.assertEqual(self.regr, self.acc.regr)
self.assertEqual(KEY, self.acc.key)
self.assertEqual(self.meta, self.acc_no_meta.meta)
def test_id(self):
self.assertEqual(
self.acc.id, "bca5889f66457d5b62fbba7b25f9ab6f")
def test_slug(self):
self.assertEqual(
self.acc.slug, "test.certbot.org@2015-07-04T14:04:10Z (bca5)")
def test_repr(self):
self.assertEqual(
repr(self.acc),
"<Account(bca5889f66457d5b62fbba7b25f9ab6f)>")
class ReportNewAccountTest(unittest.TestCase):
"""Tests for certbot.account.report_new_account."""
def setUp(self):
self.config = mock.MagicMock(config_dir="/etc/letsencrypt")
reg = messages.Registration.from_data(email="[email protected]")
self.acc = mock.MagicMock(regr=messages.RegistrationResource(
uri=None, new_authzr_uri=None, body=reg))
def _call(self):
from certbot.account import report_new_account
report_new_account(self.acc, self.config)
@mock.patch("certbot.account.zope.component.queryUtility")
def test_no_reporter(self, mock_zope):
mock_zope.return_value = None
self._call()
@mock.patch("certbot.account.zope.component.queryUtility")
def test_it(self, mock_zope):
self._call()
call_list = mock_zope().add_message.call_args_list
self.assertTrue(self.config.config_dir in call_list[0][0][0])
self.assertTrue(
", ".join(self.acc.regr.body.emails) in call_list[1][0][0])
class AccountMemoryStorageTest(unittest.TestCase):
"""Tests for certbot.account.AccountMemoryStorage."""
def setUp(self):
from certbot.account import AccountMemoryStorage
self.storage = AccountMemoryStorage()
def test_it(self):
account = mock.Mock(id="x")
self.assertEqual([], self.storage.find_all())
self.assertRaises(errors.AccountNotFound, self.storage.load, "x")
self.storage.save(account)
self.assertEqual([account], self.storage.find_all())
self.assertEqual(account, self.storage.load("x"))
self.storage.save(account)
self.assertEqual([account], self.storage.find_all())
class AccountFileStorageTest(unittest.TestCase):
"""Tests for certbot.account.AccountFileStorage."""
def setUp(self):
self.tmp = tempfile.mkdtemp()
self.config = mock.MagicMock(
accounts_dir=os.path.join(self.tmp, "accounts"))
from certbot.account import AccountFileStorage
self.storage = AccountFileStorage(self.config)
from certbot.account import Account
self.acc = Account(
regr=messages.RegistrationResource(
uri=None, new_authzr_uri=None, body=messages.Registration()),
key=KEY)
def tearDown(self):
shutil.rmtree(self.tmp)
def test_init_creates_dir(self):
self.assertTrue(os.path.isdir(self.config.accounts_dir))
def test_save_and_restore(self):
self.storage.save(self.acc)
account_path = os.path.join(self.config.accounts_dir, self.acc.id)
self.assertTrue(os.path.exists(account_path))
for file_name in "regr.json", "meta.json", "private_key.json":
self.assertTrue(os.path.exists(
os.path.join(account_path, file_name)))
self.assertEqual("0400", oct(os.stat(os.path.join(
account_path, "private_key.json"))[stat.ST_MODE] & 0o777))
# restore
self.assertEqual(self.acc, self.storage.load(self.acc.id))
def test_find_all(self):
self.storage.save(self.acc)
self.assertEqual([self.acc], self.storage.find_all())
def test_find_all_none_empty_list(self):
self.assertEqual([], self.storage.find_all())
def test_find_all_accounts_dir_absent(self):
os.rmdir(self.config.accounts_dir)
self.assertEqual([], self.storage.find_all())
def test_find_all_load_skips(self):
self.storage.load = mock.MagicMock(
side_effect=["x", errors.AccountStorageError, "z"])
with mock.patch("certbot.account.os.listdir") as mock_listdir:
mock_listdir.return_value = ["x", "y", "z"]
self.assertEqual(["x", "z"], self.storage.find_all())
def test_load_non_existent_raises_error(self):
self.assertRaises(errors.AccountNotFound, self.storage.load, "missing")
def test_load_id_mismatch_raises_error(self):
self.storage.save(self.acc)
shutil.move(os.path.join(self.config.accounts_dir, self.acc.id),
os.path.join(self.config.accounts_dir, "x" + self.acc.id))
self.assertRaises(errors.AccountStorageError, self.storage.load,
"x" + self.acc.id)
def test_load_ioerror(self):
self.storage.save(self.acc)
mock_open = mock.mock_open()
mock_open.side_effect = IOError
with mock.patch("__builtin__.open", mock_open):
self.assertRaises(
errors.AccountStorageError, self.storage.load, self.acc.id)
def test_save_ioerrors(self):
mock_open = mock.mock_open()
mock_open.side_effect = IOError # TODO: [None, None, IOError]
with mock.patch("__builtin__.open", mock_open):
self.assertRaises(
errors.AccountStorageError, self.storage.save, self.acc)
if __name__ == "__main__":
unittest.main() # pragma: no cover
| apache-2.0 | -3,825,689,010,595,611,600 | -4,421,202,470,911,767,000 | 34.52973 | 79 | 0.637913 | false |
ryanjoneil/docker-image-construction | ipynb/examples/example1.py | 1 | 3732 | from mosek.fusion import Model, Domain, Expr, ObjectiveSense
import sys
# Example 1. Full representation of 3-image problem with all maximal cliques.
# DICP instance:
#
# Resource consumption by command:
#
# C = {A, B, C, D}
#
# | x = A: 5 |
# r(c) = | x = B: 10 |
# | x = C: 7 |
# | x = D: 12 |
#
# Images to create:
#
# I = {1, 2, 3}
#
# | i = 1: {A, B} |
# C(i) = | i = 2: {A, B, C, D} |
# | i = 3: {B, C, D} |
r = {'A': 5.0, 'B': 10.0, 'C': 7.0, 'D': 12.0}
m = Model()
binary = (Domain.inRange(0.0, 1.0), Domain.isInteger())
# Provide a variable for each image and command. This is 1 if the command
# is not run as part of a clique for the image.
x_1_a = m.variable('x_1_a', *binary)
x_1_b = m.variable('x_1_b', *binary)
x_2_a = m.variable('x_2_a', *binary)
x_2_b = m.variable('x_2_b', *binary)
x_2_c = m.variable('x_2_c', *binary)
x_2_d = m.variable('x_2_d', *binary)
x_3_b = m.variable('x_3_b', *binary)
x_3_c = m.variable('x_3_c', *binary)
x_3_d = m.variable('x_3_d', *binary)
# Provide a variable for each maximal clique and maximal sub-clique.
x_12_ab = m.variable('x_12_ab', *binary)
x_123_b = m.variable('x_123_b', *binary)
x_123_b_12_a = m.variable('x_123_b_12_a', *binary)
x_123_b_23_cd = m.variable('x_123_b_23_cd', *binary)
# Each command must be run once for each image.
m.constraint('c_1_a', Expr.add([x_1_a, x_12_ab, x_123_b_12_a]), Domain.equalsTo(1.0))
m.constraint('c_1_b', Expr.add([x_1_b, x_12_ab, x_123_b]), Domain.equalsTo(1.0))
m.constraint('c_2_a', Expr.add([x_2_a, x_12_ab, x_123_b_12_a]), Domain.equalsTo(1.0))
m.constraint('c_2_b', Expr.add([x_2_b, x_12_ab, x_123_b]), Domain.equalsTo(1.0))
m.constraint('c_2_c', Expr.add([x_2_c, x_123_b_23_cd]), Domain.equalsTo(1.0))
m.constraint('c_2_d', Expr.add([x_2_d, x_123_b_23_cd]), Domain.equalsTo(1.0))
m.constraint('c_3_b', Expr.add([x_3_b, x_123_b]), Domain.equalsTo(1.0))
m.constraint('c_3_c', Expr.add([x_3_c, x_123_b_23_cd]), Domain.equalsTo(1.0))
m.constraint('c_3_d', Expr.add([x_3_d, x_123_b_23_cd]), Domain.equalsTo(1.0))
# Add dependency constraints for sub-cliques.
m.constraint('d_123_b_12_a', Expr.sub(x_123_b, x_123_b_12_a), Domain.greaterThan(0.0))
m.constraint('d_123_b_23_cd', Expr.sub(x_123_b, x_123_b_23_cd), Domain.greaterThan(0.0))
# Eliminated intersections between cliques.
m.constraint('e1', Expr.add([x_12_ab, x_123_b]), Domain.lessThan(1.0))
m.constraint('e2', Expr.add([x_123_b_12_a, x_123_b_23_cd]), Domain.lessThan(1.0))
# Minimize resources required to construct all images.
obj = [Expr.mul(c, x) for c, x in [
# Individual image/command pairs
(r['A'], x_1_a), (r['B'], x_1_b),
(r['A'], x_2_a), (r['B'], x_2_b), (r['C'], x_2_c), (r['D'], x_2_d),
(r['B'], x_3_b), (r['C'], x_3_c), (r['D'], x_3_d),
# Cliques
(r['A'] + r['B'], x_12_ab),
(r['B'], x_123_b),
(r['A'], x_123_b_12_a),
(r['C'] + r['D'], x_123_b_23_cd),
]]
m.objective('w', ObjectiveSense.Minimize, Expr.add(obj))
m.setLogHandler(sys.stdout)
m.solve()
print
print 'Image 1:'
print '\tx_1_a = %.0f' % x_1_a.level()[0]
print '\tx_1_b = %.0f' % x_1_b.level()[0]
print
print 'Image 2:'
print '\tx_2_a = %.0f' % x_2_a.level()[0]
print '\tx_2_b = %.0f' % x_2_b.level()[0]
print '\tx_2_c = %.0f' % x_2_c.level()[0]
print '\tx_2_d = %.0f' % x_2_d.level()[0]
print
print 'Image 3:'
print '\tx_3_b = %.0f' % x_3_b.level()[0]
print '\tx_3_c = %.0f' % x_3_c.level()[0]
print '\tx_3_d = %.0f' % x_3_d.level()[0]
print
print 'Cliques:'
print '\tx_12_ab = %.0f' % x_12_ab.level()[0]
print '\tx_123_b = %.0f' % x_123_b.level()[0]
print '\tx_123_b_12_a = %.0f' % x_123_b_12_a.level()[0]
print '\tx_123_b_23_cd = %.0f' % x_123_b_23_cd.level()[0]
print
| mit | 8,393,846,710,338,322,000 | -396,294,256,665,370,900 | 32.927273 | 88 | 0.566184 | false |
clld/tsammalex | tsammalex/util.py | 1 | 4317 | from collections import OrderedDict
from purl import URL
from sqlalchemy.orm import joinedload, contains_eager
from clld.web.util.multiselect import MultiSelect
from clld.db.meta import DBSession
from clld.db.models.common import Language, Unit, Value, ValueSet
from clld.web.util.htmllib import HTML
from clld.web.util.helpers import maybe_external_link, collapsed
from tsammalex.models import split_ids
assert split_ids
def license_name(license_url):
if license_url == "http://commons.wikimedia.org/wiki/GNU_Free_Documentation_License":
return 'GNU Free Documentation License'
if license_url == 'http://en.wikipedia.org/wiki/Public_domain':
license_url = 'http://creativecommons.org/publicdomain/zero/1.0/'
license_url_ = URL(license_url)
if license_url_.host() != 'creativecommons.org':
return license_url
comps = license_url_.path().split('/')
if len(comps) < 3:
return license_url
return {
'zero': 'Public Domain',
}.get(comps[2], '(CC) %s' % comps[2].upper())
def names_in_2nd_languages(vs):
def format_name(n):
res = [HTML.i(n.name)]
if n.ipa:
res.append(' [%s]' % n.ipa)
return HTML.span(*res)
def format_language(vs):
return ' '.join([vs.language.name, ', '.join(format_name(n) for n in vs.values)])
query = DBSession.query(ValueSet).join(ValueSet.language)\
.order_by(Language.name)\
.filter(Language.pk.in_([l.pk for l in vs.language.second_languages]))\
.filter(ValueSet.parameter_pk == vs.parameter_pk)\
.options(contains_eager(ValueSet.language), joinedload(ValueSet.values))
res = '; '.join(format_language(vs) for vs in query)
if res:
res = '(%s)' % res
return res
def source_link(source):
label = source
host = URL(source).host()
if host == 'commons.wikimedia.org':
label = 'wikimedia'
elif host == 'en.wikipedia.org':
label = 'wikipedia'
return maybe_external_link(source, label=label)
def with_attr(f):
def wrapper(ctx, name, *args, **kw):
kw['attr'] = getattr(ctx, name)
if not kw['attr']:
return '' # pragma: no cover
return f(ctx, name, *args, **kw)
return wrapper
@with_attr
def tr_rel(ctx, name, label=None, dt='name', dd='description', attr=None):
content = []
for item in attr:
content.extend([HTML.dt(getattr(item, dt)), HTML.dd(getattr(item, dd))])
content = HTML.dl(*content, class_='dl-horizontal')
if len(attr) > 3:
content = collapsed('collapsed-' + name, content)
return HTML.tr(HTML.td((label or name.capitalize()) + ':'), HTML.td(content))
@with_attr
def tr_attr(ctx, name, label=None, content=None, attr=None):
return HTML.tr(
HTML.td((label or name.capitalize()) + ':'),
HTML.td(content or maybe_external_link(attr)))
def format_classification(taxon, with_species=False, with_rank=False):
names = OrderedDict()
for r in 'kingdom phylum class_ order family'.split():
names[r.replace('_', '')] = getattr(taxon, r)
if with_species:
names[taxon.rank] = taxon.name
return HTML.ul(
*[HTML.li(('{0} {1}: {2}' if with_rank else '{0}{2}').format('-' * i, *n))
for i, n in enumerate(n for n in names.items() if n[1])],
class_="unstyled")
class LanguageMultiSelect(MultiSelect):
def __init__(self, ctx, req, name='languages', eid='ms-languages', **kw):
kw['selected'] = ctx.languages
MultiSelect.__init__(self, req, name, eid, **kw)
@classmethod
def query(cls):
return DBSession.query(Language).order_by(Language.name)
def get_options(self):
return {
'data': [self.format_result(p) for p in self.query()],
'multiple': True,
'maximumSelectionSize': 2}
def parameter_index_html(context=None, request=None, **kw):
return dict(select=LanguageMultiSelect(context, request))
def language_detail_html(context=None, request=None, **kw):
return dict(categories=list(DBSession.query(Unit)
.filter(Unit.language == context).order_by(Unit.name)))
def language_index_html(context=None, request=None, **kw):
return dict(map_=request.get_map('languages', col='lineage', dt=context))
| apache-2.0 | 8,558,602,948,993,568,000 | -8,631,604,126,024,156,000 | 32.207692 | 89 | 0.632847 | false |
royveshovda/pifog | source/piclient/sensorpi/sensor_runner.py | 2 | 4086 | import json
import time
import settings
from shared import common
from datetime import datetime
from uptime import boottime
handler = None
loudness_sensor_pin = 2
dht_sensor_pin = 4
def init():
global handler
if settings.is_fake():
from sensorpi import read_faker
handler = read_faker
else:
from sensorpi import read
handler = read
return
def customShadowCallback_Update(payload, responseStatus, token):
if responseStatus == "timeout":
print("Update request " + token + " time out!")
if responseStatus == "accepted":
payloadDict = json.loads(payload)
print("~~~~~~~~~~~~~~~~~~~~~~~")
print("Update request with token: " + token + " accepted!")
reported = payloadDict["state"]["reported"]
if "temperature" in reported:
print("temperature: " + str(payloadDict["state"]["reported"]["temperature"]))
if "humidity" in reported:
print("humidity: " + str(payloadDict["state"]["reported"]["humidity"]))
if "co2" in reported:
print("co2: " + str(payloadDict["state"]["reported"]["co2"]))
if "connected" in reported:
print("connected: " + str(payloadDict["state"]["reported"]["connected"]))
print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
if responseStatus == "rejected":
print("Update request " + token + " rejected!")
def should_read_co2(boot_time):
d2 = datetime.now()
d = d2 - boot_time
if d.total_seconds() > 200.0:
return True
else:
return False
def handle_command(client, message):
payload = message.payload.decode('utf-8')
print("Command received:")
print(payload)
#cmd = json.loads(payload)
#command = cmd["command"]
#cmd_id = cmd["id"]
#if command == "ping":
# common.send_pong(client, cmd_id, settings.topic_sensorpi_event)
def handle_notification(message):
print("Notification received: " + str(message.payload))
def on_message(client, userdata, msg):
if msg.topic == settings.topic_sensorpi_command:
handle_command(client, msg)
return
if msg.topic == settings.topic_sensorpi_notify:
handle_notification(msg)
return
print("Spam received: " + str(msg.payload))
def send_data(client, co2, temperature, humidity, loudness):
# Prepare our sensor data in JSON format.
payload = json.dumps({
"state": {
"reported": {
"co2": co2,
"temperature": temperature,
"humidity": humidity
}
}
})
client.shadowUpdate(payload, customShadowCallback_Update, 5)
def start():
time.sleep(20)
shadow, client = common.setup_aws_shadow_client(settings.aws_endpoint,
settings.aws_root_certificate,
settings.aws_private_key,
settings.aws_certificate,
settings.device_name)
JSONPayload = '{"state":{"reported":{"connected":"true"}}}'
client.shadowUpdate(JSONPayload, customShadowCallback_Update, 5)
handler.setup(dht_sensor_pin, loudness_sensor_pin)
d1 = datetime.min
boot_time = boottime()
should_read = False
try:
while True:
d2 = datetime.now()
d = d2 - d1
if d.total_seconds() > 10.0:
if (should_read == False):
should_read = should_read_co2(boot_time)
[co2, temperature, humidity, loudness] = handler.read_data(should_read)
send_data(client, co2, temperature, humidity, loudness)
d1 = d2
else:
time.sleep(1)
except KeyboardInterrupt:
JSONPayload = '{"state":{"reported":{"connected":"false"}}}'
client.shadowUpdate(JSONPayload, customShadowCallback_Update, 5)
shadow.disconnect()
handler.cleanup()
print('stopped')
def stop():
return
| apache-2.0 | 5,035,017,325,888,542,000 | -2,007,515,910,761,329,700 | 30.19084 | 89 | 0.567792 | false |
Kelfast/mamba-framework | mamba/test/test_unittest.py | 3 | 4197 |
# Copyright (c) 2012 ~ 2014 - Oscar Campos <[email protected]>
# See LICENSE for more details
"""Unit tests for unittesting module helper
"""
import os
from storm.store import Store
from twisted.trial import unittest
from twisted.python.threadpool import ThreadPool
from mamba.utils import config
from mamba.application.model import Model
from mamba.unittest import database_helpers
from mamba.test.test_model import DummyModel
class DatabaseHelpersTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
Model.database = database_helpers.Database()
def test_testable_database_engine_native(self):
db = database_helpers.TestableDatabase()
self.assertEqual(db.engine, database_helpers.ENGINE.NATIVE)
def test_initialize_engine_native(self):
config.Database('../mamba/test/dummy_app/config/database.json')
current_dir = os.getcwd()
os.chdir('../mamba/test/dummy_app')
db = database_helpers.TestableDatabase()
store = db.store()
self.assertEqual(store.get_database()._filename, 'db/dummy.db')
os.chdir(current_dir)
def test_testable_database_engine_inmemory(self):
engine = database_helpers.ENGINE.INMEMORY
db = database_helpers.TestableDatabase(engine)
self.assertEqual(db.engine, database_helpers.ENGINE.INMEMORY)
def test_initialize_engine_memory(self):
engine = database_helpers.ENGINE.INMEMORY
db = database_helpers.TestableDatabase(engine)
store = db.store()
self.assertEqual(store.get_database()._filename, ':memory:')
store.close()
def test_testable_database_engine_persistent(self):
engine = database_helpers.ENGINE.PERSISTENT
db = database_helpers.TestableDatabase(engine)
self.assertEqual(db.engine, database_helpers.ENGINE.PERSISTENT)
def test_initialize_engine_persistent(self):
engine = database_helpers.ENGINE.PERSISTENT
db = database_helpers.TestableDatabase(engine)
uri = database_helpers.global_zstorm.get_default_uris()['mamba'].split(
'?foreign_keys=1'
)[0].split('sqlite:')[1]
store = db.store()
self.assertEqual(store.get_database()._filename, uri)
def test_prepare_model_for_test(self):
model = Model()
self.assertEqual(model.database.__class__, database_helpers.Database)
database_helpers.prepare_model_for_test(model)
self.assertEqual(
model.database.__class__, database_helpers.TestableDatabase)
def test_prepate_model_for_test_using_class(self):
self.assertEqual(Model.database.__class__, database_helpers.Database)
database_helpers.prepare_model_for_test(Model)
self.assertEqual(
Model.database.__class__, database_helpers.TestableDatabase)
def test_prepare_model_for_test_using_real_model(self):
self.assertEqual(
DummyModel.database.__class__, database_helpers.Database)
database_helpers.prepare_model_for_test(DummyModel)
self.assertEqual(
DummyModel.database.__class__, database_helpers.TestableDatabase)
def test_database_is_started_defacto(self):
config.Database('../mamba/test/dummy_app/config/database.json')
model = Model()
database_helpers.prepare_model_for_test(model)
self.assertTrue(model.database.started)
def test_database_stop(self):
model = Model()
database_helpers.prepare_model_for_test(model)
self.assertTrue(model.database.started)
model.database.stop()
self.assertFalse(model.database.started)
def test_store_return_valid_store(self):
model = Model()
database_helpers.prepare_model_for_test(model)
store = model.database.store()
self.assertIsInstance(store, Store)
def test_model_transactor_uses_dummy_thread_pool(self):
model = Model()
self.assertIsInstance(model.transactor._threadpool, ThreadPool)
database_helpers.prepare_model_for_test(model)
self.assertIsInstance(
model.transactor._threadpool, database_helpers.DummyThreadPool)
| gpl-3.0 | 2,349,737,813,386,960,400 | -9,030,370,998,929,310,000 | 36.810811 | 79 | 0.68549 | false |
fidomason/kbengine | kbe/res/scripts/common/Lib/distutils/dir_util.py | 59 | 7780 | """distutils.dir_util
Utility functions for manipulating directories and directory trees."""
import os
import errno
from distutils.errors import DistutilsFileError, DistutilsInternalError
from distutils import log
# cache for by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = {}
# I don't use os.makedirs because a) it's new to Python 1.5.2, and
# b) it blows up if the directory already exists (I want to silently
# succeed in that case).
def mkpath(name, mode=0o777, verbose=1, dry_run=0):
"""Create a directory and any missing ancestor directories.
If the directory already exists (or if 'name' is the empty string, which
means the current directory, which of course exists), then do nothing.
Raise DistutilsFileError if unable to create some directory along the way
(eg. some sub-path exists, but is a file rather than a directory).
If 'verbose' is true, print a one-line summary of each mkdir to stdout.
Return the list of directories actually created.
"""
global _path_created
# Detect a common bug -- name is None
if not isinstance(name, str):
raise DistutilsInternalError(
"mkpath: 'name' must be a string (got %r)" % (name,))
# XXX what's the better way to handle verbosity? print as we create
# each directory in the path (the current behaviour), or only announce
# the creation of the whole path? (quite easy to do the latter since
# we're not using a recursive algorithm)
name = os.path.normpath(name)
created_dirs = []
if os.path.isdir(name) or name == '':
return created_dirs
if _path_created.get(os.path.abspath(name)):
return created_dirs
(head, tail) = os.path.split(name)
tails = [tail] # stack of lone dirs to create
while head and tail and not os.path.isdir(head):
(head, tail) = os.path.split(head)
tails.insert(0, tail) # push next higher dir onto stack
# now 'head' contains the deepest directory that already exists
# (that is, the child of 'head' in 'name' is the highest directory
# that does *not* exist)
for d in tails:
#print "head = %s, d = %s: " % (head, d),
head = os.path.join(head, d)
abs_head = os.path.abspath(head)
if _path_created.get(abs_head):
continue
if verbose >= 1:
log.info("creating %s", head)
if not dry_run:
try:
os.mkdir(head, mode)
except OSError as exc:
if not (exc.errno == errno.EEXIST and os.path.isdir(head)):
raise DistutilsFileError(
"could not create '%s': %s" % (head, exc.args[-1]))
created_dirs.append(head)
_path_created[abs_head] = 1
return created_dirs
def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0):
"""Create all the empty directories under 'base_dir' needed to put 'files'
there.
'base_dir' is just the a name of a directory which doesn't necessarily
exist yet; 'files' is a list of filenames to be interpreted relative to
'base_dir'. 'base_dir' + the directory portion of every file in 'files'
will be created if it doesn't already exist. 'mode', 'verbose' and
'dry_run' flags are as for 'mkpath()'.
"""
# First get the list of directories to create
need_dir = set()
for file in files:
need_dir.add(os.path.join(base_dir, os.path.dirname(file)))
# Now create them
for dir in sorted(need_dir):
mkpath(dir, mode, verbose=verbose, dry_run=dry_run)
def copy_tree(src, dst, preserve_mode=1, preserve_times=1,
preserve_symlinks=0, update=0, verbose=1, dry_run=0):
"""Copy an entire directory tree 'src' to a new location 'dst'.
Both 'src' and 'dst' must be directory names. If 'src' is not a
directory, raise DistutilsFileError. If 'dst' does not exist, it is
created with 'mkpath()'. The end result of the copy is that every
file in 'src' is copied to 'dst', and directories under 'src' are
recursively copied to 'dst'. Return the list of files that were
copied or might have been copied, using their output name. The
return value is unaffected by 'update' or 'dry_run': it is simply
the list of all files under 'src', with the names changed to be
under 'dst'.
'preserve_mode' and 'preserve_times' are the same as for
'copy_file'; note that they only apply to regular files, not to
directories. If 'preserve_symlinks' is true, symlinks will be
copied as symlinks (on platforms that support them!); otherwise
(the default), the destination of the symlink will be copied.
'update' and 'verbose' are the same as for 'copy_file'.
"""
from distutils.file_util import copy_file
if not dry_run and not os.path.isdir(src):
raise DistutilsFileError(
"cannot copy tree '%s': not a directory" % src)
try:
names = os.listdir(src)
except OSError as e:
if dry_run:
names = []
else:
raise DistutilsFileError(
"error listing files in '%s': %s" % (src, e.strerror))
if not dry_run:
mkpath(dst, verbose=verbose)
outputs = []
for n in names:
src_name = os.path.join(src, n)
dst_name = os.path.join(dst, n)
if n.startswith('.nfs'):
# skip NFS rename files
continue
if preserve_symlinks and os.path.islink(src_name):
link_dest = os.readlink(src_name)
if verbose >= 1:
log.info("linking %s -> %s", dst_name, link_dest)
if not dry_run:
os.symlink(link_dest, dst_name)
outputs.append(dst_name)
elif os.path.isdir(src_name):
outputs.extend(
copy_tree(src_name, dst_name, preserve_mode,
preserve_times, preserve_symlinks, update,
verbose=verbose, dry_run=dry_run))
else:
copy_file(src_name, dst_name, preserve_mode,
preserve_times, update, verbose=verbose,
dry_run=dry_run)
outputs.append(dst_name)
return outputs
def _build_cmdtuple(path, cmdtuples):
"""Helper for remove_tree()."""
for f in os.listdir(path):
real_f = os.path.join(path,f)
if os.path.isdir(real_f) and not os.path.islink(real_f):
_build_cmdtuple(real_f, cmdtuples)
else:
cmdtuples.append((os.remove, real_f))
cmdtuples.append((os.rmdir, path))
def remove_tree(directory, verbose=1, dry_run=0):
"""Recursively remove an entire directory tree.
Any errors are ignored (apart from being reported to stdout if 'verbose'
is true).
"""
global _path_created
if verbose >= 1:
log.info("removing '%s' (and everything under it)", directory)
if dry_run:
return
cmdtuples = []
_build_cmdtuple(directory, cmdtuples)
for cmd in cmdtuples:
try:
cmd[0](cmd[1])
# remove dir from cache if it's already there
abspath = os.path.abspath(cmd[1])
if abspath in _path_created:
del _path_created[abspath]
except OSError as exc:
log.warn("error removing %s: %s", directory, exc)
def ensure_relative(path):
"""Take the full path 'path', and make it a relative path.
This is useful to make 'path' the second argument to os.path.join().
"""
drive, path = os.path.splitdrive(path)
if path[0:1] == os.sep:
path = drive + path[1:]
return path
| lgpl-3.0 | 3,139,926,721,559,403,500 | 1,638,874,294,491,509,800 | 36.047619 | 78 | 0.616067 | false |
mcanthony/cython | tests/run/generators_py.py | 20 | 7054 | # mode: run
# tag: generators
import cython
def very_simple():
"""
>>> x = very_simple()
>>> next(x)
1
>>> next(x)
Traceback (most recent call last):
StopIteration
>>> next(x)
Traceback (most recent call last):
StopIteration
>>> x = very_simple()
>>> x.send(1)
Traceback (most recent call last):
TypeError: can't send non-None value to a just-started generator
"""
yield 1
def simple():
"""
>>> x = simple()
>>> list(x)
[1, 2, 3]
"""
yield 1
yield 2
yield 3
def simple_seq(seq):
"""
>>> x = simple_seq("abc")
>>> list(x)
['a', 'b', 'c']
"""
for i in seq:
yield i
def simple_send():
"""
>>> x = simple_send()
>>> next(x)
>>> x.send(1)
1
>>> x.send(2)
2
>>> x.send(3)
3
"""
i = None
while True:
i = yield i
def raising():
"""
>>> x = raising()
>>> next(x)
Traceback (most recent call last):
KeyError: 'foo'
>>> next(x)
Traceback (most recent call last):
StopIteration
"""
yield {}['foo']
def with_outer(*args):
"""
>>> x = with_outer(1, 2, 3)
>>> list(x())
[1, 2, 3]
"""
def generator():
for i in args:
yield i
return generator
def with_outer_raising(*args):
"""
>>> x = with_outer_raising(1, 2, 3)
>>> list(x())
[1, 2, 3]
"""
def generator():
for i in args:
yield i
raise StopIteration
return generator
def test_close():
"""
>>> x = test_close()
>>> x.close()
>>> x = test_close()
>>> next(x)
>>> x.close()
>>> next(x)
Traceback (most recent call last):
StopIteration
"""
while True:
yield
def test_ignore_close():
"""
>>> x = test_ignore_close()
>>> x.close()
>>> x = test_ignore_close()
>>> next(x)
>>> x.close()
Traceback (most recent call last):
RuntimeError: generator ignored GeneratorExit
"""
try:
yield
except GeneratorExit:
yield
def check_throw():
"""
>>> x = check_throw()
>>> x.throw(ValueError)
Traceback (most recent call last):
ValueError
>>> next(x)
Traceback (most recent call last):
StopIteration
>>> x = check_throw()
>>> next(x)
>>> x.throw(ValueError)
>>> next(x)
>>> x.throw(IndexError, "oops")
Traceback (most recent call last):
IndexError: oops
>>> next(x)
Traceback (most recent call last):
StopIteration
"""
while True:
try:
yield
except ValueError:
pass
def check_yield_in_except():
"""
>>> import sys
>>> orig_exc = sys.exc_info()[0]
>>> g = check_yield_in_except()
>>> next(g)
>>> next(g)
>>> orig_exc is sys.exc_info()[0] or sys.exc_info()[0]
True
"""
try:
yield
raise ValueError
except ValueError:
yield
def yield_in_except_throw_exc_type():
"""
>>> import sys
>>> g = yield_in_except_throw_exc_type()
>>> next(g)
>>> g.throw(TypeError)
Traceback (most recent call last):
TypeError
>>> next(g)
Traceback (most recent call last):
StopIteration
"""
try:
raise ValueError
except ValueError:
yield
def yield_in_except_throw_instance():
"""
>>> import sys
>>> g = yield_in_except_throw_instance()
>>> next(g)
>>> g.throw(TypeError())
Traceback (most recent call last):
TypeError
>>> next(g)
Traceback (most recent call last):
StopIteration
"""
try:
raise ValueError
except ValueError:
yield
def test_swap_assignment():
"""
>>> gen = test_swap_assignment()
>>> next(gen)
(5, 10)
>>> next(gen)
(10, 5)
"""
x,y = 5,10
yield (x,y)
x,y = y,x # no ref-counting here
yield (x,y)
class Foo(object):
"""
>>> obj = Foo()
>>> list(obj.simple(1, 2, 3))
[1, 2, 3]
"""
def simple(self, *args):
for i in args:
yield i
def test_nested(a, b, c):
"""
>>> obj = test_nested(1, 2, 3)
>>> [i() for i in obj]
[1, 2, 3, 4]
"""
def one():
return a
def two():
return b
def three():
return c
def new_closure(a, b):
def sum():
return a + b
return sum
yield one
yield two
yield three
yield new_closure(a, c)
def tolist(func):
def wrapper(*args, **kwargs):
return list(func(*args, **kwargs))
return wrapper
@tolist
def test_decorated(*args):
"""
>>> test_decorated(1, 2, 3)
[1, 2, 3]
"""
for i in args:
yield i
def test_return(a):
"""
>>> d = dict()
>>> obj = test_return(d)
>>> next(obj)
1
>>> next(obj)
Traceback (most recent call last):
StopIteration
>>> d['i_was_here']
True
"""
yield 1
a['i_was_here'] = True
return
def test_copied_yield(foo):
"""
>>> class Manager(object):
... def __enter__(self):
... return self
... def __exit__(self, type, value, tb):
... pass
>>> list(test_copied_yield(Manager()))
[1]
"""
with foo:
yield 1
def test_nested_yield():
"""
>>> obj = test_nested_yield()
>>> next(obj)
1
>>> obj.send(2)
2
>>> obj.send(3)
3
>>> obj.send(4)
Traceback (most recent call last):
StopIteration
"""
yield (yield (yield 1))
def test_sum_of_yields(n):
"""
>>> g = test_sum_of_yields(3)
>>> next(g)
(0, 0)
>>> g.send(1)
(0, 1)
>>> g.send(1)
(1, 2)
"""
x = 0
x += yield (0, x)
x += yield (0, x)
yield (1, x)
def test_nested_gen(n):
"""
>>> [list(a) for a in test_nested_gen(5)]
[[], [0], [0, 1], [0, 1, 2], [0, 1, 2, 3]]
"""
for a in range(n):
yield (b for b in range(a))
def test_lambda(n):
"""
>>> [i() for i in test_lambda(3)]
[0, 1, 2]
"""
for i in range(n):
yield lambda : i
def test_generator_cleanup():
"""
>>> g = test_generator_cleanup()
>>> del g
>>> g = test_generator_cleanup()
>>> next(g)
1
>>> del g
cleanup
"""
try:
yield 1
finally:
print('cleanup')
def test_del_in_generator():
"""
>>> [ s for s in test_del_in_generator() ]
['abcabcabc', 'abcabcabc']
"""
x = len('abc') * 'abc'
a = x
yield x
del x
yield a
del a
@cython.test_fail_if_path_exists("//IfStatNode", "//PrintStatNode")
def test_yield_in_const_conditional_false():
"""
>>> list(test_yield_in_const_conditional_false())
[]
"""
if False:
print((yield 1))
@cython.test_fail_if_path_exists("//IfStatNode")
@cython.test_assert_path_exists("//PrintStatNode")
def test_yield_in_const_conditional_true():
"""
>>> list(test_yield_in_const_conditional_true())
None
[1]
"""
if True:
print((yield 1))
| apache-2.0 | -8,436,136,479,174,199,000 | 551,237,242,184,551,740 | 17.563158 | 68 | 0.490218 | false |
michaelld/gnuradio | grc/converter/flow_graph.py | 5 | 3929 | # Copyright 2017,2018 Free Software Foundation, Inc.
# This file is part of GNU Radio
#
# GNU Radio Companion is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# GNU Radio Companion is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
from __future__ import absolute_import, division
import ast
from collections import OrderedDict
from ..core.io import yaml
from . import xml
def from_xml(filename):
"""Load flow graph from xml file"""
element, version_info = xml.load(filename, 'flow_graph.dtd')
data = convert_flow_graph_xml(element)
try:
file_format = int(version_info['format'])
except KeyError:
file_format = _guess_file_format_1(data)
data['metadata'] = {'file_format': file_format}
return data
def dump(data, stream):
out = yaml.dump(data, indent=2)
replace = [
('blocks:', '\nblocks:'),
('connections:', '\nconnections:'),
('metadata:', '\nmetadata:'),
]
for r in replace:
out = out.replace(*r)
prefix = '# auto-generated by grc.converter\n\n'
stream.write(prefix + out)
def convert_flow_graph_xml(node):
blocks = [
convert_block(block_data)
for block_data in node.findall('block')
]
options = next(b for b in blocks if b['id'] == 'options')
blocks.remove(options)
options.pop('id')
connections = [
convert_connection(connection)
for connection in node.findall('connection')
]
flow_graph = OrderedDict()
flow_graph['options'] = options
flow_graph['blocks'] = blocks
flow_graph['connections'] = connections
return flow_graph
def convert_block(data):
block_id = data.findtext('key')
params = OrderedDict(sorted(
(param.findtext('key'), param.findtext('value'))
for param in data.findall('param')
))
if block_id == "import":
params["imports"] = params.pop("import")
states = OrderedDict()
x, y = ast.literal_eval(params.pop('_coordinate', '(10, 10)'))
states['coordinate'] = yaml.ListFlowing([x, y])
states['rotation'] = int(params.pop('_rotation', '0'))
enabled = params.pop('_enabled', 'True')
states['state'] = (
'enabled' if enabled in ('1', 'True') else
'bypassed' if enabled == '2' else
'disabled'
)
block = OrderedDict()
if block_id != 'options':
block['name'] = params.pop('id')
block['id'] = block_id
block['parameters'] = params
block['states'] = states
return block
def convert_connection(data):
src_blk_id = data.findtext('source_block_id')
src_port_id = data.findtext('source_key')
snk_blk_id = data.findtext('sink_block_id')
snk_port_id = data.findtext('sink_key')
if src_port_id.isdigit():
src_port_id = src_port_id
if snk_port_id.isdigit():
snk_port_id = snk_port_id
return yaml.ListFlowing([src_blk_id, src_port_id, snk_blk_id, snk_port_id])
def _guess_file_format_1(data):
"""Try to guess the file format for flow-graph files without version tag"""
def has_numeric_port_ids(src_id, src_port_id, snk_id, snk_port_id):
return src_port_id.isdigit() and snk_port_id.isdigit()
try:
if any(not has_numeric_port_ids(*con) for con in data['connections']):
return 1
except (TypeError, KeyError):
pass
return 0
| gpl-3.0 | -8,563,911,048,658,310,000 | 2,647,621,648,034,919,000 | 28.541353 | 80 | 0.641894 | false |
mhugent/Quantum-GIS | python/plugins/processing/algs/grass7/ext/r_coin.py | 20 | 1212 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_coin.py
---------------------
Date : December 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'December 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import HtmlReportPostProcessor
def postProcessResults(alg):
HtmlReportPostProcessor.postProcessResults(alg)
| gpl-2.0 | 915,246,336,551,961,900 | 4,759,309,086,847,411,000 | 36.875 | 75 | 0.410891 | false |
cchauve/lrcstats | src/preprocessing/multi2singlefasta.py | 2 | 1641 | import sys, getopt
if __name__ == "__main__":
helpMessage = "Process FASTA files such that sequences for each sample are contained in one line."
usageMessage = "Usage: %s [-h help and usage] [-i long reads FASTA inputPath] [-o output path]" % (sys.argv[0])
options = "hi:o:"
try:
opts, args = getopt.getopt(sys.argv[1:], options)
except getopt.GetoptError:
print "Error: unable to read command line arguments."
sys.exit(2)
if (len(sys.argv) == 1):
print usageMessage
sys.exit()
inputPath = None
outputPath = None
for opt, arg in opts:
# Help message
if opt == '-h':
print helpMessage
print usageMessage
sys.exit()
# Get long reads FASTA inputPath
elif opt == '-i':
inputPath = arg
elif opt == '-o':
outputPath = arg
optsIncomplete = False
if inputPath is None or inputPath is '':
print "Please provide the sample long read FASTQ inputPath."
optsIncomplete = True
if outputPath is None or outputPath is '':
print "Please provide an output path."
optsIncomplete = True
if optsIncomplete:
print usageMessage
sys.exit(2)
with open(inputPath, 'r') as inputFile:
with open(outputPath, 'w') as outputFile:
sequence = ''
for line in inputFile:
if line is not '' and line[0] is '>':
if sequence is not '':
outputFile.write(sequence)
outputFile.write('\n')
outputFile.write(line)
sequence = ''
else:
line = line.rstrip('\n')
sequence = sequence + line
outputFile.write(sequence)
| gpl-3.0 | -5,007,155,991,882,718,000 | -1,967,110,318,187,689,500 | 27.293103 | 112 | 0.602681 | false |
tsengj10/physics-admit | admissions/management/commands/jelley.py | 1 | 1202 | from django.core.management.base import BaseCommand, CommandError
from admissions.models import *
class Command(BaseCommand):
help = 'Recalculate Jelley scores and ranks'
def add_arguments(self, parser):
parser.add_argument('tag', nargs='?', default='test')
def handle(self, *args, **options):
weights = Weights.objects.last()
all_students = Candidate.objects.all()
for s in all_students:
s.stored_jell_score = s.calc_jell_score(weights)
s.save()
self.stdout.write('Jelley score of {0} is {1}'.format(s.ucas_id, s.stored_jell_score))
ordered = Candidate.objects.order_by('-stored_jell_score').all()
first = True
index = 1
for s in ordered:
if first:
s.stored_rank = index
previous_score = s.stored_jell_score
previous_rank = index
first = False
else:
if s.stored_jell_score == previous_score:
s.stored_rank = previous_rank
else:
s.stored_rank = index
previous_score = s.stored_jell_score
previous_rank = index
s.save()
self.stdout.write('Rank of {0} is {1} ({2})'.format(s.ucas_id, s.stored_rank, index))
index = index + 1
| gpl-2.0 | -188,805,419,564,766,850 | 5,445,382,254,445,789,000 | 31.486486 | 92 | 0.624792 | false |
realsobek/freeipa | ipaclient/remote_plugins/2_49/ping.py | 8 | 1648 | #
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
# pylint: disable=unused-import
import six
from . import Command, Method, Object
from ipalib import api, parameters, output
from ipalib.parameters import DefaultFrom
from ipalib.plugable import Registry
from ipalib.text import _
from ipapython.dn import DN
from ipapython.dnsutil import DNSName
if six.PY3:
unicode = str
__doc__ = _("""
Ping the remote IPA server to ensure it is running.
The ping command sends an echo request to an IPA server. The server
returns its version information. This is used by an IPA client
to confirm that the server is available and accepting requests.
The server from xmlrpc_uri in /etc/ipa/default.conf is contacted first.
If it does not respond then the client will contact any servers defined
by ldap SRV records in DNS.
EXAMPLES:
Ping an IPA server:
ipa ping
------------------------------------------
IPA server version 2.1.9. API version 2.20
------------------------------------------
Ping an IPA server verbosely:
ipa -v ping
ipa: INFO: trying https://ipa.example.com/ipa/xml
ipa: INFO: Forwarding 'ping' to server u'https://ipa.example.com/ipa/xml'
-----------------------------------------------------
IPA server version 2.1.9. API version 2.20
-----------------------------------------------------
""")
register = Registry()
@register()
class ping(Command):
__doc__ = _("Ping a remote server.")
has_output = (
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
)
| gpl-3.0 | 4,710,337,718,851,379,000 | -4,222,664,713,137,622,500 | 26.466667 | 76 | 0.617112 | false |
public-ink/public-ink | server/appengine-staging/lib/graphql/type/__init__.py | 3 | 1366 | # flake8: noqa
from .definition import ( # no import order
GraphQLScalarType,
GraphQLObjectType,
GraphQLField,
GraphQLArgument,
GraphQLInterfaceType,
GraphQLUnionType,
GraphQLEnumType,
GraphQLEnumValue,
GraphQLInputObjectType,
GraphQLInputObjectField,
GraphQLList,
GraphQLNonNull,
get_named_type,
is_abstract_type,
is_composite_type,
is_input_type,
is_leaf_type,
is_type,
get_nullable_type,
is_output_type
)
from .directives import (
# "Enum" of Directive locations
DirectiveLocation,
# Directive definition
GraphQLDirective,
# Built-in directives defined by the Spec
specified_directives,
GraphQLSkipDirective,
GraphQLIncludeDirective,
GraphQLDeprecatedDirective,
# Constant Deprecation Reason
DEFAULT_DEPRECATION_REASON,
)
from .scalars import ( # no import order
GraphQLInt,
GraphQLFloat,
GraphQLString,
GraphQLBoolean,
GraphQLID,
)
from .schema import GraphQLSchema
from .introspection import (
# "Enum" of Type Kinds
TypeKind,
# GraphQL Types for introspection.
__Schema,
__Directive,
__DirectiveLocation,
__Type,
__Field,
__InputValue,
__EnumValue,
__TypeKind,
# Meta-field definitions.
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef
)
| gpl-3.0 | 8,522,626,847,833,194,000 | 864,655,416,847,765,400 | 19.38806 | 45 | 0.68448 | false |
sdague/home-assistant | tests/components/mqtt/test_tag.py | 6 | 24527 | """The tests for MQTT tag scanner."""
import copy
import json
import pytest
from tests.async_mock import ANY, patch
from tests.common import (
async_fire_mqtt_message,
async_get_device_automations,
mock_device_registry,
mock_registry,
)
DEFAULT_CONFIG_DEVICE = {
"device": {"identifiers": ["0AFFD2"]},
"topic": "foobar/tag_scanned",
}
DEFAULT_CONFIG = {
"topic": "foobar/tag_scanned",
}
DEFAULT_CONFIG_JSON = {
"device": {"identifiers": ["0AFFD2"]},
"topic": "foobar/tag_scanned",
"value_template": "{{ value_json.PN532.UID }}",
}
DEFAULT_TAG_ID = "E9F35959"
DEFAULT_TAG_SCAN = "E9F35959"
DEFAULT_TAG_SCAN_JSON = (
'{"Time":"2020-09-28T17:02:10","PN532":{"UID":"E9F35959", "DATA":"ILOVETASMOTA"}}'
)
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def tag_mock():
"""Fixture to mock tag."""
with patch("homeassistant.components.tag.async_scan_tag") as mock_tag:
yield mock_tag
@pytest.mark.no_fail_on_log_exception
async def test_discover_bad_tag(hass, device_reg, entity_reg, mqtt_mock, tag_mock):
"""Test bad discovery message."""
config1 = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
# Test sending bad data
data0 = '{ "device":{"identifiers":["0AFFD2"]}, "topics": "foobar/tag_scanned" }'
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data0)
await hass.async_block_till_done()
assert device_reg.async_get_device({("mqtt", "0AFFD2")}, set()) is None
# Test sending correct data
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", json.dumps(config1))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_if_fires_on_mqtt_message_with_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning, with device."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_if_fires_on_mqtt_message_without_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning, without device."""
config = copy.deepcopy(DEFAULT_CONFIG)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
async def test_if_fires_on_mqtt_message_with_template(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning, with device."""
config = copy.deepcopy(DEFAULT_CONFIG_JSON)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_strip_tag_id(hass, device_reg, mqtt_mock, tag_mock):
"""Test strip whitespace from tag_id."""
config = copy.deepcopy(DEFAULT_CONFIG)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", "123456 ")
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, "123456", None)
async def test_if_fires_on_mqtt_message_after_update_with_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after update."""
config1 = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
config2 = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
config2["topic"] = "foobar/tag_scanned2"
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with different topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with same topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_if_fires_on_mqtt_message_after_update_without_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after update."""
config1 = copy.deepcopy(DEFAULT_CONFIG)
config2 = copy.deepcopy(DEFAULT_CONFIG)
config2["topic"] = "foobar/tag_scanned2"
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
# Update the tag scanner with different topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
# Update the tag scanner with same topic
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned2", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
async def test_if_fires_on_mqtt_message_after_update_with_template(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after update."""
config1 = copy.deepcopy(DEFAULT_CONFIG_JSON)
config2 = copy.deepcopy(DEFAULT_CONFIG_JSON)
config2["value_template"] = "{{ value_json.RDM6300.UID }}"
tag_scan_2 = '{"Time":"2020-09-28T17:02:10","RDM6300":{"UID":"E9F35959", "DATA":"ILOVETASMOTA"}}'
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with different template
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned", tag_scan_2)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Update the tag scanner with same template
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config2))
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN_JSON)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async_fire_mqtt_message(hass, "foobar/tag_scanned", tag_scan_2)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_no_resubscribe_same_topic(hass, device_reg, mqtt_mock):
"""Test subscription to topics without change."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
assert device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
call_count = mqtt_mock.async_subscribe.call_count
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
assert mqtt_mock.async_subscribe.call_count == call_count
async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_with_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning after removal."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Remove the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
# Rediscover the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
async def test_not_fires_on_mqtt_message_after_remove_by_mqtt_without_device(
hass, device_reg, mqtt_mock, tag_mock
):
"""Test tag scanning not firing after removal."""
config = copy.deepcopy(DEFAULT_CONFIG)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
# Remove the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
# Rediscover the tag scanner
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, None)
async def test_not_fires_on_mqtt_message_after_remove_from_registry(
hass,
device_reg,
mqtt_mock,
tag_mock,
):
"""Test tag scanning after removal."""
config = copy.deepcopy(DEFAULT_CONFIG_DEVICE)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config))
await hass.async_block_till_done()
device_entry = device_reg.async_get_device({("mqtt", "0AFFD2")}, set())
# Fake tag scan.
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, DEFAULT_TAG_ID, device_entry.id)
# Remove the device
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
tag_mock.reset_mock()
async_fire_mqtt_message(hass, "foobar/tag_scanned", DEFAULT_TAG_SCAN)
await hass.async_block_till_done()
tag_mock.assert_not_called()
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT device registry integration."""
registry = await hass.helpers.device_registry.async_get_registry()
data = json.dumps(
{
"topic": "test-topic",
"device": {
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
}
)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device(set(), {("mac", "02:5b:26:a8:dc:12")})
assert device is not None
assert device.connections == {("mac", "02:5b:26:a8:dc:12")}
assert device.manufacturer == "Whatever"
assert device.name == "Beer"
assert device.model == "Glass"
assert device.sw_version == "0.1-beta"
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT device registry integration."""
registry = await hass.helpers.device_registry.async_get_registry()
data = json.dumps(
{
"topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
}
)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.identifiers == {("mqtt", "helloworld")}
assert device.manufacturer == "Whatever"
assert device.name == "Beer"
assert device.model == "Glass"
assert device.sw_version == "0.1-beta"
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
registry = await hass.helpers.device_registry.async_get_registry()
config = {
"topic": "test-topic",
"device": {
"identifiers": ["helloworld"],
"connections": [["mac", "02:5b:26:a8:dc:12"]],
"manufacturer": "Whatever",
"name": "Beer",
"model": "Glass",
"sw_version": "0.1-beta",
},
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Beer"
config["device"]["name"] = "Milk"
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
device = registry.async_get_device({("mqtt", "helloworld")}, set())
assert device is not None
assert device.name == "Milk"
async def test_cleanup_tag(hass, device_reg, entity_reg, mqtt_mock):
"""Test tag discovery topic is cleaned when device is removed from registry."""
config = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
device_reg.async_remove_device(device_entry.id)
await hass.async_block_till_done()
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
# Verify retained discovery topic has been cleared
mqtt_mock.async_publish.assert_called_once_with(
"homeassistant/tag/bla/config", "", 0, True
)
async def test_cleanup_device(hass, device_reg, entity_reg, mqtt_mock):
"""Test removal from device registry when tag is removed."""
config = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
data = json.dumps(config)
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", data)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/tag/bla/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
async def test_cleanup_device_several_tags(
hass, device_reg, entity_reg, mqtt_mock, tag_mock
):
"""Test removal from device registry when the last tag is removed."""
config1 = {
"topic": "test-topic1",
"device": {"identifiers": ["helloworld"]},
}
config2 = {
"topic": "test-topic2",
"device": {"identifiers": ["helloworld"]},
}
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", json.dumps(config1))
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/tag/bla2/config", json.dumps(config2))
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
# Verify device registry entry is not cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
# Fake tag scan.
async_fire_mqtt_message(hass, "test-topic1", "12345")
async_fire_mqtt_message(hass, "test-topic2", "23456")
await hass.async_block_till_done()
tag_mock.assert_called_once_with(ANY, "23456", device_entry.id)
async_fire_mqtt_message(hass, "homeassistant/tag/bla2/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
async def test_cleanup_device_with_entity_and_trigger_1(
hass, device_reg, entity_reg, mqtt_mock
):
"""Test removal from device registry for device with tag, entity and trigger.
Tag removed first, then trigger and entity.
"""
config1 = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
config2 = {
"automation_type": "trigger",
"topic": "test-topic",
"type": "foo",
"subtype": "bar",
"device": {"identifiers": ["helloworld"]},
}
config3 = {
"name": "test_binary_sensor",
"state_topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
"unique_id": "veryunique",
}
data1 = json.dumps(config1)
data2 = json.dumps(config2)
data3 = json.dumps(config3)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", data1)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data2)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", data3)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert len(triggers) == 3 # 2 binary_sensor triggers + device trigger
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
# Verify device registry entry is not cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", "")
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
async def test_cleanup_device_with_entity2(hass, device_reg, entity_reg, mqtt_mock):
"""Test removal from device registry for device with tag, entity and trigger.
Trigger and entity removed first, then tag.
"""
config1 = {
"topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
}
config2 = {
"automation_type": "trigger",
"topic": "test-topic",
"type": "foo",
"subtype": "bar",
"device": {"identifiers": ["helloworld"]},
}
config3 = {
"name": "test_binary_sensor",
"state_topic": "test-topic",
"device": {"identifiers": ["helloworld"]},
"unique_id": "veryunique",
}
data1 = json.dumps(config1)
data2 = json.dumps(config2)
data3 = json.dumps(config3)
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", data1)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", data2)
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", data3)
await hass.async_block_till_done()
# Verify device registry entry is created
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
triggers = await async_get_device_automations(hass, "trigger", device_entry.id)
assert len(triggers) == 3 # 2 binary_sensor triggers + device trigger
async_fire_mqtt_message(hass, "homeassistant/device_automation/bla2/config", "")
await hass.async_block_till_done()
async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla3/config", "")
await hass.async_block_till_done()
# Verify device registry entry is not cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is not None
async_fire_mqtt_message(hass, "homeassistant/tag/bla1/config", "")
await hass.async_block_till_done()
# Verify device registry entry is cleared
device_entry = device_reg.async_get_device({("mqtt", "helloworld")}, set())
assert device_entry is None
| apache-2.0 | -7,931,625,779,240,333,000 | -1,295,247,682,059,283,200 | 34.858187 | 101 | 0.668325 | false |
digwanderlust/pants | src/python/pants/backend/android/distribution/android_distribution.py | 31 | 3997 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import shutil
from pants.util.dirutil import safe_mkdir
class AndroidDistribution(object):
"""Represent an Android SDK distribution."""
class DistributionError(Exception):
"""Indicate an invalid android distribution."""
_CACHED_SDK = {}
@classmethod
def cached(cls, path=None):
"""Return an AndroidDistribution and cache results.
:param string path: Optional path of an Android SDK installation.
:return: An android distribution.
:rtype: AndroidDistribution
"""
dist = cls._CACHED_SDK.get(path)
if not dist:
dist = cls.locate_sdk_path(path)
cls._CACHED_SDK[path] = dist
return dist
@classmethod
def locate_sdk_path(cls, path=None):
"""Locate an Android SDK by checking any passed path and then traditional environmental aliases.
:param string path: Optional local address of a SDK.
:return: An android distribution.
:rtype: AndroidDistribution
:raises: ``DistributionError`` if SDK cannot be found.
"""
def sdk_path(sdk_env_var):
"""Return the full path of environmental variable sdk_env_var."""
sdk = os.environ.get(sdk_env_var)
return os.path.abspath(sdk) if sdk else None
def search_path(path):
"""Find a Android SDK home directory."""
if path:
yield os.path.abspath(path)
yield sdk_path('ANDROID_HOME')
yield sdk_path('ANDROID_SDK_HOME')
yield sdk_path('ANDROID_SDK')
for path in filter(None, search_path(path)):
dist = cls(sdk_path=path)
return dist
raise cls.DistributionError('Failed to locate Android SDK. Please install '
'SDK and set ANDROID_HOME in your path.')
def __init__(self, sdk_path):
"""Create an Android distribution and cache tools for quick retrieval."""
self._sdk_path = sdk_path
self._validated_tools = {}
def register_android_tool(self, tool_path, workdir=None):
"""Return the full path for the tool at SDK location tool_path or of a copy under workdir.
All android tasks should request their tools using this method.
:param string tool_path: Path to tool, relative to the Android SDK root, e.g
'platforms/android-19/android.jar'.
:param string workdir: Location for the copied file. Pants will put a copy of the
android file under workdir.
:return: Full path to either the tool or a created copy of that tool.
:rtype: string
:raises: ``DistributionError`` if tool cannot be found.
"""
if tool_path not in self._validated_tools:
android_tool = self._get_tool_path(tool_path)
# If an android file is bound for the classpath it must be under buildroot, so create a copy.
if workdir:
copy_path = os.path.join(workdir, tool_path)
if not os.path.isfile(copy_path):
try:
safe_mkdir(os.path.dirname(copy_path))
shutil.copy(android_tool, copy_path)
except OSError as e:
raise self.DistributionError('Problem creating copy of the android tool: {}'.format(e))
self._validated_tools[tool_path] = copy_path
else:
self._validated_tools[tool_path] = android_tool
return self._validated_tools[tool_path]
def _get_tool_path(self, tool_path):
"""Return full path of tool if it is found on disk."""
android_tool = os.path.join(self._sdk_path, tool_path)
if os.path.isfile(android_tool):
return android_tool
else:
raise self.DistributionError('There is no {} installed. The Android SDK may need to be '
'updated.'.format(android_tool))
def __repr__(self):
return 'AndroidDistribution({})'.format(self._sdk_path)
| apache-2.0 | -8,050,936,772,747,179,000 | -4,212,669,284,774,917,600 | 36.35514 | 100 | 0.663998 | false |
xiawei0000/Kinectforactiondetect | ChalearnLAPSample.py | 1 | 41779 | # coding=gbk
#-------------------------------------------------------------------------------
# Name: Chalearn LAP sample
# Purpose: Provide easy access to Chalearn LAP challenge data samples
#
# Author: Xavier Baro
#
# Created: 21/01/2014
# Copyright: (c) Xavier Baro 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
import os
import zipfile
import shutil
import cv2
import numpy
import csv
from PIL import Image, ImageDraw
from scipy.misc import imresize
class Skeleton(object):
""" Class that represents the skeleton information """
"""¹Ç¼ÜÀ࣬ÊäÈë¹Ç¼ÜÊý¾Ý£¬½¨Á¢Àà"""
#define a class to encode skeleton data
def __init__(self,data):
""" Constructor. Reads skeleton information from given raw data """
# Create an object from raw data
        self.joins=dict();
        jointNames=['HipCenter','Spine','ShoulderCenter','Head','ShoulderLeft','ElbowLeft',
                    'WristLeft','HandLeft','ShoulderRight','ElbowRight','WristRight','HandRight',
                    'HipLeft','KneeLeft','AnkleLeft','FootLeft','HipRight','KneeRight',
                    'AnkleRight','FootRight']
        pos=0
        for joint in jointNames:
            # Each joint uses 9 values: world coordinates (3), orientation (4) and pixel coordinates (2)
            self.joins[joint]=(map(float,data[pos:pos+3]),map(float,data[pos+3:pos+7]),map(int,data[pos+7:pos+9]))
            pos=pos+9
def getAllData(self):
""" Return a dictionary with all the information for each skeleton node """
return self.joins
def getWorldCoordinates(self):
""" Get World coordinates for each skeleton node """
skel=dict()
for key in self.joins.keys():
skel[key]=self.joins[key][0]
return skel
def getJoinOrientations(self):
""" Get orientations of all skeleton nodes """
skel=dict()
for key in self.joins.keys():
skel[key]=self.joins[key][1]
return skel
def getPixelCoordinates(self):
""" Get Pixel coordinates for each skeleton node """
skel=dict()
for key in self.joins.keys():
skel[key]=self.joins[key][2]
return skel
def toImage(self,width,height,bgColor):
""" Create an image for the skeleton information """
SkeletonConnectionMap = (['HipCenter','Spine'],['Spine','ShoulderCenter'],['ShoulderCenter','Head'],['ShoulderCenter','ShoulderLeft'], \
['ShoulderLeft','ElbowLeft'],['ElbowLeft','WristLeft'],['WristLeft','HandLeft'],['ShoulderCenter','ShoulderRight'], \
['ShoulderRight','ElbowRight'],['ElbowRight','WristRight'],['WristRight','HandRight'],['HipCenter','HipRight'], \
['HipRight','KneeRight'],['KneeRight','AnkleRight'],['AnkleRight','FootRight'],['HipCenter','HipLeft'], \
['HipLeft','KneeLeft'],['KneeLeft','AnkleLeft'],['AnkleLeft','FootLeft'])
im = Image.new('RGB', (width, height), bgColor)
draw = ImageDraw.Draw(im)
for link in SkeletonConnectionMap:
p=self.getPixelCoordinates()[link[1]]
p.extend(self.getPixelCoordinates()[link[0]])
draw.line(p, fill=(255,0,0), width=5)
for node in self.getPixelCoordinates().keys():
p=self.getPixelCoordinates()[node]
r=5
draw.ellipse((p[0]-r,p[1]-r,p[0]+r,p[1]+r),fill=(0,0,255))
del draw
image = numpy.array(im)
image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
return image
## Gesture data class: takes a sample path as input and builds the gesture data object
class GestureSample(object):
""" Class that allows to access all the information for a certain gesture database sample """
#define class to access gesture data samples
    # Initialise the object and read the sample files
def __init__ (self,fileName):
""" Constructor. Read the sample file and unzip it if it is necessary. All the data is loaded.
sample=GestureSample('Sample0001.zip')
"""
# Check the given file
if not os.path.exists(fileName): #or not os.path.isfile(fileName):
raise Exception("Sample path does not exist: " + fileName)
# Prepare sample information
self.fullFile = fileName
self.dataPath = os.path.split(fileName)[0]
self.file=os.path.split(fileName)[1]
self.seqID=os.path.splitext(self.file)[0]
self.samplePath=self.dataPath + os.path.sep + self.seqID;
#ÅжÏÊÇzip»¹ÊÇĿ¼
# Unzip sample if it is necessary
if os.path.isdir(self.samplePath) :
self.unzip = False
else:
self.unzip = True
zipFile=zipfile.ZipFile(self.fullFile,"r")
zipFile.extractall(self.samplePath)
# Open video access for RGB information
rgbVideoPath=self.samplePath + os.path.sep + self.seqID + '_color.mp4'
if not os.path.exists(rgbVideoPath):
raise Exception("Invalid sample file. RGB data is not available")
self.rgb = cv2.VideoCapture(rgbVideoPath)
while not self.rgb.isOpened():
self.rgb = cv2.VideoCapture(rgbVideoPath)
cv2.waitKey(500)
# Open video access for Depth information
depthVideoPath=self.samplePath + os.path.sep + self.seqID + '_depth.mp4'
if not os.path.exists(depthVideoPath):
raise Exception("Invalid sample file. Depth data is not available")
self.depth = cv2.VideoCapture(depthVideoPath)
while not self.depth.isOpened():
self.depth = cv2.VideoCapture(depthVideoPath)
cv2.waitKey(500)
# Open video access for User segmentation information
userVideoPath=self.samplePath + os.path.sep + self.seqID + '_user.mp4'
if not os.path.exists(userVideoPath):
raise Exception("Invalid sample file. User segmentation data is not available")
self.user = cv2.VideoCapture(userVideoPath)
while not self.user.isOpened():
self.user = cv2.VideoCapture(userVideoPath)
cv2.waitKey(500)
# Read skeleton data
skeletonPath=self.samplePath + os.path.sep + self.seqID + '_skeleton.csv'
if not os.path.exists(skeletonPath):
raise Exception("Invalid sample file. Skeleton data is not available")
self.skeletons=[]
with open(skeletonPath, 'rb') as csvfile:
filereader = csv.reader(csvfile, delimiter=',')
for row in filereader:
self.skeletons.append(Skeleton(row))
del filereader
# Read sample data
sampleDataPath=self.samplePath + os.path.sep + self.seqID + '_data.csv'
if not os.path.exists(sampleDataPath):
raise Exception("Invalid sample file. Sample data is not available")
self.data=dict()
with open(sampleDataPath, 'rb') as csvfile:
filereader = csv.reader(csvfile, delimiter=',')
for row in filereader:
self.data['numFrames']=int(row[0])
self.data['fps']=int(row[1])
self.data['maxDepth']=int(row[2])
del filereader
# Read labels data
labelsPath=self.samplePath + os.path.sep + self.seqID + '_labels.csv'
if not os.path.exists(labelsPath):
#warnings.warn("Labels are not available", Warning)
self.labels=[]
else:
self.labels=[]
with open(labelsPath, 'rb') as csvfile:
filereader = csv.reader(csvfile, delimiter=',')
for row in filereader:
self.labels.append(map(int,row))
del filereader
    # Destructor
def __del__(self):
""" Destructor. If the object unziped the sample, it remove the temporal data """
if self.unzip:
self.clean()
def clean(self):
""" Clean temporal unziped data """
del self.rgb;
del self.depth;
del self.user;
shutil.rmtree(self.samplePath)
    # Read a single frame from the given video and return it
def getFrame(self,video, frameNum):
""" Get a single frame from given video object """
# Check frame number
# Get total number of frames
numFrames = video.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)
# Check the given file
if frameNum<1 or frameNum>numFrames:
raise Exception("Invalid frame number <" + str(frameNum) + ">. Valid frames are values between 1 and " + str(int(numFrames)))
# Set the frame index
video.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,frameNum-1)
ret,frame=video.read()
if ret==False:
raise Exception("Cannot read the frame")
return frame
    # The functions below all operate on a specific frame of the data members
def getRGB(self, frameNum):
""" Get the RGB color image for the given frame """
#get RGB frame
return self.getFrame(self.rgb,frameNum)
    # Return the depth map, stored as 16-bit integers
def getDepth(self, frameNum):
""" Get the depth image for the given frame """
#get Depth frame
depthData=self.getFrame(self.depth,frameNum)
# Convert to grayscale
depthGray=cv2.cvtColor(depthData,cv2.cv.CV_RGB2GRAY)
# Convert to float point
depth=depthGray.astype(numpy.float32)
# Convert to depth values
depth=depth/255.0*float(self.data['maxDepth'])
depth=depth.round()
depth=depth.astype(numpy.uint16)
return depth
def getUser(self, frameNum):
""" Get user segmentation image for the given frame """
#get user segmentation frame
return self.getFrame(self.user,frameNum)
def getSkeleton(self, frameNum):
""" Get the skeleton information for a given frame. It returns a Skeleton object """
#get user skeleton for a given frame
# Check frame number
# Get total number of frames
numFrames = len(self.skeletons)
# Check the given file
if frameNum<1 or frameNum>numFrames:
raise Exception("Invalid frame number <" + str(frameNum) + ">. Valid frames are values between 1 and " + str(int(numFrames)))
return self.skeletons[frameNum-1]
def getSkeletonImage(self, frameNum):
""" Create an image with the skeleton image for a given frame """
return self.getSkeleton(frameNum).toImage(640,480,(255,255,255))
def getNumFrames(self):
""" Get the number of frames for this sample """
return self.data['numFrames']
    # Pack all the modalities of one frame into a single large matrix
def getComposedFrame(self, frameNum):
""" Get a composition of all the modalities for a given frame """
# get sample modalities
rgb=self.getRGB(frameNum)
depthValues=self.getDepth(frameNum)
user=self.getUser(frameNum)
skel=self.getSkeletonImage(frameNum)
# Build depth image
depth = depthValues.astype(numpy.float32)
depth = depth*255.0/float(self.data['maxDepth'])
depth = depth.round()
depth = depth.astype(numpy.uint8)
depth = cv2.applyColorMap(depth,cv2.COLORMAP_JET)
# Build final image
compSize1=(max(rgb.shape[0],depth.shape[0]),rgb.shape[1]+depth.shape[1])
compSize2=(max(user.shape[0],skel.shape[0]),user.shape[1]+skel.shape[1])
comp = numpy.zeros((compSize1[0]+ compSize2[0],max(compSize1[1],compSize2[1]),3), numpy.uint8)
# Create composition
comp[:rgb.shape[0],:rgb.shape[1],:]=rgb
comp[:depth.shape[0],rgb.shape[1]:rgb.shape[1]+depth.shape[1],:]=depth
comp[compSize1[0]:compSize1[0]+user.shape[0],:user.shape[1],:]=user
comp[compSize1[0]:compSize1[0]+skel.shape[0],user.shape[1]:user.shape[1]+skel.shape[1],:]=skel
return comp
def getComposedFrameOverlapUser(self, frameNum):
""" Get a composition of all the modalities for a given frame """
# get sample modalities
rgb=self.getRGB(frameNum)
depthValues=self.getDepth(frameNum)
user=self.getUser(frameNum)
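        # Build a boolean user-segmentation mask and replicate it across the three
        # colour channels so it can gate both the RGB and the depth image.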
mask = numpy.mean(user, axis=2) > 150
mask = numpy.tile(mask, (3,1,1))
mask = mask.transpose((1,2,0))
# Build depth image
depth = depthValues.astype(numpy.float32)
depth = depth*255.0/float(self.data['maxDepth'])
depth = depth.round()
depth = depth.astype(numpy.uint8)
depth = cv2.applyColorMap(depth,cv2.COLORMAP_JET)
# Build final image
compSize=(max(rgb.shape[0],depth.shape[0]),rgb.shape[1]+depth.shape[1])
comp = numpy.zeros((compSize[0]+ compSize[0],max(compSize[1],compSize[1]),3), numpy.uint8)
# Create composition
comp[:rgb.shape[0],:rgb.shape[1],:]=rgb
comp[:depth.shape[0],rgb.shape[1]:rgb.shape[1]+depth.shape[1],:]= depth
comp[compSize[0]:compSize[0]+user.shape[0],:user.shape[1],:]= mask * rgb
comp[compSize[0]:compSize[0]+user.shape[0],user.shape[1]:user.shape[1]+user.shape[1],:]= mask * depth
return comp
def getComposedFrame_480(self, frameNum, ratio=0.5, topCut=60, botCut=140):
""" Get a composition of all the modalities for a given frame """
# get sample modalities
rgb=self.getRGB(frameNum)
rgb = rgb[topCut:-topCut,botCut:-botCut,:]
rgb = imresize(rgb, ratio, interp='bilinear')
depthValues=self.getDepth(frameNum)
user=self.getUser(frameNum)
user = user[topCut:-topCut,botCut:-botCut,:]
user = imresize(user, ratio, interp='bilinear')
mask = numpy.mean(user, axis=2) > 150
mask = numpy.tile(mask, (3,1,1))
mask = mask.transpose((1,2,0))
# Build depth image
depth = depthValues.astype(numpy.float32)
depth = depth*255.0/float(self.data['maxDepth'])
depth = depth.round()
depth = depth[topCut:-topCut,botCut:-botCut]
depth = imresize(depth, ratio, interp='bilinear')
depth = depth.astype(numpy.uint8)
depth = cv2.applyColorMap(depth,cv2.COLORMAP_JET)
# Build final image
compSize=(max(rgb.shape[0],depth.shape[0]),rgb.shape[1]+depth.shape[1])
comp = numpy.zeros((compSize[0]+ compSize[0],max(compSize[1],compSize[1]),3), numpy.uint8)
# Create composition
comp[:rgb.shape[0],:rgb.shape[1],:]=rgb
comp[:depth.shape[0],rgb.shape[1]:rgb.shape[1]+depth.shape[1],:]= depth
comp[compSize[0]:compSize[0]+user.shape[0],:user.shape[1],:]= mask * rgb
comp[compSize[0]:compSize[0]+user.shape[0],user.shape[1]:user.shape[1]+user.shape[1],:]= mask * depth
return comp
def getDepth3DCNN(self, frameNum, ratio=0.5, topCut=60, botCut=140):
""" Get a composition of all the modalities for a given frame """
# get sample modalities
depthValues=self.getDepth(frameNum)
user=self.getUser(frameNum)
user = user[topCut:-topCut,botCut:-botCut,:]
user = imresize(user, ratio, interp='bilinear')
mask = numpy.mean(user, axis=2) > 150
# Build depth image
depth = depthValues.astype(numpy.float32)
depth = depth*255.0/float(self.data['maxDepth'])
depth = depth.round()
depth = depth[topCut:-topCut,botCut:-botCut]
depth = imresize(depth, ratio, interp='bilinear')
depth = depth.astype(numpy.uint8)
return mask * depth
def getDepthOverlapUser(self, frameNum, x_centre, y_centre, pixel_value, extractedFrameSize=224, upshift = 0):
""" Get a composition of all the modalities for a given frame """
halfFrameSize = extractedFrameSize/2
user=self.getUser(frameNum)
mask = numpy.mean(user, axis=2) > 150
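        # Scale factor relative to a fixed reference depth value (3000, assumed to be the
        # reference distance), so users at different distances get a similar pixel extent.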
ratio = pixel_value/ 3000
# Build depth image
# get sample modalities
depthValues=self.getDepth(frameNum)
depth = depthValues.astype(numpy.float32)
depth = depth*255.0/float(self.data['maxDepth'])
mask = imresize(mask, ratio, interp='nearest')
depth = imresize(depth, ratio, interp='bilinear')
depth_temp = depth * mask
depth_extracted = depth_temp[x_centre-halfFrameSize-upshift:x_centre+halfFrameSize-upshift, y_centre-halfFrameSize: y_centre+halfFrameSize]
depth = depth.round()
depth = depth.astype(numpy.uint8)
depth = cv2.applyColorMap(depth,cv2.COLORMAP_JET)
depth_extracted = depth_extracted.round()
depth_extracted = depth_extracted.astype(numpy.uint8)
depth_extracted = cv2.applyColorMap(depth_extracted,cv2.COLORMAP_JET)
# Build final image
compSize=(depth.shape[0],depth.shape[1])
comp = numpy.zeros((compSize[0] + extractedFrameSize,compSize[1]+compSize[1],3), numpy.uint8)
# Create composition
comp[:depth.shape[0],:depth.shape[1],:]=depth
mask_new = numpy.tile(mask, (3,1,1))
mask_new = mask_new.transpose((1,2,0))
comp[:depth.shape[0],depth.shape[1]:depth.shape[1]+depth.shape[1],:]= mask_new * depth
comp[compSize[0]:,:extractedFrameSize,:]= depth_extracted
return comp
def getDepthCentroid(self, startFrame, endFrame):
""" Get a composition of all the modalities for a given frame """
x_centre = []
y_centre = []
pixel_value = []
for frameNum in range(startFrame, endFrame):
user=self.getUser(frameNum)
depthValues=self.getDepth(frameNum)
depth = depthValues.astype(numpy.float32)
#depth = depth*255.0/float(self.data['maxDepth'])
mask = numpy.mean(user, axis=2) > 150
width, height = mask.shape
XX, YY, count, pixel_sum = 0, 0, 0, 0
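            # Accumulate the coordinates and depth of every user-mask pixel to compute
            # the centroid and the mean depth over the selected frames.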
for x in range(width):
for y in range(height):
if mask[x, y]:
XX += x
YY += y
count += 1
pixel_sum += depth[x, y]
if count>0:
x_centre.append(XX/count)
y_centre.append(YY/count)
pixel_value.append(pixel_sum/count)
return [numpy.mean(x_centre), numpy.mean(y_centre), numpy.mean(pixel_value)]
def getGestures(self):
""" Get the list of gesture for this sample. Each row is a gesture, with the format (gestureID,startFrame,endFrame) """
return self.labels
def getGestureName(self,gestureID):
""" Get the gesture label from a given gesture ID """
names=('vattene','vieniqui','perfetto','furbo','cheduepalle','chevuoi','daccordo','seipazzo', \
'combinato','freganiente','ok','cosatifarei','basta','prendere','noncenepiu','fame','tantotempo', \
'buonissimo','messidaccordo','sonostufo')
# Check the given file
if gestureID<1 or gestureID>20:
raise Exception("Invalid gesture ID <" + str(gestureID) + ">. Valid IDs are values between 1 and 20")
return names[gestureID-1]
def exportPredictions(self, prediction,predPath):
""" Export the given prediction to the correct file in the given predictions path """
if not os.path.exists(predPath):
os.makedirs(predPath)
output_filename = os.path.join(predPath, self.seqID + '_prediction.csv')
output_file = open(output_filename, 'wb')
for row in prediction:
output_file.write(repr(int(row[0])) + "," + repr(int(row[1])) + "," + repr(int(row[2])) + "\n")
output_file.close()
def play_video(self):
"""
play the video, Wudi adds this
"""
# Open video access for RGB information
rgbVideoPath=self.samplePath + os.path.sep + self.seqID + '_color.mp4'
if not os.path.exists(rgbVideoPath):
raise Exception("Invalid sample file. RGB data is not available")
self.rgb = cv2.VideoCapture(rgbVideoPath)
while (self.rgb.isOpened()):
ret, frame = self.rgb.read()
cv2.imshow('frame',frame)
if cv2.waitKey(5) & 0xFF == ord('q'):
break
self.rgb.release()
cv2.destroyAllWindows()
def evaluate(self,csvpathpred):
""" Evaluate this sample agains the ground truth file """
        maxGestures=20
seqLength=self.getNumFrames()
        # Get the list of gestures from the prediction file and their frame activation
predGestures = []
binvec_pred = numpy.zeros((maxGestures, seqLength))
gtGestures = []
binvec_gt = numpy.zeros((maxGestures, seqLength))
with open(csvpathpred, 'rb') as csvfilegt:
csvgt = csv.reader(csvfilegt)
for row in csvgt:
binvec_pred[int(row[0])-1, int(row[1])-1:int(row[2])-1] = 1
predGestures.append(int(row[0]))
        # Get the list of gestures from the ground truth and their frame activation
        for row in self.getGestures():
binvec_gt[int(row[0])-1, int(row[1])-1:int(row[2])-1] = 1
gtGestures.append(int(row[0]))
        # Get the list of gestures without repetitions for ground truth and prediction
gtGestures = numpy.unique(gtGestures)
predGestures = numpy.unique(predGestures)
# Find false positives
        falsePos=numpy.setdiff1d(predGestures, gtGestures)
# Get overlaps for each gesture
overlaps = []
for idx in gtGestures:
intersec = sum(binvec_gt[idx-1] * binvec_pred[idx-1])
aux = binvec_gt[idx-1] + binvec_pred[idx-1]
union = sum(aux > 0)
overlaps.append(intersec/union)
# Use real gestures and false positive gestures to calculate the final score
return sum(overlaps)/(len(overlaps)+len(falsePos))
def get_shift_scale(self, template, ref_depth, start_frame=10, end_frame=20, debug_show=False):
"""
Wudi add this method for extracting normalizing depth wrt Sample0003
"""
from skimage.feature import match_template
Feature_all = numpy.zeros(shape=(480, 640, end_frame-start_frame), dtype=numpy.uint16 )
count = 0
for frame_num in range(start_frame,end_frame):
depth_original = self.getDepth(frame_num)
mask = numpy.mean(self.getUser(frame_num), axis=2) > 150
Feature_all[:, :, count] = depth_original * mask
count += 1
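        # Average the masked depth over the sampled frames and normalise it to [0, 1]
        # before template matching against the reference sample.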
depth_image = Feature_all.mean(axis = 2)
depth_image_normalized = depth_image * 1.0 / float(self.data['maxDepth'])
depth_image_normalized /= depth_image_normalized.max()
result = match_template(depth_image_normalized, template, pad_input=True)
#############plot
x, y = numpy.unravel_index(numpy.argmax(result), result.shape)
shift = [depth_image.shape[0]/2-x, depth_image.shape[1]/2-y]
subsize = 25 # we use 25 by 25 region as a measurement for median of distance
minX = max(x - subsize,0)
minY = max(y - subsize,0)
maxX = min(x + subsize,depth_image.shape[0])
maxY = min(y + subsize,depth_image.shape[1])
subregion = depth_image[minX:maxX, minY:maxY]
distance = numpy.median(subregion[subregion>0])
scaling = distance*1.0 / ref_depth
from matplotlib import pyplot as plt
print "[x, y, shift, distance, scaling]"
print str([x, y, shift, distance, scaling])
if debug_show:
fig, (ax1, ax2, ax3, ax4) = plt.subplots(ncols=4, figsize=(8, 4))
ax1.imshow(template)
ax1.set_axis_off()
ax1.set_title('template')
ax2.imshow(depth_image_normalized)
ax2.set_axis_off()
ax2.set_title('image')
# highlight matched region
hcoin, wcoin = template.shape
rect = plt.Rectangle((y-hcoin/2, x-wcoin/2), wcoin, hcoin, edgecolor='r', facecolor='none')
ax2.add_patch(rect)
import cv2
from scipy.misc import imresize
rows,cols = depth_image_normalized.shape
M = numpy.float32([[1,0, shift[1]],[0,1, shift[0]]])
affine_image = cv2.warpAffine(depth_image_normalized, M, (cols, rows))
resize_image = imresize(affine_image, scaling)
resize_image_median = cv2.medianBlur(resize_image,5)
ax3.imshow(resize_image_median)
ax3.set_axis_off()
ax3.set_title('image_transformed')
# highlight matched region
hcoin, wcoin = resize_image_median.shape
rect = plt.Rectangle((wcoin/2-160, hcoin/2-160), 320, 320, edgecolor='r', facecolor='none')
ax3.add_patch(rect)
ax4.imshow(result)
ax4.set_axis_off()
ax4.set_title('`match_template`\nresult')
# highlight matched region
ax4.autoscale(False)
ax4.plot(x, y, 'o', markeredgecolor='r', markerfacecolor='none', markersize=10)
plt.show()
return [shift, scaling]
def get_shift_scale_depth(self, shift, scale, framenumber, IM_SZ, show_flag=False):
"""
Wudi added this method to extract segmented depth frame,
by a shift and scale
"""
depth_original = self.getDepth(framenumber)
mask = numpy.mean(self.getUser(framenumber), axis=2) > 150
resize_final_out = numpy.zeros((IM_SZ,IM_SZ))
        if mask.sum() < 1000: # Kinect detected nothing
print "skip "+ str(framenumber)
flag = False
else:
flag = True
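            # Segment the user, rescale depth to 0-255, apply the precomputed shift and
            # scale, then crop a centred 320x320 window and resize it to IM_SZ x IM_SZ.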
depth_user = depth_original * mask
depth_user_normalized = depth_user * 1.0 / float(self.data['maxDepth'])
depth_user_normalized = depth_user_normalized *255 /depth_user_normalized.max()
rows,cols = depth_user_normalized.shape
M = numpy.float32([[1,0, shift[1]],[0,1, shift[0]]])
affine_image = cv2.warpAffine(depth_user_normalized, M,(cols, rows))
resize_image = imresize(affine_image, scale)
resize_image_median = cv2.medianBlur(resize_image,5)
rows, cols = resize_image_median.shape
image_crop = resize_image_median[rows/2-160:rows/2+160, cols/2-160:cols/2+160]
resize_final_out = imresize(image_crop, (IM_SZ,IM_SZ))
if show_flag: # show the segmented images here
cv2.imshow('image',image_crop)
cv2.waitKey(10)
return [resize_final_out, flag]
# Action data class
class ActionSample(object):
""" Class that allows to access all the information for a certain action database sample """
#define class to access actions data samples
def __init__ (self,fileName):
""" Constructor. Read the sample file and unzip it if it is necessary. All the data is loaded.
sample=ActionSample('Sec01.zip')
"""
# Check the given file
if not os.path.exists(fileName) and not os.path.isfile(fileName):
raise Exception("Sample path does not exist: " + fileName)
# Prepare sample information
self.fullFile = fileName
self.dataPath = os.path.split(fileName)[0]
self.file=os.path.split(fileName)[1]
self.seqID=os.path.splitext(self.file)[0]
self.samplePath=self.dataPath + os.path.sep + self.seqID;
# Unzip sample if it is necessary
if os.path.isdir(self.samplePath) :
self.unzip = False
else:
self.unzip = True
zipFile=zipfile.ZipFile(self.fullFile,"r")
zipFile.extractall(self.samplePath)
# Open video access for RGB information
rgbVideoPath=self.samplePath + os.path.sep + self.seqID + '_color.mp4'
if not os.path.exists(rgbVideoPath):
raise Exception("Invalid sample file. RGB data is not available")
self.rgb = cv2.VideoCapture(rgbVideoPath)
while not self.rgb.isOpened():
self.rgb = cv2.VideoCapture(rgbVideoPath)
cv2.waitKey(500)
# Read sample data
sampleDataPath=self.samplePath + os.path.sep + self.seqID + '_data.csv'
if not os.path.exists(sampleDataPath):
raise Exception("Invalid sample file. Sample data is not available")
self.data=dict()
with open(sampleDataPath, 'rb') as csvfile:
filereader = csv.reader(csvfile, delimiter=',')
for row in filereader:
self.data['numFrames']=int(row[0])
del filereader
# Read labels data
labelsPath=self.samplePath + os.path.sep + self.seqID + '_labels.csv'
self.labels=[]
if not os.path.exists(labelsPath):
warnings.warn("Labels are not available", Warning)
else:
with open(labelsPath, 'rb') as csvfile:
filereader = csv.reader(csvfile, delimiter=',')
for row in filereader:
self.labels.append(map(int,row))
del filereader
def __del__(self):
""" Destructor. If the object unziped the sample, it remove the temporal data """
if self.unzip:
self.clean()
def clean(self):
""" Clean temporal unziped data """
del self.rgb;
shutil.rmtree(self.samplePath)
def getFrame(self,video, frameNum):
""" Get a single frame from given video object """
# Check frame number
# Get total number of frames
numFrames = video.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT)
# Check the given file
if frameNum<1 or frameNum>numFrames:
raise Exception("Invalid frame number <" + str(frameNum) + ">. Valid frames are values between 1 and " + str(int(numFrames)))
# Set the frame index
video.set(cv2.cv.CV_CAP_PROP_POS_FRAMES,frameNum-1)
ret,frame=video.read()
if ret==False:
raise Exception("Cannot read the frame")
return frame
def getNumFrames(self):
""" Get the number of frames for this sample """
return self.data['numFrames']
def getRGB(self, frameNum):
""" Get the RGB color image for the given frame """
#get RGB frame
return self.getFrame(self.rgb,frameNum)
def getActions(self):
""" Get the list of gesture for this sample. Each row is an action, with the format (actionID,startFrame,endFrame) """
return self.labels
def getActionsName(self,actionID):
""" Get the action label from a given action ID """
names=('wave','point','clap','crouch','jump','walk','run','shake hands', \
'hug','kiss','fight')
# Check the given file
if actionID<1 or actionID>11:
raise Exception("Invalid action ID <" + str(actionID) + ">. Valid IDs are values between 1 and 11")
return names[actionID-1]
def exportPredictions(self, prediction,predPath):
""" Export the given prediction to the correct file in the given predictions path """
if not os.path.exists(predPath):
os.makedirs(predPath)
output_filename = os.path.join(predPath, self.seqID + '_prediction.csv')
output_file = open(output_filename, 'wb')
for row in prediction:
output_file.write(repr(int(row[0])) + "," + repr(int(row[1])) + "," + repr(int(row[2])) + "\n")
output_file.close()
def evaluate(self,csvpathpred):
""" Evaluate this sample agains the ground truth file """
maxGestures=11
seqLength=self.getNumFrames()
        # Get the list of gestures from the prediction file and their frame activation
predGestures = []
binvec_pred = numpy.zeros((maxGestures, seqLength))
gtGestures = []
binvec_gt = numpy.zeros((maxGestures, seqLength))
with open(csvpathpred, 'rb') as csvfilegt:
csvgt = csv.reader(csvfilegt)
for row in csvgt:
binvec_pred[int(row[0])-1, int(row[1])-1:int(row[2])-1] = 1
predGestures.append(int(row[0]))
        # Get the list of gestures from the ground truth and their frame activation
for row in self.getActions():
binvec_gt[int(row[0])-1, int(row[1])-1:int(row[2])-1] = 1
gtGestures.append(int(row[0]))
        # Get the list of gestures without repetitions for ground truth and prediction
gtGestures = numpy.unique(gtGestures)
predGestures = numpy.unique(predGestures)
# Find false positives
        falsePos=numpy.setdiff1d(predGestures, gtGestures)
# Get overlaps for each gesture
overlaps = []
for idx in gtGestures:
intersec = sum(binvec_gt[idx-1] * binvec_pred[idx-1])
aux = binvec_gt[idx-1] + binvec_pred[idx-1]
union = sum(aux > 0)
overlaps.append(intersec/union)
# Use real gestures and false positive gestures to calculate the final score
return sum(overlaps)/(len(overlaps)+len(falsePos))
# Pose data class
class PoseSample(object):
""" Class that allows to access all the information for a certain pose database sample """
#define class to access gesture data samples
def __init__ (self,fileName):
""" Constructor. Read the sample file and unzip it if it is necessary. All the data is loaded.
sample=PoseSample('Seq01.zip')
"""
# Check the given file
if not os.path.exists(fileName) and not os.path.isfile(fileName):
raise Exception("Sequence path does not exist: " + fileName)
# Prepare sample information
self.fullFile = fileName
self.dataPath = os.path.split(fileName)[0]
self.file=os.path.split(fileName)[1]
self.seqID=os.path.splitext(self.file)[0]
self.samplePath=self.dataPath + os.path.sep + self.seqID;
# Unzip sample if it is necessary
if os.path.isdir(self.samplePath):
self.unzip = False
else:
self.unzip = True
zipFile=zipfile.ZipFile(self.fullFile,"r")
zipFile.extractall(self.samplePath)
# Set path for rgb images
rgbPath=self.samplePath + os.path.sep + 'imagesjpg'+ os.path.sep
if not os.path.exists(rgbPath):
raise Exception("Invalid sample file. RGB data is not available")
self.rgbpath = rgbPath
# Set path for gt images
gtPath=self.samplePath + os.path.sep + 'maskspng'+ os.path.sep
if not os.path.exists(gtPath):
self.gtpath= "empty"
else:
self.gtpath = gtPath
frames=os.listdir(self.rgbpath)
self.numberFrames=len(frames)
def __del__(self):
""" Destructor. If the object unziped the sample, it remove the temporal data """
if self.unzip:
self.clean()
def clean(self):
""" Clean temporal unziped data """
shutil.rmtree(self.samplePath)
def getRGB(self, frameNum):
""" Get the RGB color image for the given frame """
#get RGB frame
if frameNum>self.numberFrames:
raise Exception("Number of frame has to be less than: "+ self.numberFrames)
framepath=self.rgbpath+self.seqID[3:5]+'_'+ '%04d' %frameNum+'.jpg'
if not os.path.isfile(framepath):
raise Exception("RGB file does not exist: " + framepath)
return cv2.imread(framepath)
def getNumFrames(self):
return self.numberFrames
def getLimb(self, frameNum, actorID,limbID):
""" Get the BW limb image for a certain frame and a certain limbID """
if self.gtpath == "empty":
raise Exception("Limb labels are not available for this sequence. This sequence belong to the validation set.")
else:
limbpath=self.gtpath+self.seqID[3:5]+'_'+ '%04d' %frameNum+'_'+str(actorID)+'_'+str(limbID)+'.png'
if frameNum>self.numberFrames:
raise Exception("Number of frame has to be less than: "+ self.numberFrames)
if actorID<1 or actorID>2:
raise Exception("Invalid actor ID <" + str(actorID) + ">. Valid frames are values between 1 and 2 ")
if limbID<1 or limbID>14:
raise Exception("Invalid limb ID <" + str(limbID) + ">. Valid frames are values between 1 and 14")
return cv2.imread(limbpath,cv2.CV_LOAD_IMAGE_GRAYSCALE)
def getLimbsName(self,limbID):
""" Get the limb label from a given limb ID """
names=('head','torso','lhand','rhand','lforearm','rforearm','larm','rarm', \
'lfoot','rfoot','lleg','rleg','lthigh','rthigh')
# Check the given file
if limbID<1 or limbID>14:
raise Exception("Invalid limb ID <" + str(limbID) + ">. Valid IDs are values between 1 and 14")
return names[limbID-1]
def overlap_images(self, gtimage, predimage):
""" this function computes the hit measure of overlap between two binary images im1 and im2 """
[ret, im1] = cv2.threshold(gtimage, 127, 255, cv2.THRESH_BINARY)
[ret, im2] = cv2.threshold(predimage, 127, 255, cv2.THRESH_BINARY)
intersec = cv2.bitwise_and(im1, im2)
intersec_val = float(numpy.sum(intersec))
union = cv2.bitwise_or(im1, im2)
union_val = float(numpy.sum(union))
if union_val == 0:
return 0
else:
if float(intersec_val / union_val)>0.5:
return 1
else:
return 0
def exportPredictions(self, prediction,frame,actor,limb,predPath):
""" Export the given prediction to the correct file in the given predictions path """
if not os.path.exists(predPath):
os.makedirs(predPath)
prediction_filename = predPath+os.path.sep+ self.seqID[3:5] +'_'+ '%04d' %frame +'_'+str(actor)+'_'+str(limb)+'_prediction.png'
cv2.imwrite(prediction_filename,prediction)
def evaluate(self, predpath):
""" Evaluate this sample agains the ground truth file """
        # Get the list of ground truth limb mask images
gt_list = os.listdir(self.gtpath)
# For each sample on the GT, search the given prediction
score = 0.0
nevals = 0
for gtlimbimage in gt_list:
# Avoid double check, use only labels file
if not gtlimbimage.lower().endswith(".png"):
continue
# Build paths for prediction and ground truth files
aux = gtlimbimage.split('.')
parts = aux[0].split('_')
seqID = parts[0]
gtlimbimagepath = os.path.join(self.gtpath,gtlimbimage)
predlimbimagepath= os.path.join(predpath) + os.path.sep + seqID+'_'+parts[1]+'_'+parts[2]+'_'+parts[3]+"_prediction.png"
#check predfile exists
if not os.path.exists(predlimbimagepath) or not os.path.isfile(predlimbimagepath):
raise Exception("Invalid video limb prediction file. Not all limb predictions are available")
#Load images
gtimage=cv2.imread(gtlimbimagepath, cv2.CV_LOAD_IMAGE_GRAYSCALE)
predimage=cv2.imread(predlimbimagepath, cv2.CV_LOAD_IMAGE_GRAYSCALE)
if cv2.cv.CountNonZero(cv2.cv.fromarray(gtimage)) >= 1:
score += self.overlap_images(gtimage, predimage)
nevals += 1
#release videos and return mean overlap
return score/nevals
| mit | -851,985,030,771,732,900 | -3,870,436,274,577,117,000 | 41.762538 | 150 | 0.599919 | false |
icandigitbaby/openchange | script/bug-analysis/buganalysis/pkgshelper.py | 1 | 24843 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) Enrique J. Hernández 2014
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Helper methods to set the Package and Dependencies fields, if missing, from Apport crashes.
This is specific to Zentyal.
"""
from datetime import datetime
def map_package(report):
"""
Given a report, it will return the package and the version depending on the
    DistroRelease and ExecutableTimestamp fields, which are specific to the Zentyal repositories.
:param apport.report.Report report: the crash report
:returns: a tuple containing the package and the version of the package.
:rtype tuple:
"""
if 'DistroRelease' not in report or 'ExecutableTimestamp' not in report:
raise SystemError('No DistroRelease or ExecutableTimestamp to map the package')
distro_release = report['DistroRelease']
crash_date = datetime.fromtimestamp(int(report['ExecutableTimestamp']))
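    # The date thresholds below mark successive Zentyal package releases; the crash
    # timestamp determines which version of the package was installed at crash time.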
if distro_release == 'Ubuntu 14.04':
if crash_date >= datetime(2014, 5, 24, 1, 31): # Release date
return ('samba', '3:4.1.7+dfsg-2~zentyal2~64')
return ('samba', '3:4.1.7+dfsg-2~zentyal1~32')
elif distro_release == 'Ubuntu 13.10':
return ('samba', '2:4.1.6+dfsg-1~zentyal1~106')
elif distro_release == 'Ubuntu 12.04':
if crash_date < datetime(2013, 10, 2):
return ('samba4', '4.1.0rc3-zentyal3')
elif crash_date < datetime(2013, 12, 10, 13, 03):
return ('samba4', '4.1.0rc4-zentyal1')
elif crash_date < datetime(2013, 12, 17, 11, 34):
return ('samba4', '4.1.2-zentyal2')
elif crash_date < datetime(2014, 3, 5, 20, 16):
return ('samba4', '4.1.3-zentyal2')
elif crash_date < datetime(2014, 5, 30, 8, 41):
return ('samba4', '4.1.5-zentyal1')
else:
return ('samba4', '4.1.7-zentyal1')
else:
raise SystemError('Invalid Distro Release %s' % distro_release)
def map_dependencies(report):
"""
Given a report, it will return the dependencies from the package depending on the
    DistroRelease field, which is specific to the Zentyal repositories.
:param apport.report.Report report: the crash report
    :returns: a tuple of the dependency package names
:rtype tuple:
"""
if 'DistroRelease' not in report:
raise SystemError('No DistroRelease to get the dependencies packages')
distro_release = report['DistroRelease']
if distro_release == 'Ubuntu 14.04':
return (
'adduser',
'apt-utils',
'attr',
'base-passwd',
'busybox-initramfs',
'ca-certificates',
'ckeditor',
'coreutils',
'cpio',
'cron',
'dbus',
'debconf',
'debconf-i18n',
'debianutils',
'dpkg',
'e2fslibs',
'e2fsprogs',
'file',
'findutils',
'gcc-4.8-base',
'gcc-4.9-base',
'gnustep-base-common',
'gnustep-base-runtime',
'gnustep-common',
'ifupdown',
'initramfs-tools',
'initramfs-tools-bin',
'initscripts',
'insserv',
'iproute2',
'isc-dhcp-client',
'isc-dhcp-common',
'javascript-common',
'klibc-utils',
'kmod',
'krb5-locales',
'libacl1',
'libaio1',
'libapache2-mod-wsgi',
'libapparmor1',
'libapt-inst1.5',
'libapt-pkg4.12',
'libarchive-extract-perl',
'libasn1-8-heimdal',
'libattr1',
'libaudit-common',
'libaudit1',
'libavahi-client3',
'libavahi-common-data',
'libavahi-common3',
'libblkid1',
'libbsd0',
'libbz2-1.0',
'libc6',
'libcap2',
'libcgmanager0',
'libcomerr2',
'libcups2',
'libcurl3-gnutls',
'libdb5.3',
'libdbus-1-3',
'libdebconfclient0',
'libdrm2',
'libevent-2.0-5',
'libexpat1',
'libffi6',
'libfile-copy-recursive-perl',
'libgcc1',
'libgcrypt11',
'libgdbm3',
'libglib2.0-0',
'libglib2.0-data',
'libgmp10',
'libgnustep-base1.24',
'libgnutls26',
'libgpg-error0',
'libgpm2',
'libgssapi-krb5-2',
'libgssapi3-heimdal',
'libhcrypto4-heimdal',
'libhdb9-heimdal',
'libheimbase1-heimdal',
'libheimntlm0-heimdal',
'libhx509-5-heimdal',
'libicu52',
'libidn11',
'libjs-jquery',
'libjs-jquery-ui',
'libjs-prototype',
'libjs-scriptaculous',
'libjs-sphinxdoc',
'libjs-swfobject',
'libjs-underscore',
'libjson-c2',
'libjson0',
'libk5crypto3',
'libkdc2-heimdal',
'libkeyutils1',
'libklibc',
'libkmod2',
'libkrb5-26-heimdal',
'libkrb5-3',
'libkrb5support0',
'liblasso3',
'libldap-2.4-2',
'libldb1',
'liblocale-gettext-perl',
'liblog-message-simple-perl',
'liblzma5',
'libmagic1',
'libmapi0',
'libmapiproxy0',
'libmapistore0',
'libmemcached10',
'libmodule-pluggable-perl',
'libmount1',
'libmysqlclient18',
'libncurses5',
'libncursesw5',
'libnih-dbus1',
'libnih1',
'libntdb1',
'libobjc4',
'libp11-kit0',
'libpam-modules',
'libpam-modules-bin',
'libpam-runtime',
'libpam-systemd',
'libpam0g',
'libpcre3',
'libplymouth2',
'libpng12-0',
'libpod-latex-perl',
'libpopt0',
'libpq5',
'libprocps3',
'libpython-stdlib',
'libpython2.7',
'libpython2.7-minimal',
'libpython2.7-stdlib',
'libreadline6',
'libroken18-heimdal',
'librtmp0',
'libsasl2-2',
'libsasl2-modules',
'libsasl2-modules-db',
'libsbjson2.3',
'libselinux1',
'libsemanage-common',
'libsemanage1',
'libsepol1',
'libslang2',
'libsope1',
'libsqlite3-0',
'libss2',
'libssl1.0.0',
'libstdc++6',
'libsystemd-daemon0',
'libsystemd-login0',
'libtalloc2',
'libtasn1-6',
'libtdb1',
'libterm-ui-perl',
'libtevent0',
'libtext-charwidth-perl',
'libtext-iconv-perl',
'libtext-soundex-perl',
'libtext-wrapi18n-perl',
'libtinfo5',
'libudev1',
'libustr-1.0-1',
'libuuid1',
'libwbclient0',
'libwind0-heimdal',
'libxml2',
'libxmlsec1',
'libxmlsec1-openssl',
'libxslt1.1',
'libxtables10',
'logrotate',
'lsb-base',
'makedev',
'memcached',
'mime-support',
'module-init-tools',
'mount',
'mountall',
'multiarch-support',
'mysql-common',
'netbase',
'openchange-ocsmanager',
'openchange-rpcproxy',
'openchangeproxy',
'openchangeserver',
'openssl',
'passwd',
'perl',
'perl-base',
'perl-modules',
'plymouth',
'plymouth-theme-ubuntu-text',
'procps',
'psmisc',
'python',
'python-beaker',
'python-bs4',
'python-chardet',
'python-crypto',
'python-decorator',
'python-dns',
'python-dnspython',
'python-formencode',
'python-ldb',
'python-lxml',
'python-mako',
'python-markupsafe',
'python-minimal',
'python-mysqldb',
'python-nose',
'python-ntdb',
'python-ocsmanager',
'python-openid',
'python-openssl',
'python-paste',
'python-pastedeploy',
'python-pastedeploy-tpl',
'python-pastescript',
'python-pkg-resources',
'python-pygments',
'python-pylons',
'python-repoze.lru',
'python-routes',
'python-rpclib',
'python-samba',
'python-scgi',
'python-setuptools',
'python-simplejson',
'python-six',
'python-spyne',
'python-sqlalchemy',
'python-sqlalchemy-ext',
'python-support',
'python-talloc',
'python-tdb',
'python-tempita',
'python-tz',
'python-waitress',
'python-weberror',
'python-webhelpers',
'python-webob',
'python-webtest',
'python2.7',
'python2.7-minimal',
'readline-common',
'samba',
'samba-common',
'samba-common-bin',
'samba-dsdb-modules',
'samba-libs',
'samba-vfs-modules',
'sed',
'sensible-utils',
'sgml-base',
'shared-mime-info',
'sogo',
'sogo-common',
'sogo-openchange',
'systemd-services',
'sysv-rc',
'sysvinit-utils',
'tar',
'tdb-tools',
'tmpreaper',
'tzdata',
'ucf',
'udev',
'unzip',
'update-inetd',
'upstart',
'util-linux',
'uuid-runtime',
'xml-core',
'zip',
'zlib1g'
)
elif distro_release == 'Ubuntu 13.10':
return (
'adduser',
'apt-utils',
'base-passwd',
'busybox-initramfs',
'ca-certificates',
'ckeditor',
'coreutils',
'cpio',
'cron',
'dbus',
'debconf',
'debconf-i18n',
'debianutils',
'dpkg',
'e2fslibs',
'e2fsprogs',
'file',
'findutils',
'gcc-4.8-base',
'gnustep-base-common',
'gnustep-base-runtime',
'gnustep-common',
'ifupdown',
'initramfs-tools',
'initramfs-tools-bin',
'initscripts',
'insserv',
'iproute2',
'isc-dhcp-client',
'isc-dhcp-common',
'klibc-utils',
'kmod',
'libacl1',
'libaio1',
'libapache2-mod-wsgi',
'libapparmor1',
'libapt-inst1.5',
'libapt-pkg4.12',
'libasn1-8-heimdal',
'libattr1',
'libaudit-common',
'libaudit1',
'libavahi-client3',
'libavahi-common-data',
'libavahi-common3',
'libblkid1',
'libbsd0',
'libbz2-1.0',
'libc6',
'libcap2',
'libclass-isa-perl',
'libcomerr2',
'libcups2',
'libcurl3-gnutls',
'libdb5.1',
'libdbus-1-3',
'libdrm2',
'libevent-2.0-5',
'libexpat1',
'libffi6',
'libfile-copy-recursive-perl',
'libgcc1',
'libgcrypt11',
'libgdbm3',
'libglib2.0-0',
'libgmp10',
'libgnustep-base1.24',
'libgnutls26',
'libgpg-error0',
'libgssapi-krb5-2',
'libgssapi3-heimdal',
'libhcrypto4-heimdal',
'libhdb9-heimdal',
'libheimbase1-heimdal',
'libheimntlm0-heimdal',
'libhx509-5-heimdal',
'libicu48',
'libidn11',
'libjs-jquery',
'libjs-jquery-ui',
'libjs-prototype',
'libjs-scriptaculous',
'libjs-sphinxdoc',
'libjs-underscore',
'libjson-c2',
'libjson0',
'libk5crypto3',
'libkdc2-heimdal',
'libkeyutils1',
'libklibc',
'libkmod2',
'libkrb5-26-heimdal',
'libkrb5-3',
'libkrb5support0',
'liblasso3',
'libldap-2.4-2',
'libldb1',
'liblocale-gettext-perl',
'liblzma5',
'libmagic1',
'libmapi0',
'libmapiproxy0',
'libmapistore0',
'libmemcached10',
'libmount1',
'libmysqlclient18',
'libncurses5',
'libncursesw5',
'libnih-dbus1',
'libnih1',
'libntdb1',
'libobjc4',
'libp11-kit0',
'libpam-modules',
'libpam-modules-bin',
'libpam-runtime',
'libpam-systemd',
'libpam0g',
'libpci3',
'libpcre3',
'libplymouth2',
'libpng12-0',
'libpopt0',
'libpq5',
'libprocps0',
'libpython-stdlib',
'libpython2.7',
'libpython2.7-minimal',
'libpython2.7-stdlib',
'libreadline6',
'libroken18-heimdal',
'librtmp0',
'libsasl2-2',
'libsasl2-modules',
'libsasl2-modules-db',
'libsbjson2.3',
'libselinux1',
'libsemanage-common',
'libsemanage1',
'libsepol1',
'libslang2',
'libsope1',
'libsqlite3-0',
'libss2',
'libssl1.0.0',
'libstdc++6',
'libswitch-perl',
'libsystemd-daemon0',
'libsystemd-login0',
'libtalloc2',
'libtasn1-3',
'libtdb1',
'libtevent0',
'libtext-charwidth-perl',
'libtext-iconv-perl',
'libtext-wrapi18n-perl',
'libtinfo5',
'libudev1',
'libusb-1.0-0',
'libustr-1.0-1',
'libuuid1',
'libwbclient0',
'libwind0-heimdal',
'libxml2',
'libxmlsec1',
'libxmlsec1-openssl',
'libxslt1.1',
'libxtables10',
'logrotate',
'lsb-base',
'makedev',
'memcached',
'mime-support',
'module-init-tools',
'mount',
'mountall',
'multiarch-support',
'mysql-common',
'netbase',
'openchange-ocsmanager',
'openchange-rpcproxy',
'openchangeproxy',
'openchangeserver',
'openssl',
'passwd',
'pciutils',
'perl',
'perl-base',
'perl-modules',
'plymouth',
'plymouth-theme-ubuntu-text',
'procps',
'psmisc',
'python',
'python-beaker',
'python-chardet',
'python-crypto',
'python-decorator',
'python-dnspython',
'python-formencode',
'python-ldb',
'python-lxml',
'python-mako',
'python-mapistore',
'python-markupsafe',
'python-minimal',
'python-mysqldb',
'python-nose',
'python-ntdb',
'python-ocsmanager',
'python-openssl',
'python-paste',
'python-pastedeploy',
'python-pastescript',
'python-pkg-resources',
'python-pygments',
'python-pylons',
'python-repoze.lru',
'python-routes',
'python-rpclib',
'python-samba',
'python-setuptools',
'python-simplejson',
'python-spyne',
'python-support',
'python-talloc',
'python-tdb',
'python-tempita',
'python-tz',
'python-weberror',
'python-webhelpers',
'python-webob',
'python-webtest',
'python2.7',
'python2.7-minimal',
'readline-common',
'samba',
'samba-common',
'samba-common-bin',
'samba-dsdb-modules',
'samba-libs',
'samba-vfs-modules',
'sed',
'sensible-utils',
'sgml-base',
'shared-mime-info',
'sogo',
'sogo-common',
'sogo-openchange',
'systemd-services',
'sysv-rc',
'sysvinit-utils',
'tar',
'tdb-tools',
'tmpreaper',
'tzdata',
'ucf',
'udev',
'update-inetd',
'upstart',
'usbutils',
'util-linux',
'xml-core',
'zip',
'zlib1g'
)
elif distro_release == 'Ubuntu 12.04':
return (
'adduser',
'apache2',
'apache2-utils',
'apache2.2-bin',
'apache2.2-common',
'autotools-dev',
'base-passwd',
'bind9-host',
'binutils',
'busybox-initramfs',
'ca-certificates',
'coreutils',
'cpio',
'cpp-4.6',
'debconf',
'debianutils',
'dnsutils',
'dpkg',
'findutils',
'gcc-4.6',
'gcc-4.6-base',
'gnustep-base-common',
'gnustep-base-runtime',
'gnustep-common',
'gnustep-make',
'gobjc-4.6',
'ifupdown',
'initramfs-tools',
'initramfs-tools-bin',
'initscripts',
'insserv',
'iproute',
'klibc-utils',
'libacl1',
'libapache2-mod-wsgi',
'libapr1',
'libaprutil1',
'libaprutil1-dbd-sqlite3',
'libaprutil1-ldap',
'libasn1-8-heimdal',
'libattr1',
'libavahi-client3',
'libavahi-common-data',
'libavahi-common3',
'libbind9-80',
'libblkid1',
'libbsd0',
'libbz2-1.0',
'libc-bin',
'libc-dev-bin',
'libc6',
'libc6-dev',
'libcap2',
'libclass-isa-perl',
'libcomerr2',
'libcups2',
'libcurl3',
'libdb5.1',
'libdbus-1-3',
'libdm0',
'libdns81',
'libdrm-intel1',
'libdrm-nouveau1a',
'libdrm-radeon1',
'libdrm2',
'libevent-2.0-5',
'libexpat1',
'libffi6',
'libgcc1',
'libgcrypt11',
'libgdbm3',
'libgeoip1',
'libglib2.0-0',
'libgmp10',
'libgnustep-base1.22',
'libgnutls26',
'libgomp1',
'libgpg-error0',
'libgssapi-krb5-2',
'libgssapi3-heimdal',
'libhcrypto4-heimdal',
'libheimbase1-heimdal',
'libheimntlm0-heimdal',
'libhx509-5-heimdal',
'libicu48',
'libidn11',
'libisc83',
'libisccc80',
'libisccfg82',
'libjs-prototype',
'libjs-scriptaculous',
'libk5crypto3',
'libkeyutils1',
'libklibc',
'libkrb5-26-heimdal',
'libkrb5-3',
'libkrb5support0',
'libldap-2.4-2',
'liblwres80',
'liblzma5',
'libmapi0',
'libmapiproxy0',
'libmapistore0',
'libmemcached6',
'libmount1',
'libmpc2',
'libmpfr4',
'libmysqlclient18',
'libncurses5',
'libncursesw5',
'libnih-dbus1',
'libnih1',
'libobjc3',
'libp11-kit0',
'libpam-modules',
'libpam-modules-bin',
'libpam0g',
'libpciaccess0',
'libpcre3',
'libplymouth2',
'libpng12-0',
'libpython2.7',
'libquadmath0',
'libreadline6',
'libroken18-heimdal',
'librtmp0',
'libsasl2-2',
'libsbjson2.3',
'libselinux1',
'libslang2',
'libsope-appserver4.9',
'libsope-core4.9',
'libsope-gdl1-4.9',
'libsope-ldap4.9',
'libsope-mime4.9',
'libsope-xml4.9',
'libsqlite3-0',
'libssl1.0.0',
'libstdc++6',
'libswitch-perl',
'libtasn1-3',
'libtinfo5',
'libudev0',
'libuuid1',
'libwind0-heimdal',
'libxml2',
'libxslt1.1',
'linux-libc-dev',
'lsb-base',
'makedev',
'memcached',
'mime-support',
'module-init-tools',
'mount',
'mountall',
'multiarch-support',
'mysql-common',
'ncurses-bin',
'openchange-ocsmanager',
'openchange-rpcproxy',
'openchangeproxy',
'openchangeserver',
'openssl',
'passwd',
'perl',
'perl-base',
'perl-modules',
'plymouth',
'procps',
'python',
'python-beaker',
'python-decorator',
'python-dnspython',
'python-formencode',
'python-lxml',
'python-mako',
'python-mapistore',
'python-markupsafe',
'python-minimal',
'python-mysqldb',
'python-nose',
'python-ocsmanager',
'python-paste',
'python-pastedeploy',
'python-pastescript',
'python-pkg-resources',
'python-pygments',
'python-pylons',
'python-routes',
'python-rpclib',
'python-setuptools',
'python-simplejson',
'python-spyne',
'python-support',
'python-tempita',
'python-tz',
'python-weberror',
'python-webhelpers',
'python-webob',
'python-webtest',
'python2.7',
'python2.7-minimal',
'readline-common',
'samba4',
'sed',
'sensible-utils',
'sgml-base',
'sogo',
'sogo-openchange',
'sope4.9-libxmlsaxdriver',
'sysv-rc',
'sysvinit-utils',
'tar',
'tmpreaper',
'tzdata',
'udev',
'upstart',
'util-linux',
'xml-core',
'xz-utils',
'zlib1g'
)
else:
raise SystemError('Invalid Distro Release %s' % distro_release)
| gpl-3.0 | 606,654,660,452,027,400 | -5,257,302,534,076,294,000 | 27.987165 | 91 | 0.434506 | false |
porcobosso/spark-ec2 | lib/boto-2.34.0/boto/ec2/instancetype.py | 152 | 2273 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.ec2.ec2object import EC2Object
class InstanceType(EC2Object):
"""
Represents an EC2 VM Type
:ivar name: The name of the vm type
:ivar cores: The number of cpu cores for this vm type
:ivar memory: The amount of memory in megabytes for this vm type
:ivar disk: The amount of disk space in gigabytes for this vm type
"""
def __init__(self, connection=None, name=None, cores=None,
memory=None, disk=None):
super(InstanceType, self).__init__(connection)
self.connection = connection
self.name = name
self.cores = cores
self.memory = memory
self.disk = disk
def __repr__(self):
return 'InstanceType:%s-%s,%s,%s' % (self.name, self.cores,
self.memory, self.disk)
def endElement(self, name, value, connection):
if name == 'name':
self.name = value
elif name == 'cpu':
self.cores = value
elif name == 'disk':
self.disk = value
elif name == 'memory':
self.memory = value
else:
setattr(self, name, value)
| apache-2.0 | 3,423,778,975,851,292,000 | 3,983,334,585,634,740,700 | 37.525424 | 74 | 0.66256 | false |
eckucukoglu/arm-linux-gnueabihf | lib/python2.7/unittest/test/test_program.py | 111 | 7555 | from cStringIO import StringIO
import os
import sys
import unittest
class Test_TestProgram(unittest.TestCase):
def test_discovery_from_dotted_path(self):
loader = unittest.TestLoader()
tests = [self]
expectedPath = os.path.abspath(os.path.dirname(unittest.test.__file__))
self.wasRun = False
def _find_tests(start_dir, pattern):
self.wasRun = True
self.assertEqual(start_dir, expectedPath)
return tests
loader._find_tests = _find_tests
suite = loader.discover('unittest.test')
self.assertTrue(self.wasRun)
self.assertEqual(suite._tests, tests)
# Horrible white box test
def testNoExit(self):
result = object()
test = object()
class FakeRunner(object):
def run(self, test):
self.test = test
return result
runner = FakeRunner()
oldParseArgs = unittest.TestProgram.parseArgs
def restoreParseArgs():
unittest.TestProgram.parseArgs = oldParseArgs
unittest.TestProgram.parseArgs = lambda *args: None
self.addCleanup(restoreParseArgs)
def removeTest():
del unittest.TestProgram.test
unittest.TestProgram.test = test
self.addCleanup(removeTest)
program = unittest.TestProgram(testRunner=runner, exit=False, verbosity=2)
self.assertEqual(program.result, result)
self.assertEqual(runner.test, test)
self.assertEqual(program.verbosity, 2)
class FooBar(unittest.TestCase):
def testPass(self):
assert True
def testFail(self):
assert False
class FooBarLoader(unittest.TestLoader):
"""Test loader that returns a suite containing FooBar."""
def loadTestsFromModule(self, module):
return self.suiteClass(
[self.loadTestsFromTestCase(Test_TestProgram.FooBar)])
def test_NonExit(self):
program = unittest.main(exit=False,
argv=["foobar"],
testRunner=unittest.TextTestRunner(stream=StringIO()),
testLoader=self.FooBarLoader())
self.assertTrue(hasattr(program, 'result'))
def test_Exit(self):
self.assertRaises(
SystemExit,
unittest.main,
argv=["foobar"],
testRunner=unittest.TextTestRunner(stream=StringIO()),
exit=True,
testLoader=self.FooBarLoader())
def test_ExitAsDefault(self):
self.assertRaises(
SystemExit,
unittest.main,
argv=["foobar"],
testRunner=unittest.TextTestRunner(stream=StringIO()),
testLoader=self.FooBarLoader())
class InitialisableProgram(unittest.TestProgram):
exit = False
result = None
verbosity = 1
defaultTest = None
testRunner = None
testLoader = unittest.defaultTestLoader
progName = 'test'
test = 'test'
def __init__(self, *args):
pass
RESULT = object()
class FakeRunner(object):
initArgs = None
test = None
raiseError = False
def __init__(self, **kwargs):
FakeRunner.initArgs = kwargs
if FakeRunner.raiseError:
FakeRunner.raiseError = False
raise TypeError
def run(self, test):
FakeRunner.test = test
return RESULT
class TestCommandLineArgs(unittest.TestCase):
def setUp(self):
self.program = InitialisableProgram()
self.program.createTests = lambda: None
FakeRunner.initArgs = None
FakeRunner.test = None
FakeRunner.raiseError = False
def testHelpAndUnknown(self):
program = self.program
def usageExit(msg=None):
program.msg = msg
program.exit = True
program.usageExit = usageExit
for opt in '-h', '-H', '--help':
program.exit = False
program.parseArgs([None, opt])
self.assertTrue(program.exit)
self.assertIsNone(program.msg)
program.parseArgs([None, '-$'])
self.assertTrue(program.exit)
self.assertIsNotNone(program.msg)
def testVerbosity(self):
program = self.program
for opt in '-q', '--quiet':
program.verbosity = 1
program.parseArgs([None, opt])
self.assertEqual(program.verbosity, 0)
for opt in '-v', '--verbose':
program.verbosity = 1
program.parseArgs([None, opt])
self.assertEqual(program.verbosity, 2)
def testBufferCatchFailfast(self):
program = self.program
for arg, attr in (('buffer', 'buffer'), ('failfast', 'failfast'),
('catch', 'catchbreak')):
if attr == 'catch' and not hasInstallHandler:
continue
short_opt = '-%s' % arg[0]
long_opt = '--%s' % arg
for opt in short_opt, long_opt:
setattr(program, attr, None)
program.parseArgs([None, opt])
self.assertTrue(getattr(program, attr))
for opt in short_opt, long_opt:
not_none = object()
setattr(program, attr, not_none)
program.parseArgs([None, opt])
self.assertEqual(getattr(program, attr), not_none)
def testRunTestsRunnerClass(self):
program = self.program
program.testRunner = FakeRunner
program.verbosity = 'verbosity'
program.failfast = 'failfast'
program.buffer = 'buffer'
program.runTests()
self.assertEqual(FakeRunner.initArgs, {'verbosity': 'verbosity',
'failfast': 'failfast',
'buffer': 'buffer'})
self.assertEqual(FakeRunner.test, 'test')
self.assertIs(program.result, RESULT)
def testRunTestsRunnerInstance(self):
program = self.program
program.testRunner = FakeRunner()
FakeRunner.initArgs = None
program.runTests()
# A new FakeRunner should not have been instantiated
self.assertIsNone(FakeRunner.initArgs)
self.assertEqual(FakeRunner.test, 'test')
self.assertIs(program.result, RESULT)
def testRunTestsOldRunnerClass(self):
program = self.program
FakeRunner.raiseError = True
program.testRunner = FakeRunner
program.verbosity = 'verbosity'
program.failfast = 'failfast'
program.buffer = 'buffer'
program.test = 'test'
program.runTests()
# If initializing raises a type error it should be retried
# without the new keyword arguments
self.assertEqual(FakeRunner.initArgs, {})
self.assertEqual(FakeRunner.test, 'test')
self.assertIs(program.result, RESULT)
def testCatchBreakInstallsHandler(self):
module = sys.modules['unittest.main']
original = module.installHandler
def restore():
module.installHandler = original
self.addCleanup(restore)
self.installed = False
def fakeInstallHandler():
self.installed = True
module.installHandler = fakeInstallHandler
program = self.program
program.catchbreak = True
program.testRunner = FakeRunner
program.runTests()
self.assertTrue(self.installed)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -5,693,879,083,653,559,000 | 5,365,675,733,326,569,000 | 28.627451 | 86 | 0.589146 | false |
RossMcKenzie/ACJ | ACJ.py | 1 | 20954 | from __future__ import division
import random
import os
import numpy as np
import pickle
import datetime
import json
class Decision(object):
def __init__(self, pair, result, reviewer, time):
self.pair = pair
self.result = result
self.reviewer = reviewer
self.time = time
def dict(self):
return {'Pair':[str(self.pair[0]),str(self.pair[1])], 'Result':str(self.result), 'reviewer':str(self.reviewer), 'time':str(self.time)}
def ACJ(data, maxRounds, noOfChoices = 1, logPath = None, optionNames = ["Choice"]):
if noOfChoices < 2:
return UniACJ(data, maxRounds, logPath, optionNames)
else:
return MultiACJ(data, maxRounds, noOfChoices, logPath, optionNames)
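# Illustrative use of the ACJ factory (a sketch only; the script names, reviewer
# and criteria below are hypothetical and not part of this module):
#
#   scripts = ['essay_a', 'essay_b', 'essay_c', 'essay_d']
#   judge = ACJ(scripts, maxRounds=6, noOfChoices=2,
#               optionNames=['spelling', 'argument'])
#   pair = judge.nextPair()
#   judge.comp(pair, result=[True, False], reviewer='reviewer_1')
#   print(judge.results())   # per-criterion lists of scripts with scores scaled 0-100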
class MultiACJ(object):
'''Holds multiple ACJ objects for running comparisons with multiple choices.
The first element of the list of acj objects keeps track of the used pairs.'''
def __init__(self, data, maxRounds, noOfChoices, logPath = None, optionNames = None):
self.data = list(data)
self.n = len(data)
self.round = 0
self.step = 0
self.noOfChoices = noOfChoices
self.acjs = [ACJ(data, maxRounds) for _ in range(noOfChoices)]
self.logPath = logPath
if optionNames == None:
self.optionNames = [str(i) for i in range(noOfChoices)]
else:
self.optionNames = optionNames
self.nextRound()
def getScript(self, ID):
'''Gets script with ID'''
return self.acjs[0].getScript(ID)
def getID(self, script):
'''Gets ID of script'''
return self.acjs[0].getID(script)
def infoPairs(self):
        '''Returns pairs based on summed selection arrays from Progressive Adaptive Comparative Judgement
Politt(2012) + Barrada, Olea, Ponsoda, and Abad (2010)'''
pairs = []
        # Build the summed selection array, then greedily extract the most informative pairs
sA = np.zeros((self.n, self.n))
for acj in self.acjs:
sA = sA+acj.selectionArray()
while(np.max(sA)>0):
iA, iB = np.unravel_index(sA.argmax(), sA.shape)
pairs.append([self.data[iA], self.data[iB]])
sA[iA,:] = 0
sA[iB,:] = 0
sA[:,iA] = 0
sA[:,iB] = 0
return pairs
def nextRound(self):
'''Returns next round of pairs'''
roundList = self.infoPairs()
for acj in self.acjs:
acj.nextRound(roundList)
acj.step = 0
self.round = self.acjs[0].round
self.step = self.acjs[0].step
return self.acjs[0].roundList
def nextPair(self):
'''gets next pair from main acj'''
p = self.acjs[0].nextPair(startNext=False)
if p == -1:
if self.nextRound() != None:
p = self.acjs[0].nextPair(startNext=False)
else:
return None
self.step = self.acjs[0].step
return p
def nextIDPair(self):
'''Gets ID of next pair'''
pair = self.nextPair()
if pair == None:
return None
idPair = []
for p in pair:
idPair.append(self.getID(p))
return idPair
def WMS(self):
ret = []
for acj in self.acjs:
ret.append(acj.WMS())
return ret
def comp(self, pair, result = None, update = None, reviewer = 'Unknown', time = 0):
        '''Adds in a result between a and b, where True means a wins and False means b wins'''
if result == None:
result = [True for _ in range(self.noOfChoices)]
if self.noOfChoices != len(result):
            raise ValueError('Results list must contain exactly noOfChoices entries')
for i in range(self.noOfChoices):
self.acjs[i].comp(pair, result[i], update, reviewer, time)
if self.logPath != None:
self.log(self.logPath, pair, result, reviewer, time)
def IDComp(self, idPair, result = None, update = None, reviewer = 'Unknown', time = 0):
        '''Adds in a result between a and b, where True means a wins and False means b wins. Uses IDs'''
pair = []
for p in idPair:
pair.append(self.getScript(p))
self.comp(pair, result, update, reviewer, time)
def rankings(self, value=True):
'''Returns current rankings
Default is by value but score can be used'''
rank = []
for acj in self.acjs:
rank.append(acj.rankings(value))
return rank
def reliability(self):
'''Calculates reliability'''
rel = []
for acj in self.acjs:
rel.append(acj.reliability()[0])
return rel
def log(self, path, pair, result, reviewer = 'Unknown', time = 0):
'''Writes out a log of a comparison'''
timestamp = datetime.datetime.now().strftime('_%Y_%m_%d_%H_%M_%S_%f')
with open(path+os.sep+str(reviewer)+timestamp+".log", 'w+') as file:
file.write("Reviewer:%s\n" % str(reviewer))
file.write("A:%s\n" % str(pair[0]))
file.write("B:%s\n" % str(pair[1]))
for i in range(len(result)):
file.write("Winner of %s:%s\n" %(self.optionNames[i], "A" if result[i] else "B"))
file.write("Time:%s\n" % str(time))
def JSONLog(self):
'''Write acjs states to JSON files'''
for acj in self.acjs:
acj.JSONLog()
def percentReturned(self):
return self.acjs[0].percentReturned()
def results(self):
        '''Returns a list of scripts and their values scaled between 0 and 100'''
rank = []
for r in self.rankings():
rank.append(list(zip(r[0], (r[1]-r[1].min())*100/(r[1].max()-r[1].min()))))
return rank
def decisionCount(self, reviewer):
return self.acjs[0].decisionCount(reviewer)
class UniACJ(object):
'''Base object to hold comparison data and run algorithm
script is used to refer to anything that is being ranked with ACJ
Dat is an array to hold the scripts with rows being [id, script, score, quality, trials]
Track is an array with each value representing number of times a winner (dim 0) has beaten the loser (dim 1)
    Decisions keeps track of all the decisions made, stored as Decision objects
'''
def __init__(self, data, maxRounds, logPath = None, optionNames = None):
self.reviewers = []
self.optionNames = optionNames
self.noOfChoices = 1
self.round = 0
self.maxRounds = maxRounds
self.update = False
self.data = list(data)
self.dat = np.zeros((5, len(data)))
self.dat[0] = np.asarray(range(len(data)))
#self.dat[1] = np.asarray(data)
#self.dat[2] = np.zeros(len(data), dtype=float)
#self.dat[3] = np.zeros(len(data), dtype=float)
#self.dat[4] = np.zeros(len(data), dtype=float)
self.track = np.zeros((len(data), len(data)))
self.n = len(data)
self.swis = 5
self.roundList = []
self.step = -1
self.decay = 1
self.returned = []
self.logPath = logPath
self.decisions = []
def nextRound(self, extRoundList = None):
'''Returns next round of pairs'''
print("Hello")
self.round = self.round+1
self.step = 0
if self.round > self.maxRounds:
self.maxRounds = self.round
#print(self.round)
if self.round > 1:
self.updateAll()
if extRoundList == None:
self.roundList = self.infoPairs()
else:
self.roundList = extRoundList
self.returned = [False for i in range(len(self.roundList))]
return self.roundList
def polittNextRound(self):
self.round = self.round+1
if self.round > self.maxRounds:
self.roundList = None
elif self.round<2:
self.roundList = self.randomPairs()
elif self.round<2+self.swis:
self.updateAll()
self.roundList = self.scorePairs()
else:
#if self.round == 1+swis:
#self.dat[3] = (1/self.dat[1].size)*self.dat[2][:]
self.updateAll()
self.roundList = self.valuePairs()
return self.roundList
#return self.scorePairs()
def getID(self, script):
'''Gets ID of script'''
return self.data.index(script)
def getScript(self, ID):
'''Gets script with ID'''
return self.data[ID]
def nextPair(self, startNext = True):
'''Returns next pair. Will start new rounds automatically if startNext is true'''
self.step = self.step + 1
if self.step >= len(self.roundList):
if all(self.returned):
if (startNext):
self.nextRound()
#self.polittNextRound()
if self.roundList == None or self.roundList == []:
return None
else:
return -1
else:
o = [p for p in self.roundList if not self.returned[self.roundList.index(p)]]
return random.choice(o)
return self.roundList[self.step]
def nextIDPair(self, startNext = True):
'''Returns ID of next pair'''
pair = self.nextPair()
if pair == None:
return None
idPair = []
for p in pair:
idPair.append(self.getID(p))
return idPair
def singleProb(self, iA, iB):
prob = np.exp(self.dat[3][iA]-self.dat[3][iB])/(1+np.exp(self.dat[3][iA]-self.dat[3][iB]))
return prob
def prob(self, iA):
'''Returns a numpy array of the probability of A beating other values
Based on the Bradley-Terry-Luce model (Bradley and Terry 1952; Luce 1959)'''
probs = np.exp(self.dat[3][iA]-self.dat[3])/(1+np.exp(self.dat[3][iA]-self.dat[3]))
return probs
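    # singleProb/prob above (and fullProb below) implement the Bradley-Terry-Luce
    # model: P(A beats B) = exp(vA - vB) / (1 + exp(vA - vB)), i.e. a logistic
    # function of the difference between the current value estimates in self.dat[3].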
def fullProb(self):
'''Returns a 2D array of all probabilities of x beating y'''
pr = np.zeros((self.n, self.n))
for i in range(self.n):
pr[i] = self.dat[3][i]
return np.exp(pr-self.dat[3])/(1+np.exp(pr-self.dat[3]))
def fisher(self):
'''returns fisher info array'''
prob = self.fullProb()
return ((prob**2)*(1-prob)**2)+((prob.T**2)*(1-prob.T)**2)
def selectionArray(self):
        '''Returns a selection array based on Progressive Adaptive Comparative Judgement
Politt(2012) + Barrada, Olea, Ponsoda, and Abad (2010)'''
F = self.fisher()*np.logical_not(np.identity(self.n))
ran = np.random.rand(self.n, self.n)*np.max(F)
a = 0
b = 0
#Create array from fisher mixed with noise
for i in range(1, self.round+1):
a = a + (i-1)**self.decay
for i in range(1, self.maxRounds+1):
b = b + (i-1)**self.decay
W = a/b
S = ((1-W)*ran)+(W*F)
#Remove i=j and already compared scripts
return S*np.logical_not(np.identity(self.n))*np.logical_not(self.track+self.track.T)
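    # Descriptive note: W above grows from 0 towards 1 as self.round approaches
    # maxRounds, so early rounds are dominated by the random matrix and later
    # rounds by Fisher information (the progressive scheme cited in the docstring).
    # Diagonal entries and pairs already compared (recorded in self.track) are
    # zeroed so they cannot be selected again.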
def updateValue(self, iA):
'''Updates the value of script A using Newton's Method'''
scoreA = self.dat[2][iA]
valA = self.dat[3][iA]
probA = self.prob(iA)
x = np.sum(probA)-0.5#Subtract where i = a
y = np.sum(probA*(1-probA))-0.25#Subtract where i = a
if x == 0:
exit()
#print(self.dat[3])
return self.dat[3][iA]+((self.dat[2][iA]-x)/y)
#print(self.dat[3][iA])
#print("--------")
def updateAll(self):
'''Updates the value of all scripts using Newton's Method'''
newDat = np.zeros(self.dat[3].size)
for i in self.dat[0]:
newDat[i] = self.updateValue(i)
self.dat[3] = newDat[:]
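    # updateValue/updateAll perform one Newton-Raphson step of the Bradley-Terry
    # maximum-likelihood update:
    #   v_new = v + (wins - sum_j P(i beats j)) / sum_j [P(i beats j) * (1 - P(i beats j))]
    # where the -0.5 and -0.25 corrections remove the i == j terms from the sums.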
def randomPairs(self, dat = None):
'''Returns a list of random pairs from dat'''
        if dat is None:
dat = self.data
shufDat = np.array(dat, copy=True)
ranPairs = []
while len(shufDat)>1:
a = shufDat[0]
b = shufDat[1]
shufDat = shufDat[2:]
ranPairs.append([a,b])
return ranPairs
def scorePairs(self, dat = None, scores = None):
'''Returns random pairs with matching scores or close if no match'''
        if dat is None:
dat = self.dat
shuf = np.array(dat[:3], copy=True)
np.random.shuffle(shuf.T)
shuf = shuf[:, np.argsort(shuf[2])]
pairs = []
i = 0
#Pairs matching scores
while i<(shuf[0].size-1):
aID = shuf[0][i]
bID = shuf[0][i+1]
if (self.track[aID][bID]+self.track[bID][aID])==0 and shuf[2][i]==shuf[2][i+1]:
pairs.append([self.data[shuf[0][i]], self.data[shuf[0][i+1]]])
shuf = np.delete(shuf, [i, i+1], 1)
else:
i = i+1
#Add on closest score couplings of unmatched scores
i = 0
while i<shuf[0].size-1:
aID = shuf[0][i]
j = i+1
while j<shuf[0].size:
bID = shuf[0][j]
if (self.track[aID][bID]+self.track[bID][aID])==0:
pairs.append([self.data[shuf[0][i]], self.data[shuf[0][j]]])
shuf = np.delete(shuf, [i, j], 1)
break
else:
j = j+1
if j == shuf[0].size:
i = i+1
return pairs
def valuePairs(self):
'''Returns pairs matched by close values Politt(2012)'''
shuf = np.array(self.dat, copy=True)#Transpose to shuffle columns rather than rows
np.random.shuffle(shuf.T)
pairs = []
i = 0
while i<shuf[0].size-1:
aID = shuf[0][i]
newShuf = shuf[:, np.argsort(np.abs(shuf[3] - shuf[3][i]))]
j = 0
while j<newShuf[0].size:
bID = newShuf[0][j]
if (self.track[aID][bID]+self.track[bID][aID])==0 and self.data[aID]!=self.data[bID]:
pairs.append([self.data[shuf[0][i]], self.data[newShuf[0][j]]])
iJ = np.where(shuf[0]==newShuf[0][j])[0][0]
shuf = np.delete(shuf, [i, iJ], 1)
break
else:
j = j+1
if j == shuf[0].size:
i = i+1
return pairs
def infoPairs(self):
        '''Returns pairs based on selection array from Progressive Adaptive Comparative Judgement
Politt(2012) + Barrada, Olea, Ponsoda, and Abad (2010)'''
pairs = []
        # Build the selection array, then greedily extract the most informative pairs
sA = self.selectionArray()
while(np.max(sA)>0):
iA, iB = np.unravel_index(sA.argmax(), sA.shape)
pairs.append([self.data[iA], self.data[iB]])
sA[iA,:] = 0
sA[iB,:] = 0
sA[:,iA] = 0
sA[:,iB] = 0
return pairs
def rmse(self):
'''Calculate rmse'''
prob = self.fullProb()
y = 1/np.sqrt(np.sum(prob*(1-prob), axis=1)-0.25)
return np.sqrt(np.mean(np.square(y)))
def trueSD(self):
'''Calculate true standard deviation'''
sd = np.std(self.dat[3])
return ((sd**2)/(self.rmse()**2))**(0.5)
def reliability(self):
'''Calculates reliability'''
G = self.trueSD()/self.rmse()
return [(G**2)/(1+(G**2))]
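    # reliability() reports the ACJ scale-separation index: with G = trueSD()/rmse(),
    # the returned G**2 / (1 + G**2) lies in [0, 1] and is commonly read like a
    # Cronbach's-alpha-style reliability for the fitted script values.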
def SR(self, pair, result):
'''Calculates the Squared Residual and weight of a decision'''
p = [self.getID(a) for a in pair]
if result:
prob = self.singleProb(p[0], p[1])
else:
prob = self.singleProb(p[1], p[0])
res = 1-prob
weight = prob*(1-prob)
SR = (res**2)
return SR, weight
def addDecision(self, pair, result, reviewer, time = 0):
'''Adds an SSR to the SSR array'''
self.decisions.append(Decision(pair, result,reviewer, time))
def revID(self, reviewer):
return self.reviewers.index(reviewer)
def WMS(self, decisions = None):
'''Builds data lists:
[reviewer] [sum of SR, sum of weights]
and uses it to make dict reviewer: WMS
WMS = Sum SR/Sum weights
        also returns the mean and standard deviation'''
if decisions == None:
decisions = self.decisions
self.reviewers = []
SRs = []
weights = []
for dec in decisions:
if dec.reviewer not in self.reviewers:
self.reviewers.append(dec.reviewer)
SRs.append(0)
weights.append(0)
SR, weight = self.SR(dec.pair, dec.result)
revID = self.reviewers.index(dec.reviewer)
SRs[revID] = SRs[revID] + SR
weights[revID] = weights[revID] + weight
WMSs = []
WMSDict = {}
for i in range(len(self.reviewers)):
WMS = SRs[i]/weights[i]
WMSs.append(WMS)
WMSDict[self.reviewers[i]]=WMS
return WMSDict, np.mean(WMSs), np.std(WMSs)
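    # WMS() computes, per reviewer, the weighted mean square of their decision
    # residuals (sum of SR / sum of weights) -- an infit-style misfit statistic,
    # so a reviewer whose value sits far above the returned mean is judging
    # inconsistently with the fitted script values.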
def comp(self, pair, result = True, update = None, reviewer = 'Unknown', time = 0):
        '''Adds in a result between a and b, where True means a wins and False means b wins'''
self.addDecision(pair, result, reviewer, time)
if pair[::-1] in self.roundList:
pair = pair[::-1]
result = not result
if pair in self.roundList:
self.returned[self.roundList.index(pair)] = True
a = pair[0]
b = pair[1]
if update == None:
update = self.update
iA = self.data.index(a)
iB = self.data.index(b)
if result:
self.track[iA,iB] = 1
self.track[iB,iA] = 0
else:
self.track[iA,iB] = 0
self.track[iB,iA] = 1
self.dat[2,iA] = np.sum(self.track[iA,:])
self.dat[2,iB] = np.sum(self.track[iB,:])
self.dat[4,iA] = self.dat[4][iA]+1
self.dat[4,iB] = self.dat[4][iB]+1
if self.logPath != None:
self.log(self.logPath, pair, result, reviewer, time)
def IDComp(self, idPair, result = True, update = None, reviewer = 'Unknown', time=0):
        '''Adds in a result between a and b, where True means a wins and False means b wins. Uses IDs'''
pair = []
for p in idPair:
pair.append(self.getScript(p))
self.comp(pair, result, update, reviewer, time)
def percentReturned(self):
if len(self.returned) == 0:
return 0
return (sum(self.returned)/len(self.returned))*100
def log(self, path, pair, result, reviewer = 'Unknown', time = 0):
'''Writes out a log of a comparison'''
timestamp = datetime.datetime.now().strftime('_%Y_%m_%d_%H_%M_%S_%f')
with open(path+os.sep+str(reviewer)+timestamp+".log", 'w+') as file:
file.write("Reviewer:%s\n" % str(reviewer))
file.write("A:%s\n" % str(pair[0]))
file.write("B:%s\n" % str(pair[1]))
file.write("Winner:%s\n" %("A" if result else "B"))
file.write("Time:%s\n" % str(time))
def JSONLog(self, path = None):
'''Writes out a JSON containing data from ACJ'''
if path == None:
path = self.logPath
choice = self.optionNames[0].replace(" ", "_")
ACJDict = {"Criteria":choice, "Scripts":self.scriptDict(), "Reviewers":self.reviewerDict(), "Decisions":self.decisionList()}
with open(path+os.sep+"ACJ_"+choice+".json", 'w+') as file:
json.dump(ACJDict, file, indent=4)
def decisionCount(self, reviewer):
c = 0
for dec in self.decisions:
if (dec.reviewer == reviewer):
c = c + 1
return c
def reviewerDict(self):
revs = {}
WMSs, _, _ = self.WMS()
for rev in self.reviewers:
revDict = {'decisions':self.decisionCount(rev), 'WMS':WMSs[rev]}
revs[str(rev)]= revDict
print(len(revs))
return revs
def scriptDict(self):
scr = {}
r = self.results()[0]
for i in range(len(r)):
scrDict = {"Score":r[i][1]}
scr[str(r[i][0])] = scrDict
return scr
def decisionList(self):
dec = []
for d in self.decisions:
dec.append(d.dict())
return dec
def rankings(self, value=True):
'''Returns current rankings
Default is by value but score can be used'''
if value:
return [np.asarray(self.data)[np.argsort(self.dat[3])], self.dat[3][np.argsort(self.dat[3])]]
else:
return self.data[np.argsort(self.dat[2])]
def results(self):
        '''Returns a list of scripts and their values scaled between 0 and 100'''
r = self.rankings()
rank = list(zip(r[0], (r[1]-r[1].min())*100/(r[1].max()-r[1].min())))
return [rank]
| mit | 7,481,933,980,465,072,000 | 2,843,694,409,037,205,500 | 34.818803 | 142 | 0.537654 | false |
andmos/ansible | lib/ansible/modules/network/cloudengine/ce_snmp_traps.py | 25 | 19335 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_snmp_traps
version_added: "2.4"
short_description: Manages SNMP traps configuration on HUAWEI CloudEngine switches.
description:
- Manages SNMP traps configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@QijunPan)
options:
feature_name:
description:
- Alarm feature name.
choices: ['aaa', 'arp', 'bfd', 'bgp', 'cfg', 'configuration', 'dad', 'devm',
'dhcpsnp', 'dldp', 'driver', 'efm', 'erps', 'error-down', 'fcoe',
'fei', 'fei_comm', 'fm', 'ifnet', 'info', 'ipsg', 'ipv6', 'isis',
'l3vpn', 'lacp', 'lcs', 'ldm', 'ldp', 'ldt', 'lldp', 'mpls_lspm',
'msdp', 'mstp', 'nd', 'netconf', 'nqa', 'nvo3', 'openflow', 'ospf',
'ospfv3', 'pim', 'pim-std', 'qos', 'radius', 'rm', 'rmon', 'securitytrap',
'smlktrap', 'snmp', 'ssh', 'stackmng', 'sysclock', 'sysom', 'system',
'tcp', 'telnet', 'trill', 'trunk', 'tty', 'vbst', 'vfs', 'virtual-perception',
'vrrp', 'vstm', 'all']
trap_name:
description:
- Alarm trap name.
interface_type:
description:
- Interface type.
choices: ['Ethernet', 'Eth-Trunk', 'Tunnel', 'NULL', 'LoopBack', 'Vlanif', '100GE',
'40GE', 'MTunnel', '10GE', 'GE', 'MEth', 'Vbdif', 'Nve']
interface_number:
description:
- Interface number.
port_number:
description:
- Source port number.
'''
EXAMPLES = '''
- name: CloudEngine snmp traps test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config SNMP trap all enable"
ce_snmp_traps:
state: present
feature_name: all
provider: "{{ cli }}"
- name: "Config SNMP trap interface"
ce_snmp_traps:
state: present
interface_type: 40GE
interface_number: 2/0/1
provider: "{{ cli }}"
- name: "Config SNMP trap port"
ce_snmp_traps:
state: present
port_number: 2222
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"feature_name": "all",
"state": "present"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {"snmp-agent trap": [],
"undo snmp-agent trap": []}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"snmp-agent trap": ["enable"],
"undo snmp-agent trap": []}
updates:
description: command sent to the device
returned: always
type: list
sample: ["snmp-agent trap enable"]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_config, load_config, ce_argument_spec, run_commands
class SnmpTraps(object):
""" Manages SNMP trap configuration """
def __init__(self, **kwargs):
""" Class init """
# module
argument_spec = kwargs["argument_spec"]
self.spec = argument_spec
self.module = AnsibleModule(
argument_spec=self.spec,
required_together=[("interface_type", "interface_number")],
supports_check_mode=True
)
# config
self.cur_cfg = dict()
self.cur_cfg["snmp-agent trap"] = []
self.cur_cfg["undo snmp-agent trap"] = []
# module args
self.state = self.module.params['state']
self.feature_name = self.module.params['feature_name']
self.trap_name = self.module.params['trap_name']
self.interface_type = self.module.params['interface_type']
self.interface_number = self.module.params['interface_number']
self.port_number = self.module.params['port_number']
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.existing["snmp-agent trap"] = []
self.existing["undo snmp-agent trap"] = []
self.end_state = dict()
self.end_state["snmp-agent trap"] = []
self.end_state["undo snmp-agent trap"] = []
commands = list()
cmd1 = 'display interface brief'
commands.append(cmd1)
self.interface = run_commands(self.module, commands)
def check_args(self):
""" Check invalid args """
if self.port_number:
if self.port_number.isdigit():
if int(self.port_number) < 1025 or int(self.port_number) > 65535:
self.module.fail_json(
msg='Error: The value of port_number is out of [1025 - 65535].')
else:
self.module.fail_json(
msg='Error: The port_number is not digit.')
if self.interface_type and self.interface_number:
tmp_interface = self.interface_type + self.interface_number
if tmp_interface not in self.interface[0]:
self.module.fail_json(
msg='Error: The interface %s is not in the device.' % tmp_interface)
def get_proposed(self):
""" Get proposed state """
self.proposed["state"] = self.state
if self.feature_name:
self.proposed["feature_name"] = self.feature_name
if self.trap_name:
self.proposed["trap_name"] = self.trap_name
if self.interface_type:
self.proposed["interface_type"] = self.interface_type
if self.interface_number:
self.proposed["interface_number"] = self.interface_number
if self.port_number:
self.proposed["port_number"] = self.port_number
def get_existing(self):
""" Get existing state """
tmp_cfg = self.cli_get_config()
if tmp_cfg:
temp_cfg_lower = tmp_cfg.lower()
temp_data = tmp_cfg.split("\n")
temp_data_lower = temp_cfg_lower.split("\n")
for item in temp_data:
if "snmp-agent trap source-port " in item:
if self.port_number:
item_tmp = item.split("snmp-agent trap source-port ")
self.cur_cfg["trap source-port"] = item_tmp[1]
self.existing["trap source-port"] = item_tmp[1]
elif "snmp-agent trap source " in item:
if self.interface_type:
item_tmp = item.split("snmp-agent trap source ")
self.cur_cfg["trap source interface"] = item_tmp[1]
self.existing["trap source interface"] = item_tmp[1]
if self.feature_name:
for item in temp_data_lower:
if item == "snmp-agent trap enable":
self.cur_cfg["snmp-agent trap"].append("enable")
self.existing["snmp-agent trap"].append("enable")
elif item == "snmp-agent trap disable":
self.cur_cfg["snmp-agent trap"].append("disable")
self.existing["snmp-agent trap"].append("disable")
elif "undo snmp-agent trap enable " in item:
item_tmp = item.split("undo snmp-agent trap enable ")
self.cur_cfg[
"undo snmp-agent trap"].append(item_tmp[1])
self.existing[
"undo snmp-agent trap"].append(item_tmp[1])
elif "snmp-agent trap enable " in item:
item_tmp = item.split("snmp-agent trap enable ")
self.cur_cfg["snmp-agent trap"].append(item_tmp[1])
self.existing["snmp-agent trap"].append(item_tmp[1])
else:
del self.existing["snmp-agent trap"]
del self.existing["undo snmp-agent trap"]
def get_end_state(self):
""" Get end_state state """
tmp_cfg = self.cli_get_config()
if tmp_cfg:
temp_cfg_lower = tmp_cfg.lower()
temp_data = tmp_cfg.split("\n")
temp_data_lower = temp_cfg_lower.split("\n")
for item in temp_data:
if "snmp-agent trap source-port " in item:
if self.port_number:
item_tmp = item.split("snmp-agent trap source-port ")
self.end_state["trap source-port"] = item_tmp[1]
elif "snmp-agent trap source " in item:
if self.interface_type:
item_tmp = item.split("snmp-agent trap source ")
self.end_state["trap source interface"] = item_tmp[1]
if self.feature_name:
for item in temp_data_lower:
if item == "snmp-agent trap enable":
self.end_state["snmp-agent trap"].append("enable")
elif item == "snmp-agent trap disable":
self.end_state["snmp-agent trap"].append("disable")
elif "undo snmp-agent trap enable " in item:
item_tmp = item.split("undo snmp-agent trap enable ")
self.end_state[
"undo snmp-agent trap"].append(item_tmp[1])
elif "snmp-agent trap enable " in item:
item_tmp = item.split("snmp-agent trap enable ")
self.end_state["snmp-agent trap"].append(item_tmp[1])
else:
del self.end_state["snmp-agent trap"]
del self.end_state["undo snmp-agent trap"]
def cli_load_config(self, commands):
""" Load configure through cli """
if not self.module.check_mode:
load_config(self.module, commands)
def cli_get_config(self):
""" Get configure through cli """
regular = "| include snmp | include trap"
flags = list()
flags.append(regular)
tmp_cfg = get_config(self.module, flags)
return tmp_cfg
def set_trap_feature_name(self):
""" Set feature name for trap """
if self.feature_name == "all":
cmd = "snmp-agent trap enable"
else:
cmd = "snmp-agent trap enable feature-name %s" % self.feature_name
if self.trap_name:
cmd += " trap-name %s" % self.trap_name
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_trap_feature_name(self):
""" Undo feature name for trap """
if self.feature_name == "all":
cmd = "undo snmp-agent trap enable"
else:
cmd = "undo snmp-agent trap enable feature-name %s" % self.feature_name
if self.trap_name:
cmd += " trap-name %s" % self.trap_name
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def set_trap_source_interface(self):
""" Set source interface for trap """
cmd = "snmp-agent trap source %s %s" % (
self.interface_type, self.interface_number)
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_trap_source_interface(self):
""" Undo source interface for trap """
cmd = "undo snmp-agent trap source"
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def set_trap_source_port(self):
""" Set source port for trap """
cmd = "snmp-agent trap source-port %s" % self.port_number
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def undo_trap_source_port(self):
""" Undo source port for trap """
cmd = "undo snmp-agent trap source-port"
self.updates_cmd.append(cmd)
cmds = list()
cmds.append(cmd)
self.cli_load_config(cmds)
self.changed = True
def work(self):
""" The work function """
self.check_args()
self.get_proposed()
self.get_existing()
find_flag = False
find_undo_flag = False
tmp_interface = None
if self.state == "present":
if self.feature_name:
if self.trap_name:
tmp_cfg = "feature-name %s trap-name %s" % (
self.feature_name, self.trap_name.lower())
else:
tmp_cfg = "feature-name %s" % self.feature_name
find_undo_flag = False
if self.cur_cfg["undo snmp-agent trap"]:
for item in self.cur_cfg["undo snmp-agent trap"]:
if item == tmp_cfg:
find_undo_flag = True
elif tmp_cfg in item:
find_undo_flag = True
elif self.feature_name == "all":
find_undo_flag = True
if find_undo_flag:
self.set_trap_feature_name()
if not find_undo_flag:
find_flag = False
if self.cur_cfg["snmp-agent trap"]:
for item in self.cur_cfg["snmp-agent trap"]:
if item == "enable":
find_flag = True
elif item == tmp_cfg:
find_flag = True
if not find_flag:
self.set_trap_feature_name()
if self.interface_type:
find_flag = False
tmp_interface = self.interface_type + self.interface_number
if "trap source interface" in self.cur_cfg.keys():
if self.cur_cfg["trap source interface"] == tmp_interface:
find_flag = True
if not find_flag:
self.set_trap_source_interface()
if self.port_number:
find_flag = False
if "trap source-port" in self.cur_cfg.keys():
if self.cur_cfg["trap source-port"] == self.port_number:
find_flag = True
if not find_flag:
self.set_trap_source_port()
else:
if self.feature_name:
if self.trap_name:
tmp_cfg = "feature-name %s trap-name %s" % (
self.feature_name, self.trap_name.lower())
else:
tmp_cfg = "feature-name %s" % self.feature_name
find_flag = False
if self.cur_cfg["snmp-agent trap"]:
for item in self.cur_cfg["snmp-agent trap"]:
if item == tmp_cfg:
find_flag = True
elif item == "enable":
find_flag = True
elif tmp_cfg in item:
find_flag = True
else:
find_flag = True
find_undo_flag = False
if self.cur_cfg["undo snmp-agent trap"]:
for item in self.cur_cfg["undo snmp-agent trap"]:
if item == tmp_cfg:
find_undo_flag = True
elif tmp_cfg in item:
find_undo_flag = True
if find_undo_flag:
pass
elif find_flag:
self.undo_trap_feature_name()
if self.interface_type:
if "trap source interface" in self.cur_cfg.keys():
self.undo_trap_source_interface()
if self.port_number:
if "trap source-port" in self.cur_cfg.keys():
self.undo_trap_source_port()
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
self.results['updates'] = self.updates_cmd
self.module.exit_json(**self.results)
def main():
""" Module main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
feature_name=dict(choices=['aaa', 'arp', 'bfd', 'bgp', 'cfg', 'configuration', 'dad',
'devm', 'dhcpsnp', 'dldp', 'driver', 'efm', 'erps', 'error-down',
'fcoe', 'fei', 'fei_comm', 'fm', 'ifnet', 'info', 'ipsg', 'ipv6',
'isis', 'l3vpn', 'lacp', 'lcs', 'ldm', 'ldp', 'ldt', 'lldp',
'mpls_lspm', 'msdp', 'mstp', 'nd', 'netconf', 'nqa', 'nvo3',
'openflow', 'ospf', 'ospfv3', 'pim', 'pim-std', 'qos', 'radius',
'rm', 'rmon', 'securitytrap', 'smlktrap', 'snmp', 'ssh', 'stackmng',
'sysclock', 'sysom', 'system', 'tcp', 'telnet', 'trill', 'trunk',
'tty', 'vbst', 'vfs', 'virtual-perception', 'vrrp', 'vstm', 'all']),
trap_name=dict(type='str'),
interface_type=dict(choices=['Ethernet', 'Eth-Trunk', 'Tunnel', 'NULL', 'LoopBack', 'Vlanif',
'100GE', '40GE', 'MTunnel', '10GE', 'GE', 'MEth', 'Vbdif', 'Nve']),
interface_number=dict(type='str'),
port_number=dict(type='str')
)
argument_spec.update(ce_argument_spec)
module = SnmpTraps(argument_spec=argument_spec)
module.work()
if __name__ == '__main__':
main()
| gpl-3.0 | 5,119,446,813,966,782,000 | 4,975,318,458,497,493,000 | 34.938662 | 111 | 0.514766 | false |
memtoko/django | django/db/migrations/loader.py | 56 | 15911 | from __future__ import unicode_literals
import os
import sys
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.db.migrations.graph import MigrationGraph, NodeNotFoundError
from django.db.migrations.recorder import MigrationRecorder
from django.utils import six
MIGRATIONS_MODULE_NAME = 'migrations'
class MigrationLoader(object):
"""
Loads migration files from disk, and their status from the database.
Migration files are expected to live in the "migrations" directory of
an app. Their names are entirely unimportant from a code perspective,
but will probably follow the 1234_name.py convention.
On initialization, this class will scan those directories, and open and
read the python files, looking for a class called Migration, which should
inherit from django.db.migrations.Migration. See
django.db.migrations.migration for what that looks like.
Some migrations will be marked as "replacing" another set of migrations.
These are loaded into a separate set of migrations away from the main ones.
If all the migrations they replace are either unapplied or missing from
disk, then they are injected into the main set, replacing the named migrations.
Any dependency pointers to the replaced migrations are re-pointed to the
new migration.
This does mean that this class MUST also talk to the database as well as
to disk, but this is probably fine. We're already not just operating
in memory.
"""
def __init__(self, connection, load=True, ignore_no_migrations=False):
self.connection = connection
self.disk_migrations = None
self.applied_migrations = None
self.ignore_no_migrations = ignore_no_migrations
if load:
self.build_graph()
@classmethod
def migrations_module(cls, app_label):
if app_label in settings.MIGRATION_MODULES:
return settings.MIGRATION_MODULES[app_label]
else:
app_package_name = apps.get_app_config(app_label).name
return '%s.%s' % (app_package_name, MIGRATIONS_MODULE_NAME)
def load_disk(self):
"""
Loads the migrations from all INSTALLED_APPS from disk.
"""
self.disk_migrations = {}
self.unmigrated_apps = set()
self.migrated_apps = set()
for app_config in apps.get_app_configs():
# Get the migrations module directory
module_name = self.migrations_module(app_config.label)
was_loaded = module_name in sys.modules
try:
module = import_module(module_name)
except ImportError as e:
# I hate doing this, but I don't want to squash other import errors.
# Might be better to try a directory check directly.
if "No module named" in str(e) and MIGRATIONS_MODULE_NAME in str(e):
self.unmigrated_apps.add(app_config.label)
continue
raise
else:
# PY3 will happily import empty dirs as namespaces.
if not hasattr(module, '__file__'):
continue
# Module is not a package (e.g. migrations.py).
if not hasattr(module, '__path__'):
continue
# Force a reload if it's already loaded (tests need this)
if was_loaded:
six.moves.reload_module(module)
self.migrated_apps.add(app_config.label)
directory = os.path.dirname(module.__file__)
# Scan for .py files
migration_names = set()
for name in os.listdir(directory):
if name.endswith(".py"):
import_name = name.rsplit(".", 1)[0]
if import_name[0] not in "_.~":
migration_names.add(import_name)
# Load them
south_style_migrations = False
for migration_name in migration_names:
try:
migration_module = import_module("%s.%s" % (module_name, migration_name))
except ImportError as e:
# Ignore South import errors, as we're triggering them
if "south" in str(e).lower():
south_style_migrations = True
break
raise
if not hasattr(migration_module, "Migration"):
raise BadMigrationError(
"Migration %s in app %s has no Migration class" % (migration_name, app_config.label)
)
# Ignore South-style migrations
if hasattr(migration_module.Migration, "forwards"):
south_style_migrations = True
break
self.disk_migrations[app_config.label, migration_name] = migration_module.Migration(migration_name, app_config.label)
if south_style_migrations:
self.unmigrated_apps.add(app_config.label)
def get_migration(self, app_label, name_prefix):
"Gets the migration exactly named, or raises `graph.NodeNotFoundError`"
return self.graph.nodes[app_label, name_prefix]
def get_migration_by_prefix(self, app_label, name_prefix):
"Returns the migration(s) which match the given app label and name _prefix_"
# Do the search
results = []
for l, n in self.disk_migrations:
if l == app_label and n.startswith(name_prefix):
results.append((l, n))
if len(results) > 1:
raise AmbiguityError(
"There is more than one migration for '%s' with the prefix '%s'" % (app_label, name_prefix)
)
elif len(results) == 0:
raise KeyError("There no migrations for '%s' with the prefix '%s'" % (app_label, name_prefix))
else:
return self.disk_migrations[results[0]]
def check_key(self, key, current_app):
if (key[1] != "__first__" and key[1] != "__latest__") or key in self.graph:
return key
# Special-case __first__, which means "the first migration" for
# migrated apps, and is ignored for unmigrated apps. It allows
# makemigrations to declare dependencies on apps before they even have
# migrations.
if key[0] == current_app:
# Ignore __first__ references to the same app (#22325)
return
if key[0] in self.unmigrated_apps:
# This app isn't migrated, but something depends on it.
# The models will get auto-added into the state, though
# so we're fine.
return
if key[0] in self.migrated_apps:
try:
if key[1] == "__first__":
return list(self.graph.root_nodes(key[0]))[0]
else: # "__latest__"
return list(self.graph.leaf_nodes(key[0]))[0]
except IndexError:
if self.ignore_no_migrations:
return None
else:
raise ValueError("Dependency on app with no migrations: %s" % key[0])
raise ValueError("Dependency on unknown app: %s" % key[0])
def build_graph(self):
"""
Builds a migration dependency graph using both the disk and database.
You'll need to rebuild the graph if you apply migrations. This isn't
usually a problem as generally migration stuff runs in a one-shot process.
"""
# Load disk data
self.load_disk()
# Load database data
if self.connection is None:
self.applied_migrations = set()
else:
recorder = MigrationRecorder(self.connection)
self.applied_migrations = recorder.applied_migrations()
# Do a first pass to separate out replacing and non-replacing migrations
normal = {}
replacing = {}
for key, migration in self.disk_migrations.items():
if migration.replaces:
replacing[key] = migration
else:
normal[key] = migration
# Calculate reverse dependencies - i.e., for each migration, what depends on it?
# This is just for dependency re-pointing when applying replacements,
# so we ignore run_before here.
reverse_dependencies = {}
for key, migration in normal.items():
for parent in migration.dependencies:
reverse_dependencies.setdefault(parent, set()).add(key)
# Remember the possible replacements to generate more meaningful error
# messages
reverse_replacements = {}
for key, migration in replacing.items():
for replaced in migration.replaces:
reverse_replacements.setdefault(replaced, set()).add(key)
# Carry out replacements if we can - that is, if all replaced migrations
# are either unapplied or missing.
for key, migration in replacing.items():
# Ensure this replacement migration is not in applied_migrations
self.applied_migrations.discard(key)
# Do the check. We can replace if all our replace targets are
# applied, or if all of them are unapplied.
applied_statuses = [(target in self.applied_migrations) for target in migration.replaces]
can_replace = all(applied_statuses) or (not any(applied_statuses))
if not can_replace:
continue
# Alright, time to replace. Step through the replaced migrations
# and remove, repointing dependencies if needs be.
for replaced in migration.replaces:
if replaced in normal:
# We don't care if the replaced migration doesn't exist;
# the usage pattern here is to delete things after a while.
del normal[replaced]
for child_key in reverse_dependencies.get(replaced, set()):
if child_key in migration.replaces:
continue
# child_key may appear in a replacement
if child_key in reverse_replacements:
for replaced_child_key in reverse_replacements[child_key]:
if replaced in replacing[replaced_child_key].dependencies:
replacing[replaced_child_key].dependencies.remove(replaced)
replacing[replaced_child_key].dependencies.append(key)
else:
normal[child_key].dependencies.remove(replaced)
normal[child_key].dependencies.append(key)
normal[key] = migration
# Mark the replacement as applied if all its replaced ones are
if all(applied_statuses):
self.applied_migrations.add(key)
# Finally, make a graph and load everything into it
self.graph = MigrationGraph()
for key, migration in normal.items():
self.graph.add_node(key, migration)
def _reraise_missing_dependency(migration, missing, exc):
"""
Checks if ``missing`` could have been replaced by any squash
            migration but wasn't because the squash migration was partially
applied before. In that case raise a more understandable exception.
#23556
"""
if missing in reverse_replacements:
candidates = reverse_replacements.get(missing, set())
is_replaced = any(candidate in self.graph.nodes for candidate in candidates)
if not is_replaced:
tries = ', '.join('%s.%s' % c for c in candidates)
exc_value = NodeNotFoundError(
"Migration {0} depends on nonexistent node ('{1}', '{2}'). "
"Django tried to replace migration {1}.{2} with any of [{3}] "
"but wasn't able to because some of the replaced migrations "
"are already applied.".format(
migration, missing[0], missing[1], tries
),
missing)
exc_value.__cause__ = exc
six.reraise(NodeNotFoundError, exc_value, sys.exc_info()[2])
raise exc
# Add all internal dependencies first to ensure __first__ dependencies
# find the correct root node.
for key, migration in normal.items():
for parent in migration.dependencies:
if parent[0] != key[0] or parent[1] == '__first__':
# Ignore __first__ references to the same app (#22325)
continue
try:
self.graph.add_dependency(migration, key, parent)
except NodeNotFoundError as e:
# Since we added "key" to the nodes before this implies
# "parent" is not in there. To make the raised exception
# more understandable we check if parent could have been
# replaced but hasn't (eg partially applied squashed
# migration)
_reraise_missing_dependency(migration, parent, e)
for key, migration in normal.items():
for parent in migration.dependencies:
if parent[0] == key[0]:
# Internal dependencies already added.
continue
parent = self.check_key(parent, key[0])
if parent is not None:
try:
self.graph.add_dependency(migration, key, parent)
except NodeNotFoundError as e:
# Since we added "key" to the nodes before this implies
# "parent" is not in there.
_reraise_missing_dependency(migration, parent, e)
for child in migration.run_before:
child = self.check_key(child, key[0])
if child is not None:
try:
self.graph.add_dependency(migration, child, key)
except NodeNotFoundError as e:
# Since we added "key" to the nodes before this implies
# "child" is not in there.
_reraise_missing_dependency(migration, child, e)
def detect_conflicts(self):
"""
Looks through the loaded graph and detects any conflicts - apps
with more than one leaf migration. Returns a dict of the app labels
that conflict with the migration names that conflict.
"""
seen_apps = {}
conflicting_apps = set()
for app_label, migration_name in self.graph.leaf_nodes():
if app_label in seen_apps:
conflicting_apps.add(app_label)
seen_apps.setdefault(app_label, set()).add(migration_name)
return {app_label: seen_apps[app_label] for app_label in conflicting_apps}
def project_state(self, nodes=None, at_end=True):
"""
Returns a ProjectState object representing the most recent state
that the migrations we loaded represent.
See graph.make_state for the meaning of "nodes" and "at_end"
"""
return self.graph.make_state(nodes=nodes, at_end=at_end, real_apps=list(self.unmigrated_apps))
class BadMigrationError(Exception):
"""
Raised when there's a bad migration (unreadable/bad format/etc.)
"""
pass
class AmbiguityError(Exception):
"""
Raised when more than one migration matches a name prefix
"""
pass
| bsd-3-clause | 7,249,727,922,314,216,000 | -820,802,064,489,198,700 | 45.797059 | 133 | 0.577274 | false |
gdreich/geonode | geonode/geoserver/tests.py | 12 | 16664 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import base64
import json
from django.contrib.auth import get_user_model
from django.http import HttpRequest
from django.core.exceptions import ImproperlyConfigured
from django.conf import settings
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from guardian.shortcuts import assign_perm, get_anonymous_user
from geonode.geoserver.helpers import OGC_Servers_Handler
from geonode.base.populate_test_data import create_models
from geonode.layers.populate_layers_data import create_layer_data
from geonode.layers.models import Layer
class LayerTests(TestCase):
fixtures = ['initial_data.json', 'bobby']
def setUp(self):
self.user = 'admin'
self.passwd = 'admin'
create_models(type='layer')
create_layer_data()
def test_style_manager(self):
"""
Ensures the layer_style_manage route returns a 200.
"""
layer = Layer.objects.all()[0]
bob = get_user_model().objects.get(username='bobby')
assign_perm('change_layer_style', bob, layer)
logged_in = self.client.login(username='bobby', password='bob')
self.assertEquals(logged_in, True)
response = self.client.get(reverse('layer_style_manage', args=(layer.typename,)))
self.assertEqual(response.status_code, 200)
def test_feature_edit_check(self):
"""Verify that the feature_edit_check view is behaving as expected
"""
# Setup some layer names to work with
valid_layer_typename = Layer.objects.all()[0].typename
Layer.objects.all()[0].set_default_permissions()
invalid_layer_typename = "n0ch@nc3"
# Test that an invalid layer.typename is handled for properly
response = self.client.post(
reverse(
'feature_edit_check',
args=(
invalid_layer_typename,
)))
self.assertEquals(response.status_code, 404)
# First test un-authenticated
response = self.client.post(
reverse(
'feature_edit_check',
args=(
valid_layer_typename,
)))
response_json = json.loads(response.content)
self.assertEquals(response_json['authorized'], False)
# Next Test with a user that does NOT have the proper perms
logged_in = self.client.login(username='bobby', password='bob')
self.assertEquals(logged_in, True)
response = self.client.post(
reverse(
'feature_edit_check',
args=(
valid_layer_typename,
)))
response_json = json.loads(response.content)
self.assertEquals(response_json['authorized'], False)
# Login as a user with the proper permission and test the endpoint
logged_in = self.client.login(username='admin', password='admin')
self.assertEquals(logged_in, True)
response = self.client.post(
reverse(
'feature_edit_check',
args=(
valid_layer_typename,
)))
        # Test that the method returns authorized=False because it's not a datastore
response_json = json.loads(response.content)
self.assertEquals(response_json['authorized'], False)
layer = Layer.objects.all()[0]
layer.storeType = "dataStore"
layer.save()
# Test that the method returns authorized=True if it's a datastore
if settings.OGC_SERVER['default']['DATASTORE']:
# The check was moved from the template into the view
response = self.client.post(
reverse(
'feature_edit_check',
args=(
valid_layer_typename,
)))
response_json = json.loads(response.content)
self.assertEquals(response_json['authorized'], True)
def test_layer_acls(self):
""" Verify that the layer_acls view is behaving as expected
"""
# Test that HTTP_AUTHORIZATION in request.META is working properly
valid_uname_pw = '%s:%s' % ('bobby', 'bob')
invalid_uname_pw = '%s:%s' % ('n0t', 'v@l1d')
valid_auth_headers = {
'HTTP_AUTHORIZATION': 'basic ' + base64.b64encode(valid_uname_pw),
}
invalid_auth_headers = {
'HTTP_AUTHORIZATION': 'basic ' +
base64.b64encode(invalid_uname_pw),
}
bob = get_user_model().objects.get(username='bobby')
layer_ca = Layer.objects.get(typename='geonode:CA')
assign_perm('change_layer_data', bob, layer_ca)
# Test that requesting when supplying the geoserver credentials returns
# the expected json
expected_result = {
u'email': u'[email protected]',
u'fullname': u'bobby',
u'is_anonymous': False,
u'is_superuser': False,
u'name': u'bobby',
u'ro': [u'geonode:layer2',
u'geonode:mylayer',
u'geonode:foo',
u'geonode:whatever',
u'geonode:fooey',
u'geonode:quux',
u'geonode:fleem'],
u'rw': [u'geonode:CA']
}
response = self.client.get(reverse('layer_acls'), **valid_auth_headers)
response_json = json.loads(response.content)
# 'ro' and 'rw' are unsorted collections
self.assertEquals(sorted(expected_result), sorted(response_json))
# Test that requesting when supplying invalid credentials returns the
# appropriate error code
response = self.client.get(reverse('layer_acls'), **invalid_auth_headers)
self.assertEquals(response.status_code, 401)
# Test logging in using Djangos normal auth system
self.client.login(username='admin', password='admin')
# Basic check that the returned content is at least valid json
response = self.client.get(reverse('layer_acls'))
response_json = json.loads(response.content)
self.assertEquals('admin', response_json['fullname'])
self.assertEquals('', response_json['email'])
# TODO Lots more to do here once jj0hns0n understands the ACL system
# better
def test_resolve_user(self):
"""Verify that the resolve_user view is behaving as expected
"""
# Test that HTTP_AUTHORIZATION in request.META is working properly
valid_uname_pw = "%s:%s" % ('admin', 'admin')
invalid_uname_pw = "%s:%s" % ("n0t", "v@l1d")
valid_auth_headers = {
'HTTP_AUTHORIZATION': 'basic ' + base64.b64encode(valid_uname_pw),
}
invalid_auth_headers = {
'HTTP_AUTHORIZATION': 'basic ' +
base64.b64encode(invalid_uname_pw),
}
response = self.client.get(reverse('layer_resolve_user'), **valid_auth_headers)
response_json = json.loads(response.content)
self.assertEquals({'geoserver': False,
'superuser': True,
'user': 'admin',
'fullname': 'admin',
'email': ''},
response_json)
# Test that requesting when supplying invalid credentials returns the
# appropriate error code
response = self.client.get(reverse('layer_acls'), **invalid_auth_headers)
self.assertEquals(response.status_code, 401)
# Test logging in using Djangos normal auth system
self.client.login(username='admin', password='admin')
# Basic check that the returned content is at least valid json
response = self.client.get(reverse('layer_resolve_user'))
response_json = json.loads(response.content)
self.assertEquals('admin', response_json['user'])
self.assertEquals('admin', response_json['fullname'])
self.assertEquals('', response_json['email'])
class UtilsTests(TestCase):
def setUp(self):
self.OGC_DEFAULT_SETTINGS = {
'default': {
'BACKEND': 'geonode.geoserver',
'LOCATION': 'http://localhost:8080/geoserver/',
'USER': 'admin',
'PASSWORD': 'geoserver',
'MAPFISH_PRINT_ENABLED': True,
'PRINT_NG_ENABLED': True,
'GEONODE_SECURITY_ENABLED': True,
'GEOGIG_ENABLED': False,
'WMST_ENABLED': False,
'BACKEND_WRITE_ENABLED': True,
'WPS_ENABLED': False,
'DATASTORE': str(),
'GEOGIG_DATASTORE_DIR': str(),
}
}
self.UPLOADER_DEFAULT_SETTINGS = {
'BACKEND': 'geonode.rest',
'OPTIONS': {
'TIME_ENABLED': False,
'MOSAIC_ENABLED': False,
'GEOGIG_ENABLED': False}}
self.DATABASE_DEFAULT_SETTINGS = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'development.db'}}
def test_ogc_server_settings(self):
"""
Tests the OGC Servers Handler class.
"""
with override_settings(OGC_SERVER=self.OGC_DEFAULT_SETTINGS, UPLOADER=self.UPLOADER_DEFAULT_SETTINGS):
OGC_SERVER = self.OGC_DEFAULT_SETTINGS.copy()
OGC_SERVER.update(
{'PUBLIC_LOCATION': 'http://localhost:8080/geoserver/'})
ogc_settings = OGC_Servers_Handler(OGC_SERVER)['default']
default = OGC_SERVER.get('default')
self.assertEqual(ogc_settings.server, default)
self.assertEqual(ogc_settings.BACKEND, default.get('BACKEND'))
self.assertEqual(ogc_settings.LOCATION, default.get('LOCATION'))
self.assertEqual(
ogc_settings.PUBLIC_LOCATION,
default.get('PUBLIC_LOCATION'))
self.assertEqual(ogc_settings.USER, default.get('USER'))
self.assertEqual(ogc_settings.PASSWORD, default.get('PASSWORD'))
self.assertEqual(ogc_settings.DATASTORE, str())
self.assertEqual(ogc_settings.credentials, ('admin', 'geoserver'))
self.assertTrue(ogc_settings.MAPFISH_PRINT_ENABLED)
self.assertTrue(ogc_settings.PRINT_NG_ENABLED)
self.assertTrue(ogc_settings.GEONODE_SECURITY_ENABLED)
self.assertFalse(ogc_settings.GEOGIG_ENABLED)
self.assertFalse(ogc_settings.WMST_ENABLED)
self.assertTrue(ogc_settings.BACKEND_WRITE_ENABLED)
self.assertFalse(ogc_settings.WPS_ENABLED)
def test_ogc_server_defaults(self):
"""
Tests that OGC_SERVER_SETTINGS are built if they do not exist in the settings.
"""
OGC_SERVER = {'default': dict()}
defaults = self.OGC_DEFAULT_SETTINGS.get('default')
ogc_settings = OGC_Servers_Handler(OGC_SERVER)['default']
self.assertEqual(ogc_settings.server, defaults)
self.assertEqual(ogc_settings.rest, defaults['LOCATION'] + 'rest')
self.assertEqual(ogc_settings.ows, defaults['LOCATION'] + 'ows')
# Make sure we get None vs a KeyError when the key does not exist
self.assertIsNone(ogc_settings.SFDSDFDSF)
def test_importer_configuration(self):
"""
Tests that the OGC_Servers_Handler throws an ImproperlyConfigured exception when using the importer
backend without a vector database and a datastore configured.
"""
database_settings = self.DATABASE_DEFAULT_SETTINGS.copy()
ogc_server_settings = self.OGC_DEFAULT_SETTINGS.copy()
uploader_settings = self.UPLOADER_DEFAULT_SETTINGS.copy()
uploader_settings['BACKEND'] = 'geonode.importer'
self.assertTrue(['geonode_imports' not in database_settings.keys()])
with self.settings(UPLOADER=uploader_settings, OGC_SERVER=ogc_server_settings, DATABASES=database_settings):
# Test the importer backend without specifying a datastore or
# corresponding database.
with self.assertRaises(ImproperlyConfigured):
OGC_Servers_Handler(ogc_server_settings)['default']
ogc_server_settings['default']['DATASTORE'] = 'geonode_imports'
# Test the importer backend with a datastore but no corresponding
# database.
with self.settings(UPLOADER=uploader_settings, OGC_SERVER=ogc_server_settings, DATABASES=database_settings):
with self.assertRaises(ImproperlyConfigured):
OGC_Servers_Handler(ogc_server_settings)['default']
database_settings['geonode_imports'] = database_settings[
'default'].copy()
database_settings['geonode_imports'].update(
{'NAME': 'geonode_imports'})
# Test the importer backend with a datastore and a corresponding
# database, no exceptions should be thrown.
with self.settings(UPLOADER=uploader_settings, OGC_SERVER=ogc_server_settings, DATABASES=database_settings):
OGC_Servers_Handler(ogc_server_settings)['default']
class SecurityTest(TestCase):
"""
Tests for the Geonode security app.
"""
def setUp(self):
self.admin, created = get_user_model().objects.get_or_create(
username='admin', password='admin', is_superuser=True)
def test_login_middleware(self):
"""
Tests the Geonode login required authentication middleware.
"""
from geonode.security.middleware import LoginRequiredMiddleware
middleware = LoginRequiredMiddleware()
white_list = [
reverse('account_ajax_login'),
reverse('account_confirm_email', kwargs=dict(key='test')),
reverse('account_login'),
reverse('account_password_reset'),
reverse('forgot_username'),
reverse('layer_acls'),
reverse('layer_resolve_user'),
]
black_list = [
reverse('account_signup'),
reverse('document_browse'),
reverse('maps_browse'),
reverse('layer_browse'),
reverse('layer_detail', kwargs=dict(layername='geonode:Test')),
reverse('layer_remove', kwargs=dict(layername='geonode:Test')),
reverse('profile_browse'),
]
request = HttpRequest()
request.user = get_anonymous_user()
# Requests should be redirected to the the `redirected_to` path when un-authenticated user attempts to visit
# a black-listed url.
for path in black_list:
request.path = path
response = middleware.process_request(request)
self.assertEqual(response.status_code, 302)
self.assertTrue(
response.get('Location').startswith(
middleware.redirect_to))
# The middleware should return None when an un-authenticated user
# attempts to visit a white-listed url.
for path in white_list:
request.path = path
response = middleware.process_request(request)
self.assertIsNone(
response,
msg="Middleware activated for white listed path: {0}".format(path))
self.client.login(username='admin', password='admin')
self.assertTrue(self.admin.is_authenticated())
request.user = self.admin
# The middleware should return None when an authenticated user attempts
# to visit a black-listed url.
for path in black_list:
request.path = path
response = middleware.process_request(request)
self.assertIsNone(response)
| gpl-3.0 | -7,082,910,537,978,679,000 | -3,298,942,924,522,628,600 | 38.301887 | 116 | 0.599556 | false |
alexei-matveev/ccp1gui | jobmanager/slaveprocess.py | 1 | 12014 | """
This collection of routines provides alternatives to those
in subprocess.py which create additional controlling
threads.
Since this feature is not needed in the GUI (a separate thread
is spawned off to handle each job) they are no longer needed,
but are retained for possible future use.
"""
import os,sys
if __name__ == "__main__":
# Need to add the gui directory to the python path so
# that all the modules can be imported
gui_path = os.path.split(os.path.dirname( os.path.realpath( __file__ ) ))[0]
sys.path.append(gui_path)
import threading
import subprocess
import time
t = time  # alias: the debug prints in this module use t.time()
import Queue
import unittest
import ccp1gui_subprocess
class SlavePipe(ccp1gui_subprocess.SubProcess):
"""Spawn a thread which then uses a pipe to run the commmand
This method runs the requested command in a subthread
the wait method can be used to check progress
however there is no kill available (no child pid)
... maybe there is a way to destroy the thread together with the child??
for consistency with spawn it would be ideal if stdin,out,err could
be provided to route these streams, at the moment they are echoed and saved in.
"""
def __init__(self,cmd,**kw):
ccp1gui_subprocess.SubProcess.__init__(self,cmd,**kw)
def run(self):
# create a Lock
self.lock = threading.RLock()
# Create the queues
self.queue = Queue.Queue()
self.status = ccp1gui_subprocess.SLAVE_PIPE
self.slavethread = SlaveThread(self.lock, self.queue, None, self.__slave_pipe_proc)
if self.debug:
print t.time(),'SlavePipe: slave thread starting'
self.slavethread.start()
if self.debug:
print t.time(),'SlavePipe thread started'
def wait(self,timeout=None):
"""Wait.. """
count = 0
if timeout:
tester = timeout
incr = 1
else:
tester = 1
incr = 0
while count < tester:
if timeout:
count = count + incr
try:
tt = self.queue.get(0)
if tt == ccp1gui_subprocess.CHILD_STDOUT:
tt2 = self.queue.get(0)
for x in tt2:
self.output.append(x)
print 'stdout>',x,
elif tt == ccp1gui_subprocess.CHILD_STDERR:
tt2 = self.queue.get(0)
for x in tt2:
self.err.append(x)
print 'stderr>',x,
elif tt == ccp1gui_subprocess.CHILD_EXITS:
code = self.queue.get(0)
if self.debug:
print t.time(),'done'
return code
except Queue.Empty:
if self.debug:
print t.time(), 'queue from slave empty, sleep .1'
time.sleep(0.1)
#print t.time(),'wait timed out'
def kill(self):
"""(not implemented) """
if self.debug:
print t.time(), 'kill'
print 'kill not available for SlavePipe class'
def get_output(self):
"""Retrieve any pending data on the pipe to the slave process """
while 1:
try:
tt = self.queue.get(0)
if tt == ccp1gui_subprocess.CHILD_STDOUT:
tt2 = self.queue.get(0)
for x in tt2:
self.output.append(x)
print 'stdout>',x,
elif tt == ccp1gui_subprocess.CHILD_STDERR:
tt2 = self.queue.get(0)
for x in tt2:
self.err.append(x)
print 'stderr>',x,
elif tt == ccp1gui_subprocess.CHILD_EXITS:
code = self.queue.get(0)
if self.debug:
print t.time(),'done'
return code
except Queue.Empty:
break
return self.output
def __slave_pipe_proc(self,lock,queue,queue1):
""" this is the code executed in the slave thread when a
(foreground) pipe is required
will return stdout and stderr over the queue
queue1 is not used
"""
cmd = self.cmd_as_string()
if self.debug:
print t.time(), 'invoke command',cmd
#(stdin,stdout,stderr) = os.popen3(cmd)
p =subprocess.Popen(cmd,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
(stdin, stdout, stderr) = (p.stdin, p.stdout, p.stderr)
if self.debug:
print t.time(),'command exits'
while 1:
if self.debug:
print t.time(),'read out'
txt = stdout.readlines()
if txt:
if self.debug:
print t.time(),'read out returns', txt[0],' etc'
queue.put(ccp1gui_subprocess.CHILD_STDOUT)
queue.put(txt)
else:
if self.debug:
print 'out is none'
txt2 = stderr.readlines()
if txt2:
if self.debug:
print t.time(),'read err returns', txt2[0],' etc'
                queue.put(ccp1gui_subprocess.CHILD_STDERR)
queue.put(txt2)
else:
if self.debug:
print 'err is none'
if not txt or not txt2:
break
status = stdout.close()
if self.debug:
print 'stdout close status',status
status = stdin.close()
if self.debug:
print 'stdin close status',status
status = stderr.close()
if self.debug:
print 'stderr close status',status
if self.debug:
print t.time(),'put to close:', ccp1gui_subprocess.CHILD_EXITS
queue.put(ccp1gui_subprocess.CHILD_EXITS)
code = 0
queue.put(code)
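# Illustrative usage of SlavePipe (added note, not part of the original module).
# A minimal sketch, assuming the SubProcess base class accepts the command as a
# single string, as the SlaveSpawn unit test below does:
#
#     pipe = SlavePipe('echo a b', debug=0)
#     pipe.run()                # start the slave thread driving the pipe
#     code = pipe.wait()        # drain stdout/stderr messages from the queue
#     print pipe.get_output()   # -> ['a b\n']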
class SlaveSpawn(ccp1gui_subprocess.SubProcess):
"""Use a pythonwin process or fork with controlling thread
2 queues connect launching thread to control thread
issues ...
spawn will need its streams, part
"""
def __init__(self,cmd,**kw):
ccp1gui_subprocess.SubProcess.__init__(self,cmd,**kw)
def run(self,stdin=None,stdout=None,stderr=None):
self.stdin=stdin
self.stdout=stdout
self.stderr=stderr
# create a Lock
self.lock = threading.RLock()
# Create the queues
self.queue = Queue.Queue()
self.queue1 = Queue.Queue()
self.status = ccp1gui_subprocess.SLAVE_SPAWN
self.slavethread = SlaveThread(self.lock, self.queue ,self.queue1,self.__slave_spawn_proc)
if self.debug:
print t.time(),'threadedSpawn: slave thread starting'
self.slavethread.start()
if self.debug:
print t.time(),'threadedSpawn returns'
def kill(self):
"""pass kill signal to controlling thread """
if self.debug:
print t.time(), 'queue.put ',ccp1gui_subprocess.KILL_CHILD
self.queue1.put(ccp1gui_subprocess.KILL_CHILD)
def __slave_spawn_proc(self,loc,queue,queue1):
""" this is the code executed in the slave thread
when a (background) spawn/fork is required
will return stdout and stderr over the queue
"""
if self.debug:
print t.time(), 'slave spawning', self.cmd_as_string()
self._spawn_child()
while 1:
if self.debug:
print t.time(),'check loop'
# check status of child
# this should return immediately
code = self._wait_child(timeout=0)
if self.debug:
print t.time(),'check code',code
if code != 999:
# child has exited pass back return code
queue.put(ccp1gui_subprocess.CHILD_EXITS)
queue.put(code)
# Attempt to execute any termination code
if self.on_end:
self.on_end()
break
# check for intervention
try:
if self.debug:
print t.time(), 'slave get'
tt = queue1.get(0)
if self.debug:
print t.time(), 'slave gets message for child', tt
if tt == ccp1gui_subprocess.KILL_CHILD:
code = self._kill_child()
break
except Queue.Empty:
if self.debug:
print t.time(), 'no child message sleeping'
time.sleep(0.1)
queue.put(ccp1gui_subprocess.CHILD_EXITS)
queue.put(code)
#
# Currently these are not set up
# here (cf the popen3 based one)
#
#status = stdout.close()
#status = stdin.close()
#status = stderr.close()
def wait(self,timeout=None):
"""wait for process to finish """
if self.debug:
print t.time(), 'wait'
count = 0
if timeout:
tester = timeout
incr = 1
else:
tester = 1
incr = 0
while count < tester:
if timeout:
count = count + incr
try:
tt = self.queue.get(0)
if tt == ccp1gui_subprocess.CHILD_STDOUT:
tt2 = self.queue.get(0)
for x in tt2:
print 'stdout>',x,
elif tt == ccp1gui_subprocess.CHILD_STDERR:
tt2 = self.queue.get(0)
for x in tt2:
print 'stderr>',x,
elif tt == ccp1gui_subprocess.CHILD_EXITS:
code = self.queue.get(0)
if self.debug:
print t.time(),'done'
return code
except Queue.Empty:
if self.debug:
print t.time(), 'queue from slave empty, sleep .1'
time.sleep(0.1)
#print t.time(),'wait timed out'
class SlaveThread(threading.Thread):
"""The slave thread runs separate thread
For control it has
- a lock (not used at the moment)
- a queue object to communicate with the GUI thread
- a procedure to run
"""
def __init__(self,lock,queue,queue1,proc):
threading.Thread.__init__(self,None,None,"JobMan")
self.lock = lock
self.queue = queue
self.queue1 = queue1
self.proc = proc
def run(self):
""" call the specified procedure"""
try:
code = self.proc(self.lock,self.queue,self.queue1)
except RuntimeError, e:
self.queue.put(ccp1gui_subprocess.RUNTIME_ERROR)
##########################################################
#
#
# Unittesting stuff goes here
#
#
##########################################################
class testSlaveSpawn(unittest.TestCase):
"""fork/pythonwin process management with extra process"""
    # this is no longer needed for GUI operation
# it also has not been adapted to take cmd + args separately
# however it does seem to work
def testA(self):
"""check echo on local host using stdout redirection"""
self.proc = SlaveSpawn('echo a b',debug=0)
o = open('test.out','w')
self.proc.run(stdout=o)
self.proc.wait()
o.close()
o = open('test.out','r')
output = o.readlines()
print 'output=',output
self.assertEqual(output,['a b\n'])
if __name__ == "__main__":
# Run all tests automatically
unittest.main()
| gpl-2.0 | 8,508,694,813,298,899,000 | -2,643,106,496,614,529,000 | 29.569975 | 98 | 0.512735 | false |
kbussell/pydocusign | pydocusign/client.py | 1 | 20977 | """DocuSign client."""
from collections import namedtuple
import base64
import json
import logging
import os
import warnings
import requests
from pydocusign import exceptions
logger = logging.getLogger(__name__)
Response = namedtuple('Response', ['status_code', 'text'])
class DocuSignClient(object):
"""DocuSign client."""
def __init__(self,
root_url='',
username='',
password='',
integrator_key='',
account_id='',
account_url='',
app_token=None,
oauth2_token=None,
timeout=None):
"""Configure DocuSign client."""
#: Root URL of DocuSign API.
#:
        #: If not explicitly provided or empty, then ``DOCUSIGN_ROOT_URL``
#: environment variable, if available, is used.
self.root_url = root_url
if not self.root_url:
self.root_url = os.environ.get('DOCUSIGN_ROOT_URL', '')
#: API username.
#:
        #: If not explicitly provided or empty, then ``DOCUSIGN_USERNAME``
#: environment variable, if available, is used.
self.username = username
if not self.username:
self.username = os.environ.get('DOCUSIGN_USERNAME', '')
#: API password.
#:
        #: If not explicitly provided or empty, then ``DOCUSIGN_PASSWORD``
#: environment variable, if available, is used.
self.password = password
if not self.password:
self.password = os.environ.get('DOCUSIGN_PASSWORD', '')
#: API integrator key.
#:
        #: If not explicitly provided or empty, then
#: ``DOCUSIGN_INTEGRATOR_KEY`` environment variable, if available, is
#: used.
self.integrator_key = integrator_key
if not self.integrator_key:
self.integrator_key = os.environ.get('DOCUSIGN_INTEGRATOR_KEY',
'')
#: API account ID.
#: This attribute can be guessed via :meth:`login_information`.
#:
        #: If not explicitly provided or empty, then ``DOCUSIGN_ACCOUNT_ID``
#: environment variable, if available, is used.
self.account_id = account_id
if not self.account_id:
self.account_id = os.environ.get('DOCUSIGN_ACCOUNT_ID', '')
#: API AppToken.
#:
        #: If not explicitly provided or empty, then ``DOCUSIGN_APP_TOKEN``
#: environment variable, if available, is used.
self.app_token = app_token
if not self.app_token:
self.app_token = os.environ.get('DOCUSIGN_APP_TOKEN', '')
#: OAuth2 Token.
#:
        #: If not explicitly provided or empty, then ``DOCUSIGN_OAUTH2_TOKEN``
#: environment variable, if available, is used.
self.oauth2_token = oauth2_token
if not self.oauth2_token:
self.oauth2_token = os.environ.get('DOCUSIGN_OAUTH2_TOKEN', '')
#: User's URL, i.e. the one mentioning :attr:`account_id`.
#: This attribute can be guessed via :meth:`login_information`.
self.account_url = account_url
if self.root_url and self.account_id and not self.account_url:
self.account_url = '{root}/accounts/{account}'.format(
root=self.root_url,
account=self.account_id)
# Connection timeout.
if timeout is None:
timeout = float(os.environ.get('DOCUSIGN_TIMEOUT', 30))
self.timeout = timeout
def get_timeout(self):
"""Return connection timeout."""
return self._timeout
def set_timeout(self, value):
"""Set connection timeout. Converts ``value`` to a float.
Raises :class:`ValueError` in case the value is lower than 0.001.
"""
if value < 0.001:
raise ValueError('Cannot set timeout lower than 0.001')
self._timeout = int(value * 1000) / 1000.
def del_timeout(self):
"""Remove timeout attribute."""
del self._timeout
timeout = property(
get_timeout,
set_timeout,
del_timeout,
"""Connection timeout, in seconds, for HTTP requests to DocuSign's API.
This is not timeout for full request, only connection.
Precision is limited to milliseconds:
>>> client = DocuSignClient(timeout=1.2345)
>>> client.timeout
1.234
Setting timeout lower than 0.001 is forbidden.
>>> client.timeout = 0.0009 # Doctest: +ELLIPSIS
Traceback (most recent call last):
...
ValueError: Cannot set timeout lower than 0.001
"""
)
def base_headers(self, sobo_email=None):
"""Return dictionary of base headers for all HTTP requests.
:param sobo_email: if specified, will set the appropriate header to act
on behalf of that user. The authenticated account must have the
appropriate permissions. See:
https://www.docusign.com/p/RESTAPIGuide/RESTAPIGuide.htm#SOBO/Send%20On%20Behalf%20Of%20Functionality%20in%20the%20DocuSign%20REST%20API.htm
"""
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json',
}
if self.oauth2_token:
headers['Authorization'] = 'Bearer ' + self.oauth2_token
if sobo_email:
headers['X-DocuSign-Act-As-User'] = sobo_email
else:
auth = {
'Username': self.username,
'Password': self.password,
'IntegratorKey': self.integrator_key,
}
if sobo_email:
auth['SendOnBehalfOf'] = sobo_email
headers['X-DocuSign-Authentication'] = json.dumps(auth)
return headers
def _request(self, url, method='GET', headers=None, data=None,
json_data=None, expected_status_code=200, sobo_email=None):
"""Shortcut to perform HTTP requests."""
do_url = '{root}{path}'.format(root=self.root_url, path=url)
do_request = getattr(requests, method.lower())
if headers is None:
headers = {}
do_headers = self.base_headers(sobo_email)
do_headers.update(headers)
if data is not None:
do_data = json.dumps(data)
else:
do_data = None
try:
response = do_request(do_url, headers=do_headers, data=do_data,
json=json_data, timeout=self.timeout)
except requests.exceptions.RequestException as exception:
msg = "DocuSign request error: " \
"{method} {url} failed ; " \
"Error: {exception}" \
.format(method=method, url=do_url, exception=exception)
logger.error(msg)
raise exceptions.DocuSignException(msg)
if response.status_code != expected_status_code:
msg = "DocuSign request failed: " \
"{method} {url} returned code {status} " \
"while expecting code {expected}; " \
"Message: {message} ; " \
.format(
method=method,
url=do_url,
status=response.status_code,
expected=expected_status_code,
message=response.text,
)
logger.error(msg)
raise exceptions.DocuSignException(msg)
if response.headers.get('Content-Type', '') \
.startswith('application/json'):
return response.json()
return response.text
def get(self, *args, **kwargs):
"""Shortcut to perform GET operations on DocuSign API."""
return self._request(method='GET', *args, **kwargs)
def post(self, *args, **kwargs):
"""Shortcut to perform POST operations on DocuSign API."""
return self._request(method='POST', *args, **kwargs)
def put(self, *args, **kwargs):
"""Shortcut to perform PUT operations on DocuSign API."""
return self._request(method='PUT', *args, **kwargs)
def delete(self, *args, **kwargs):
"""Shortcut to perform DELETE operations on DocuSign API."""
return self._request(method='DELETE', *args, **kwargs)
def login_information(self):
"""Return dictionary of /login_information.
Populate :attr:`account_id` and :attr:`account_url`.
"""
url = '/login_information'
headers = {
}
data = self.get(url, headers=headers)
self.account_id = data['loginAccounts'][0]['accountId']
self.account_url = '{root}/accounts/{account}'.format(
root=self.root_url,
account=self.account_id)
return data
@classmethod
def oauth2_token_request(cls, root_url, username, password,
integrator_key):
url = root_url + '/oauth2/token'
data = {
'grant_type': 'password',
'client_id': integrator_key,
'username': username,
'password': password,
'scope': 'api',
}
headers = {
'Accept': 'application/json',
'Content-Type': 'application/x-www-form-urlencoded',
}
response = requests.post(url, headers=headers, data=data)
if response.status_code != 200:
raise exceptions.DocuSignOAuth2Exception(response.json())
return response.json()['access_token']
@classmethod
def oauth2_token_revoke(cls, root_url, token):
url = root_url + '/oauth2/revoke'
data = {
'token': token,
}
headers = {
'Accept': 'application/json',
'Content-Type': 'application/x-www-form-urlencoded',
}
response = requests.post(url, headers=headers, data=data)
if response.status_code != 200:
raise exceptions.DocuSignOAuth2Exception(response.json())
def get_account_information(self, account_id=None):
"""Return dictionary of /accounts/:accountId.
Uses :attr:`account_id` (see :meth:`login_information`) if
``account_id`` is ``None``.
"""
if account_id is None:
account_id = self.account_id
url = self.account_url
else:
            url = '/accounts/{accountId}/'.format(accountId=account_id)
return self.get(url)
def get_account_provisioning(self):
"""Return dictionary of /accounts/provisioning."""
url = '/accounts/provisioning'
headers = {
'X-DocuSign-AppToken': self.app_token,
}
return self.get(url, headers=headers)
def post_account(self, data):
"""Create account."""
url = '/accounts'
return self.post(url, data=data, expected_status_code=201)
def delete_account(self, accountId):
"""Create account."""
url = '/accounts/{accountId}'.format(accountId=accountId)
data = self.delete(url)
return data.strip() == ''
def _create_envelope_from_documents_request(self, envelope):
"""Return parts of the POST request for /envelopes.
.. warning::
Only one document is supported at the moment. This is a limitation
of `pydocusign`, not of `DocuSign`.
"""
data = envelope.to_dict()
documents = []
for document in envelope.documents:
documents.append({
"documentId": document.documentId,
"name": document.name,
"fileExtension": "pdf",
"documentBase64": base64.b64encode(
document.data.read()).decode('utf-8')
})
data['documents'] = documents
return data
def _create_envelope_from_template_request(self, envelope):
"""Return parts of the POST request for /envelopes,
for creating an envelope from a template.
"""
return envelope.to_dict()
def _create_envelope(self, envelope, data):
"""POST to /envelopes and return created envelope ID.
Called by ``create_envelope_from_document`` and
``create_envelope_from_template`` methods.
"""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes'.format(
accountId=self.account_id)
response_data = self._request(
url, method='POST', json_data=data, expected_status_code=201)
if not envelope.client:
envelope.client = self
if not envelope.envelopeId:
envelope.envelopeId = response_data['envelopeId']
return response_data['envelopeId']
def create_envelope_from_documents(self, envelope):
"""POST to /envelopes and return created envelope ID.
If ``envelope`` has no (or empty) ``envelopeId`` attribute, this
method sets the value.
If ``envelope`` has no (or empty) ``client`` attribute, this method
sets the value.
"""
data = self._create_envelope_from_documents_request(envelope)
return self._create_envelope(envelope, data)
def create_envelope_from_document(self, envelope):
warnings.warn("This method will be deprecated, use "
"create_envelope_from_documents instead.",
DeprecationWarning)
data = self._create_envelope_from_documents_request(envelope)
return self._create_envelope(envelope, data)
def create_envelope_from_template(self, envelope):
"""POST to /envelopes and return created envelope ID.
If ``envelope`` has no (or empty) ``envelopeId`` attribute, this
method sets the value.
If ``envelope`` has no (or empty) ``client`` attribute, this method
sets the value.
"""
data = self._create_envelope_from_template_request(envelope)
return self._create_envelope(envelope, data)
def void_envelope(self, envelopeId, voidedReason):
"""PUT to /{account}/envelopes/{envelopeId} with 'voided' status and
voidedReason, and return JSON."""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
data = {
'status': 'voided',
'voidedReason': voidedReason
}
return self.put(url, data=data)
def get_envelope(self, envelopeId):
"""GET {account}/envelopes/{envelopeId} and return JSON."""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
return self.get(url)
def get_envelope_recipients(self, envelopeId):
"""GET {account}/envelopes/{envelopeId}/recipients and return JSON."""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/recipients' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
return self.get(url)
def post_recipient_view(self, authenticationMethod=None,
clientUserId='', email='', envelopeId='',
returnUrl='', userId='', userName=''):
"""POST to {account}/envelopes/{envelopeId}/views/recipient.
This is the method to start embedded signing for recipient.
Return JSON from DocuSign response.
"""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/views/recipient' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
if authenticationMethod is None:
authenticationMethod = 'none'
data = {
'authenticationMethod': authenticationMethod,
'clientUserId': clientUserId,
'email': email,
'envelopeId': envelopeId,
'returnUrl': returnUrl,
'userId': userId,
'userName': userName,
}
return self.post(url, data=data, expected_status_code=201)
def get_envelope_document_list(self, envelopeId):
"""GET the list of envelope's documents."""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/documents' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
data = self.get(url)
return data['envelopeDocuments']
def get_envelope_document(self, envelopeId, documentId):
"""Download one document in envelope, return file-like object."""
if not self.account_url:
self.login_information()
url = '{root}/accounts/{accountId}/envelopes/{envelopeId}' \
'/documents/{documentId}' \
.format(root=self.root_url,
accountId=self.account_id,
envelopeId=envelopeId,
documentId=documentId)
headers = self.base_headers()
response = requests.get(url, headers=headers, stream=True)
return response.raw
def get_template(self, templateId):
"""GET the definition of the template."""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/templates/{templateId}' \
.format(accountId=self.account_id,
templateId=templateId)
return self.get(url)
def get_connect_failures(self):
"""GET a list of DocuSign Connect failures."""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/connect/failures' \
.format(accountId=self.account_id)
return self.get(url)['failures']
def add_envelope_recipients(self, envelopeId, recipients,
resend_envelope=False):
"""Add one or more recipients to an envelope
DocuSign reference:
https://docs.docusign.com/esign/restapi/Envelopes/EnvelopeRecipients/create/
"""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/recipients' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
if resend_envelope:
url += '?resend_envelope=true'
data = {'signers': [recipient.to_dict() for recipient in recipients]}
return self.post(url, data=data)
def update_envelope_recipients(self, envelopeId, recipients,
resend_envelope=False):
"""Modify recipients in a draft envelope or correct recipient information
for an in process envelope
DocuSign reference:
https://docs.docusign.com/esign/restapi/Envelopes/EnvelopeRecipients/update/
"""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/recipients' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
if resend_envelope:
url += '?resend_envelope=true'
data = {'signers': [recipient.to_dict() for recipient in recipients]}
return self.put(url, data=data)
def delete_envelope_recipient(self, envelopeId, recipientId):
"""Deletes one or more recipients from a draft or sent envelope.
DocuSign reference:
https://docs.docusign.com/esign/restapi/Envelopes/EnvelopeRecipients/delete/
"""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/recipients/' \
'{recipientId}'.format(accountId=self.account_id,
envelopeId=envelopeId,
recipientId=recipientId)
return self.delete(url)
def delete_envelope_recipients(self, envelopeId, recipientIds):
"""Deletes one or more recipients from a draft or sent envelope.
DocuSign reference:
https://docs.docusign.com/esign/restapi/Envelopes/EnvelopeRecipients/deleteList/
"""
if not self.account_url:
self.login_information()
url = '/accounts/{accountId}/envelopes/{envelopeId}/recipients' \
.format(accountId=self.account_id,
envelopeId=envelopeId)
data = {'signers': [{'recipientId': id_} for id_ in recipientIds]}
return self.delete(url, data=data)
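# --- Illustrative usage (added note, not part of the original module) ---
# A minimal, hedged sketch of how this client is typically driven; the
# credentials and root URL below are placeholders, and envelope creation
# additionally needs pydocusign Envelope/Document/Signer models.
#
#     client = DocuSignClient(
#         root_url='https://demo.docusign.net/restapi/v2',
#         username='[email protected]',
#         password='secret',
#         integrator_key='INTEGRATOR-KEY')
#     client.login_information()       # populates account_id and account_url
#     documents = client.get_envelope_document_list('an-envelope-id')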
| bsd-3-clause | 6,970,222,259,278,829,000 | 8,976,442,587,090,064,000 | 36.259325 | 148 | 0.576727 | false |
Thhhza/XlsxWriter | examples/chart_gradient.py | 9 | 1685 | #######################################################################
#
# An example of creating an Excel charts with gradient fills using
# Python and XlsxWriter.
#
# Copyright 2013-2015, John McNamara, [email protected]
#
import xlsxwriter
workbook = xlsxwriter.Workbook('chart_gradient.xlsx')
worksheet = workbook.add_worksheet()
bold = workbook.add_format({'bold': 1})
# Add the worksheet data that the charts will refer to.
headings = ['Number', 'Batch 1', 'Batch 2']
data = [
[2, 3, 4, 5, 6, 7],
[10, 40, 50, 20, 10, 50],
[30, 60, 70, 50, 40, 30],
]
worksheet.write_row('A1', headings, bold)
worksheet.write_column('A2', data[0])
worksheet.write_column('B2', data[1])
worksheet.write_column('C2', data[2])
# Create a new column chart.
chart = workbook.add_chart({'type': 'column'})
# Configure the first series, including a gradient.
chart.add_series({
'name': '=Sheet1!$B$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$B$2:$B$7',
'gradient': {'colors': ['#963735', '#F1DCDB']}
})
# Configure the second series, including a gradient.
chart.add_series({
'name': '=Sheet1!$C$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$C$2:$C$7',
'gradient': {'colors': ['#E36C0A', '#FCEADA']}
})
# Set a gradient for the plotarea.
chart.set_plotarea({
'gradient': {'colors': ['#FFEFD1', '#F0EBD5', '#B69F66']}
})
# Add some axis labels.
chart.set_x_axis({'name': 'Test number'})
chart.set_y_axis({'name': 'Sample length (mm)'})
# Turn off the chart legend.
chart.set_legend({'none': True})
# Insert the chart into the worksheet.
worksheet.insert_chart('E2', chart)
workbook.close()
| bsd-2-clause | 2,255,965,720,607,864,000 | -7,412,584,165,937,901,000 | 25.746032 | 71 | 0.605341 | false |
wangsai/oppia | core/platform/email/gae_email_services_test.py | 15 | 1874 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the GAE mail API wrapper."""
__author__ = 'Sean Lip'
from core.platform.email import gae_email_services
from core.tests import test_utils
import feconf
class EmailTests(test_utils.GenericTestBase):
"""Tests for sending emails."""
def test_sending_email_to_admin(self):
# Emails are not sent if the CAN_SEND_EMAILS_TO_ADMIN setting
# is not turned on.
with self.swap(feconf, 'CAN_SEND_EMAILS_TO_ADMIN', False):
gae_email_services.send_mail_to_admin(
'[email protected]', 'subject', 'body')
messages = self.mail_stub.get_sent_messages(
to=feconf.ADMIN_EMAIL_ADDRESS)
self.assertEqual(0, len(messages))
with self.swap(feconf, 'CAN_SEND_EMAILS_TO_ADMIN', True):
gae_email_services.send_mail_to_admin(
'[email protected]', 'subject', 'body')
messages = self.mail_stub.get_sent_messages(
to=feconf.ADMIN_EMAIL_ADDRESS)
self.assertEqual(1, len(messages))
self.assertEqual(feconf.ADMIN_EMAIL_ADDRESS, messages[0].to)
self.assertIn(
'(Sent from %s)' % self.EXPECTED_TEST_APP_ID,
messages[0].body.decode())
| apache-2.0 | 3,494,695,631,199,760,000 | -7,901,414,713,319,450,000 | 37.244898 | 74 | 0.656884 | false |
bdoner/SickRage | lib/unidecode/x08a.py | 253 | 4647 | data = (
'Yan ', # 0x00
'Yan ', # 0x01
'Ding ', # 0x02
'Fu ', # 0x03
'Qiu ', # 0x04
'Qiu ', # 0x05
'Jiao ', # 0x06
'Hong ', # 0x07
'Ji ', # 0x08
'Fan ', # 0x09
'Xun ', # 0x0a
'Diao ', # 0x0b
'Hong ', # 0x0c
'Cha ', # 0x0d
'Tao ', # 0x0e
'Xu ', # 0x0f
'Jie ', # 0x10
'Yi ', # 0x11
'Ren ', # 0x12
'Xun ', # 0x13
'Yin ', # 0x14
'Shan ', # 0x15
'Qi ', # 0x16
'Tuo ', # 0x17
'Ji ', # 0x18
'Xun ', # 0x19
'Yin ', # 0x1a
'E ', # 0x1b
'Fen ', # 0x1c
'Ya ', # 0x1d
'Yao ', # 0x1e
'Song ', # 0x1f
'Shen ', # 0x20
'Yin ', # 0x21
'Xin ', # 0x22
'Jue ', # 0x23
'Xiao ', # 0x24
'Ne ', # 0x25
'Chen ', # 0x26
'You ', # 0x27
'Zhi ', # 0x28
'Xiong ', # 0x29
'Fang ', # 0x2a
'Xin ', # 0x2b
'Chao ', # 0x2c
'She ', # 0x2d
'Xian ', # 0x2e
'Sha ', # 0x2f
'Tun ', # 0x30
'Xu ', # 0x31
'Yi ', # 0x32
'Yi ', # 0x33
'Su ', # 0x34
'Chi ', # 0x35
'He ', # 0x36
'Shen ', # 0x37
'He ', # 0x38
'Xu ', # 0x39
'Zhen ', # 0x3a
'Zhu ', # 0x3b
'Zheng ', # 0x3c
'Gou ', # 0x3d
'Zi ', # 0x3e
'Zi ', # 0x3f
'Zhan ', # 0x40
'Gu ', # 0x41
'Fu ', # 0x42
'Quan ', # 0x43
'Die ', # 0x44
'Ling ', # 0x45
'Di ', # 0x46
'Yang ', # 0x47
'Li ', # 0x48
'Nao ', # 0x49
'Pan ', # 0x4a
'Zhou ', # 0x4b
'Gan ', # 0x4c
'Yi ', # 0x4d
'Ju ', # 0x4e
'Ao ', # 0x4f
'Zha ', # 0x50
'Tuo ', # 0x51
'Yi ', # 0x52
'Qu ', # 0x53
'Zhao ', # 0x54
'Ping ', # 0x55
'Bi ', # 0x56
'Xiong ', # 0x57
'Qu ', # 0x58
'Ba ', # 0x59
'Da ', # 0x5a
'Zu ', # 0x5b
'Tao ', # 0x5c
'Zhu ', # 0x5d
'Ci ', # 0x5e
'Zhe ', # 0x5f
'Yong ', # 0x60
'Xu ', # 0x61
'Xun ', # 0x62
'Yi ', # 0x63
'Huang ', # 0x64
'He ', # 0x65
'Shi ', # 0x66
'Cha ', # 0x67
'Jiao ', # 0x68
'Shi ', # 0x69
'Hen ', # 0x6a
'Cha ', # 0x6b
'Gou ', # 0x6c
'Gui ', # 0x6d
'Quan ', # 0x6e
'Hui ', # 0x6f
'Jie ', # 0x70
'Hua ', # 0x71
'Gai ', # 0x72
'Xiang ', # 0x73
'Wei ', # 0x74
'Shen ', # 0x75
'Chou ', # 0x76
'Tong ', # 0x77
'Mi ', # 0x78
'Zhan ', # 0x79
'Ming ', # 0x7a
'E ', # 0x7b
'Hui ', # 0x7c
'Yan ', # 0x7d
'Xiong ', # 0x7e
'Gua ', # 0x7f
'Er ', # 0x80
'Beng ', # 0x81
'Tiao ', # 0x82
'Chi ', # 0x83
'Lei ', # 0x84
'Zhu ', # 0x85
'Kuang ', # 0x86
'Kua ', # 0x87
'Wu ', # 0x88
'Yu ', # 0x89
'Teng ', # 0x8a
'Ji ', # 0x8b
'Zhi ', # 0x8c
'Ren ', # 0x8d
'Su ', # 0x8e
'Lang ', # 0x8f
'E ', # 0x90
'Kuang ', # 0x91
'E ', # 0x92
'Shi ', # 0x93
'Ting ', # 0x94
'Dan ', # 0x95
'Bo ', # 0x96
'Chan ', # 0x97
'You ', # 0x98
'Heng ', # 0x99
'Qiao ', # 0x9a
'Qin ', # 0x9b
'Shua ', # 0x9c
'An ', # 0x9d
'Yu ', # 0x9e
'Xiao ', # 0x9f
'Cheng ', # 0xa0
'Jie ', # 0xa1
'Xian ', # 0xa2
'Wu ', # 0xa3
'Wu ', # 0xa4
'Gao ', # 0xa5
'Song ', # 0xa6
'Pu ', # 0xa7
'Hui ', # 0xa8
'Jing ', # 0xa9
'Shuo ', # 0xaa
'Zhen ', # 0xab
'Shuo ', # 0xac
'Du ', # 0xad
'Yasashi ', # 0xae
'Chang ', # 0xaf
'Shui ', # 0xb0
'Jie ', # 0xb1
'Ke ', # 0xb2
'Qu ', # 0xb3
'Cong ', # 0xb4
'Xiao ', # 0xb5
'Sui ', # 0xb6
'Wang ', # 0xb7
'Xuan ', # 0xb8
'Fei ', # 0xb9
'Chi ', # 0xba
'Ta ', # 0xbb
'Yi ', # 0xbc
'Na ', # 0xbd
'Yin ', # 0xbe
'Diao ', # 0xbf
'Pi ', # 0xc0
'Chuo ', # 0xc1
'Chan ', # 0xc2
'Chen ', # 0xc3
'Zhun ', # 0xc4
'Ji ', # 0xc5
'Qi ', # 0xc6
'Tan ', # 0xc7
'Zhui ', # 0xc8
'Wei ', # 0xc9
'Ju ', # 0xca
'Qing ', # 0xcb
'Jian ', # 0xcc
'Zheng ', # 0xcd
'Ze ', # 0xce
'Zou ', # 0xcf
'Qian ', # 0xd0
'Zhuo ', # 0xd1
'Liang ', # 0xd2
'Jian ', # 0xd3
'Zhu ', # 0xd4
'Hao ', # 0xd5
'Lun ', # 0xd6
'Shen ', # 0xd7
'Biao ', # 0xd8
'Huai ', # 0xd9
'Pian ', # 0xda
'Yu ', # 0xdb
'Die ', # 0xdc
'Xu ', # 0xdd
'Pian ', # 0xde
'Shi ', # 0xdf
'Xuan ', # 0xe0
'Shi ', # 0xe1
'Hun ', # 0xe2
'Hua ', # 0xe3
'E ', # 0xe4
'Zhong ', # 0xe5
'Di ', # 0xe6
'Xie ', # 0xe7
'Fu ', # 0xe8
'Pu ', # 0xe9
'Ting ', # 0xea
'Jian ', # 0xeb
'Qi ', # 0xec
'Yu ', # 0xed
'Zi ', # 0xee
'Chuan ', # 0xef
'Xi ', # 0xf0
'Hui ', # 0xf1
'Yin ', # 0xf2
'An ', # 0xf3
'Xian ', # 0xf4
'Nan ', # 0xf5
'Chen ', # 0xf6
'Feng ', # 0xf7
'Zhu ', # 0xf8
'Yang ', # 0xf9
'Yan ', # 0xfa
'Heng ', # 0xfb
'Xuan ', # 0xfc
'Ge ', # 0xfd
'Nuo ', # 0xfe
'Qi ', # 0xff
)
| gpl-3.0 | 7,074,482,303,016,185,000 | -1,897,620,129,772,546,000 | 17.011628 | 21 | 0.392511 | false |
kineticadb/kinetica-api-python | gpudb/packages/avro/avro_py2/ipc.py | 2 | 18070 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Support for inter-process calls.
"""
import httplib
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from avro import io
from avro import protocol
from avro import schema
#
# Constants
#
# Handshake schema is pulled in during build
HANDSHAKE_REQUEST_SCHEMA = schema.parse("""
{
"type": "record",
"name": "HandshakeRequest", "namespace":"org.apache.avro.ipc",
"fields": [
{"name": "clientHash",
"type": {"type": "fixed", "name": "MD5", "size": 16}},
{"name": "clientProtocol", "type": ["null", "string"]},
{"name": "serverHash", "type": "MD5"},
{"name": "meta", "type": ["null", {"type": "map", "values": "bytes"}]}
]
}
""")
HANDSHAKE_RESPONSE_SCHEMA = schema.parse("""
{
"type": "record",
"name": "HandshakeResponse", "namespace": "org.apache.avro.ipc",
"fields": [
{"name": "match",
"type": {"type": "enum", "name": "HandshakeMatch",
"symbols": ["BOTH", "CLIENT", "NONE"]}},
{"name": "serverProtocol",
"type": ["null", "string"]},
{"name": "serverHash",
"type": ["null", {"type": "fixed", "name": "MD5", "size": 16}]},
{"name": "meta",
"type": ["null", {"type": "map", "values": "bytes"}]}
]
}
""")
HANDSHAKE_REQUESTOR_WRITER = io.DatumWriter(HANDSHAKE_REQUEST_SCHEMA)
HANDSHAKE_REQUESTOR_READER = io.DatumReader(HANDSHAKE_RESPONSE_SCHEMA)
HANDSHAKE_RESPONDER_WRITER = io.DatumWriter(HANDSHAKE_RESPONSE_SCHEMA)
HANDSHAKE_RESPONDER_READER = io.DatumReader(HANDSHAKE_REQUEST_SCHEMA)
META_SCHEMA = schema.parse('{"type": "map", "values": "bytes"}')
META_WRITER = io.DatumWriter(META_SCHEMA)
META_READER = io.DatumReader(META_SCHEMA)
SYSTEM_ERROR_SCHEMA = schema.parse('["string"]')
# protocol cache
REMOTE_HASHES = {}
REMOTE_PROTOCOLS = {}
BIG_ENDIAN_INT_STRUCT = io.struct_class('!I')
BUFFER_HEADER_LENGTH = 4
BUFFER_SIZE = 8192
#
# Exceptions
#
class AvroRemoteException(schema.AvroException):
"""
Raised when an error message is sent by an Avro requestor or responder.
"""
def __init__(self, fail_msg=None):
schema.AvroException.__init__(self, fail_msg)
class ConnectionClosedException(schema.AvroException):
pass
#
# Base IPC Classes (Requestor/Responder)
#
class BaseRequestor(object):
"""Base class for the client side of a protocol interaction."""
def __init__(self, local_protocol, transceiver):
self._local_protocol = local_protocol
self._transceiver = transceiver
self._remote_protocol = None
self._remote_hash = None
self._send_protocol = None
# read-only properties
local_protocol = property(lambda self: self._local_protocol)
transceiver = property(lambda self: self._transceiver)
# read/write properties
def set_remote_protocol(self, new_remote_protocol):
self._remote_protocol = new_remote_protocol
REMOTE_PROTOCOLS[self.transceiver.remote_name] = self.remote_protocol
remote_protocol = property(lambda self: self._remote_protocol,
set_remote_protocol)
def set_remote_hash(self, new_remote_hash):
self._remote_hash = new_remote_hash
REMOTE_HASHES[self.transceiver.remote_name] = self.remote_hash
remote_hash = property(lambda self: self._remote_hash, set_remote_hash)
def set_send_protocol(self, new_send_protocol):
self._send_protocol = new_send_protocol
send_protocol = property(lambda self: self._send_protocol, set_send_protocol)
def request(self, message_name, request_datum):
"""
Writes a request message and reads a response or error message.
"""
# build handshake and call request
buffer_writer = StringIO()
buffer_encoder = io.BinaryEncoder(buffer_writer)
self.write_handshake_request(buffer_encoder)
self.write_call_request(message_name, request_datum, buffer_encoder)
# send the handshake and call request; block until call response
call_request = buffer_writer.getvalue()
return self.issue_request(call_request, message_name, request_datum)
def write_handshake_request(self, encoder):
local_hash = self.local_protocol.md5
remote_name = self.transceiver.remote_name
remote_hash = REMOTE_HASHES.get(remote_name)
if remote_hash is None:
remote_hash = local_hash
self.remote_protocol = self.local_protocol
request_datum = {}
request_datum['clientHash'] = local_hash
request_datum['serverHash'] = remote_hash
if self.send_protocol:
request_datum['clientProtocol'] = str(self.local_protocol)
HANDSHAKE_REQUESTOR_WRITER.write(request_datum, encoder)
def write_call_request(self, message_name, request_datum, encoder):
"""
The format of a call request is:
* request metadata, a map with values of type bytes
* the message name, an Avro string, followed by
* the message parameters. Parameters are serialized according to
the message's request declaration.
"""
# request metadata (not yet implemented)
request_metadata = {}
META_WRITER.write(request_metadata, encoder)
# message name
message = self.local_protocol.messages.get(message_name)
if message is None:
raise schema.AvroException('Unknown message: %s' % message_name)
encoder.write_utf8(message.name)
# message parameters
self.write_request(message.request, request_datum, encoder)
def write_request(self, request_schema, request_datum, encoder):
datum_writer = io.DatumWriter(request_schema)
datum_writer.write(request_datum, encoder)
def read_handshake_response(self, decoder):
handshake_response = HANDSHAKE_REQUESTOR_READER.read(decoder)
match = handshake_response.get('match')
if match == 'BOTH':
self.send_protocol = False
return True
elif match == 'CLIENT':
if self.send_protocol:
raise schema.AvroException('Handshake failure.')
self.remote_protocol = protocol.parse(
handshake_response.get('serverProtocol'))
self.remote_hash = handshake_response.get('serverHash')
self.send_protocol = False
return True
elif match == 'NONE':
if self.send_protocol:
raise schema.AvroException('Handshake failure.')
self.remote_protocol = protocol.parse(
handshake_response.get('serverProtocol'))
self.remote_hash = handshake_response.get('serverHash')
self.send_protocol = True
return False
else:
raise schema.AvroException('Unexpected match: %s' % match)
def read_call_response(self, message_name, decoder):
"""
The format of a call response is:
* response metadata, a map with values of type bytes
* a one-byte error flag boolean, followed by either:
o if the error flag is false,
the message response, serialized per the message's response schema.
o if the error flag is true,
the error, serialized per the message's error union schema.
"""
# response metadata
response_metadata = META_READER.read(decoder)
# remote response schema
remote_message_schema = self.remote_protocol.messages.get(message_name)
if remote_message_schema is None:
raise schema.AvroException('Unknown remote message: %s' % message_name)
# local response schema
local_message_schema = self.local_protocol.messages.get(message_name)
if local_message_schema is None:
raise schema.AvroException('Unknown local message: %s' % message_name)
# error flag
if not decoder.read_boolean():
writers_schema = remote_message_schema.response
readers_schema = local_message_schema.response
return self.read_response(writers_schema, readers_schema, decoder)
else:
writers_schema = remote_message_schema.errors
readers_schema = local_message_schema.errors
raise self.read_error(writers_schema, readers_schema, decoder)
def read_response(self, writers_schema, readers_schema, decoder):
datum_reader = io.DatumReader(writers_schema, readers_schema)
result = datum_reader.read(decoder)
return result
def read_error(self, writers_schema, readers_schema, decoder):
datum_reader = io.DatumReader(writers_schema, readers_schema)
return AvroRemoteException(datum_reader.read(decoder))
class Requestor(BaseRequestor):
def issue_request(self, call_request, message_name, request_datum):
call_response = self.transceiver.transceive(call_request)
# process the handshake and call response
buffer_decoder = io.BinaryDecoder(StringIO(call_response))
call_response_exists = self.read_handshake_response(buffer_decoder)
if call_response_exists:
return self.read_call_response(message_name, buffer_decoder)
else:
return self.request(message_name, request_datum)
class Responder(object):
"""Base class for the server side of a protocol interaction."""
def __init__(self, local_protocol):
self._local_protocol = local_protocol
self._local_hash = self.local_protocol.md5
self._protocol_cache = {}
self.set_protocol_cache(self.local_hash, self.local_protocol)
# read-only properties
local_protocol = property(lambda self: self._local_protocol)
local_hash = property(lambda self: self._local_hash)
protocol_cache = property(lambda self: self._protocol_cache)
# utility functions to manipulate protocol cache
def get_protocol_cache(self, hash):
return self.protocol_cache.get(hash)
def set_protocol_cache(self, hash, protocol):
self.protocol_cache[hash] = protocol
def respond(self, call_request):
"""
Called by a server to deserialize a request, compute and serialize
a response or error. Compare to 'handle()' in Thrift.
"""
buffer_reader = StringIO(call_request)
buffer_decoder = io.BinaryDecoder(buffer_reader)
buffer_writer = StringIO()
buffer_encoder = io.BinaryEncoder(buffer_writer)
error = None
response_metadata = {}
try:
remote_protocol = self.process_handshake(buffer_decoder, buffer_encoder)
# handshake failure
if remote_protocol is None:
return buffer_writer.getvalue()
# read request using remote protocol
request_metadata = META_READER.read(buffer_decoder)
remote_message_name = buffer_decoder.read_utf8()
# get remote and local request schemas so we can do
# schema resolution (one fine day)
remote_message = remote_protocol.messages.get(remote_message_name)
if remote_message is None:
fail_msg = 'Unknown remote message: %s' % remote_message_name
raise schema.AvroException(fail_msg)
local_message = self.local_protocol.messages.get(remote_message_name)
if local_message is None:
fail_msg = 'Unknown local message: %s' % remote_message_name
raise schema.AvroException(fail_msg)
writers_schema = remote_message.request
readers_schema = local_message.request
request = self.read_request(writers_schema, readers_schema,
buffer_decoder)
# perform server logic
try:
response = self.invoke(local_message, request)
except AvroRemoteException as e:
error = e
except Exception as e:
error = AvroRemoteException(str(e))
# write response using local protocol
META_WRITER.write(response_metadata, buffer_encoder)
buffer_encoder.write_boolean(error is not None)
if error is None:
writers_schema = local_message.response
self.write_response(writers_schema, response, buffer_encoder)
else:
writers_schema = local_message.errors
self.write_error(writers_schema, error, buffer_encoder)
except schema.AvroException as e:
error = AvroRemoteException(str(e))
buffer_encoder = io.BinaryEncoder(StringIO())
META_WRITER.write(response_metadata, buffer_encoder)
buffer_encoder.write_boolean(True)
self.write_error(SYSTEM_ERROR_SCHEMA, error, buffer_encoder)
return buffer_writer.getvalue()
def process_handshake(self, decoder, encoder):
handshake_request = HANDSHAKE_RESPONDER_READER.read(decoder)
handshake_response = {}
# determine the remote protocol
client_hash = handshake_request.get('clientHash')
client_protocol = handshake_request.get('clientProtocol')
remote_protocol = self.get_protocol_cache(client_hash)
if remote_protocol is None and client_protocol is not None:
remote_protocol = protocol.parse(client_protocol)
self.set_protocol_cache(client_hash, remote_protocol)
# evaluate remote's guess of the local protocol
server_hash = handshake_request.get('serverHash')
if self.local_hash == server_hash:
if remote_protocol is None:
handshake_response['match'] = 'NONE'
else:
handshake_response['match'] = 'BOTH'
else:
if remote_protocol is None:
handshake_response['match'] = 'NONE'
else:
handshake_response['match'] = 'CLIENT'
if handshake_response['match'] != 'BOTH':
handshake_response['serverProtocol'] = str(self.local_protocol)
handshake_response['serverHash'] = self.local_hash
HANDSHAKE_RESPONDER_WRITER.write(handshake_response, encoder)
return remote_protocol
def invoke(self, local_message, request):
"""
    Actual work done by server: cf. handler in thrift.
"""
pass
def read_request(self, writers_schema, readers_schema, decoder):
datum_reader = io.DatumReader(writers_schema, readers_schema)
return datum_reader.read(decoder)
def write_response(self, writers_schema, response_datum, encoder):
datum_writer = io.DatumWriter(writers_schema)
datum_writer.write(response_datum, encoder)
def write_error(self, writers_schema, error_exception, encoder):
datum_writer = io.DatumWriter(writers_schema)
datum_writer.write(str(error_exception), encoder)
#
# Utility classes
#
class FramedReader(object):
"""Wrapper around a file-like object to read framed data."""
def __init__(self, reader):
self._reader = reader
# read-only properties
reader = property(lambda self: self._reader)
def read_framed_message(self):
message = []
while True:
buffer = StringIO()
buffer_length = self._read_buffer_length()
if buffer_length == 0:
return ''.join(message)
while buffer.tell() < buffer_length:
chunk = self.reader.read(buffer_length - buffer.tell())
if chunk == '':
raise ConnectionClosedException("Reader read 0 bytes.")
buffer.write(chunk)
message.append(buffer.getvalue())
def _read_buffer_length(self):
read = self.reader.read(BUFFER_HEADER_LENGTH)
if read == '':
raise ConnectionClosedException("Reader read 0 bytes.")
return BIG_ENDIAN_INT_STRUCT.unpack(read)[0]
class FramedWriter(object):
"""Wrapper around a file-like object to write framed data."""
def __init__(self, writer):
self._writer = writer
# read-only properties
writer = property(lambda self: self._writer)
def write_framed_message(self, message):
message_length = len(message)
total_bytes_sent = 0
while message_length - total_bytes_sent > 0:
if message_length - total_bytes_sent > BUFFER_SIZE:
buffer_length = BUFFER_SIZE
else:
buffer_length = message_length - total_bytes_sent
self.write_buffer(message[total_bytes_sent:
(total_bytes_sent + buffer_length)])
total_bytes_sent += buffer_length
# A message is always terminated by a zero-length buffer.
self.write_buffer_length(0)
def write_buffer(self, chunk):
buffer_length = len(chunk)
self.write_buffer_length(buffer_length)
self.writer.write(chunk)
def write_buffer_length(self, n):
self.writer.write(BIG_ENDIAN_INT_STRUCT.pack(n))
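# Illustrative framing layout (added note, not part of the original module):
# each buffer is preceded by a 4-byte big-endian length and a message is
# terminated by a zero-length buffer, so b'hello' is framed roughly as
#
#     \x00\x00\x00\x05 hello \x00\x00\x00\x00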
#
# Transceiver Implementations
#
class HTTPTransceiver(object):
"""
A simple HTTP-based transceiver implementation.
Useful for clients but not for servers
"""
def __init__(self, host, port, req_resource='/'):
self.req_resource = req_resource
self.conn = httplib.HTTPConnection(host, port)
self.conn.connect()
# read-only properties
sock = property(lambda self: self.conn.sock)
remote_name = property(lambda self: self.sock.getsockname())
# read/write properties
def set_conn(self, new_conn):
self._conn = new_conn
conn = property(lambda self: self._conn, set_conn)
req_resource = '/'
def transceive(self, request):
self.write_framed_message(request)
result = self.read_framed_message()
return result
def read_framed_message(self):
response = self.conn.getresponse()
response_reader = FramedReader(response)
framed_message = response_reader.read_framed_message()
response.read() # ensure we're ready for subsequent requests
return framed_message
def write_framed_message(self, message):
req_method = 'POST'
req_headers = {'Content-Type': 'avro/binary'}
req_body_buffer = FramedWriter(StringIO())
req_body_buffer.write_framed_message(message)
req_body = req_body_buffer.writer.getvalue()
self.conn.request(req_method, self.req_resource, req_body, req_headers)
def close(self):
self.conn.close()
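# Illustrative client-side usage (added note, not part of the original module);
# assumes a framed-HTTP Avro responder is listening on the given host/port and
# that `echo_protocol` is an avro.protocol.Protocol defining an "echo" message.
#
#     client = HTTPTransceiver('localhost', 8080, req_resource='/avro')
#     requestor = Requestor(echo_protocol, client)
#     result = requestor.request('echo', {'message': 'hello'})
#     client.close()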
#
# Server Implementations (none yet)
#
| mit | -788,397,100,199,882,500 | -1,300,278,412,153,061,600 | 34.362035 | 79 | 0.687327 | false |
chfoo/cloaked-octo-nemesis | visibli/visibli_url_grab.py | 1 | 14609 | '''Grab Visibli hex shortcodes'''
# Copyright 2013 Christopher Foo <[email protected]>
# Licensed under GPLv3. See COPYING.txt for details.
import argparse
import base64
import collections
import gzip
import html.parser
import http.client
import logging
import logging.handlers
import math
import os
import queue
import random
import re
import sqlite3
import threading
import time
import atexit
_logger = logging.getLogger(__name__)
class UnexpectedResult(ValueError):
pass
class UserAgent(object):
def __init__(self, filename):
self.strings = []
with open(filename, 'rt') as f:
while True:
line = f.readline().strip()
if not line:
break
self.strings.append(line)
self.strings = tuple(self.strings)
_logger.info('Initialized with %d user agents', len(self.strings))
class AbsSineyRateFunc(object):
def __init__(self, avg_rate=1.0):
self._avg_rate = avg_rate
self._amplitude = 1.0 / self._avg_rate * 5.6
self._x = 1.0
def get(self):
y = abs(self._amplitude * math.sin(self._x) * math.sin(self._x ** 2)
/ self._x)
self._x += 0.05
if self._x > 2 * math.pi:
self._x = 1.0
return y
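# Illustrative only (added note): the grabber samples this rate function once
# per request to pick an irregular sleep interval whose long-run average
# roughly matches avg_rate requests per second, e.g.
#
#     rate_func = AbsSineyRateFunc(avg_rate=0.5)
#     time.sleep(rate_func.get())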
class HTTPClientProcessor(threading.Thread):
def __init__(self, request_queue, response_queue, host, port):
threading.Thread.__init__(self)
self.daemon = True
self._request_queue = request_queue
self._response_queue = response_queue
self._http_client = http.client.HTTPConnection(host, port)
self.start()
def run(self):
while True:
path, headers, shortcode = self._request_queue.get()
try:
_logger.debug('Get %s %s', path, headers)
self._http_client.request('GET', path, headers=headers)
response = self._http_client.getresponse()
except http.client.HTTPException:
_logger.exception('Got an http error.')
self._http_client.close()
time.sleep(120)
else:
_logger.debug('Got response %s %s',
response.status, response.reason)
data = response.read()
self._response_queue.put((response, data, shortcode))
class InsertQueue(threading.Thread):
def __init__(self, db_path):
threading.Thread.__init__(self)
self.daemon = True
self._queue = queue.Queue(maxsize=100)
self._event = threading.Event()
self._running = True
self._db_path = db_path
self.start()
def run(self):
self._db = sqlite3.connect(self._db_path)
while self._running:
self._process()
self._event.wait(timeout=10)
def _process(self):
with self._db:
while True:
try:
statement, values = self._queue.get_nowait()
except queue.Empty:
break
_logger.debug('Executing statement')
self._db.execute(statement, values)
def stop(self):
self._running = False
self._event.set()
def add(self, statement, values):
self._queue.put((statement, values))
class VisibliHexURLGrab(object):
def __init__(self, sequential=False, reverse_sequential=False,
avg_items_per_sec=0.5, database_dir='', user_agent_filename=None,
http_client_threads=2, save_reports=False):
db_path = os.path.join(database_dir, 'visibli.db')
self.database_dir = database_dir
self.db = sqlite3.connect(db_path)
self.db.execute('PRAGMA journal_mode=WAL')
with self.db:
self.db.execute('''CREATE TABLE IF NOT EXISTS visibli_hex
(shortcode INTEGER PRIMARY KEY ASC, url TEXT, not_exist INTEGER)
''')
self.host = 'localhost'
self.port = 8123
self.save_reports = save_reports
self.request_queue = queue.Queue(maxsize=1)
self.response_queue = queue.Queue(maxsize=10)
self.http_clients = self.new_clients(http_client_threads)
self.throttle_time = 1
self.sequential = sequential
self.reverse_sequential = reverse_sequential
self.seq_num = 0xffffff if self.reverse_sequential else 0
self.session_count = 0
#self.total_count = self.get_count() or 0
self.total_count = 0
self.user_agent = UserAgent(user_agent_filename)
self.headers = {
'Accept-Encoding': 'gzip',
'Host': 'links.sharedby.co',
}
self.average_deque = collections.deque(maxlen=100)
self.rate_func = AbsSineyRateFunc(avg_items_per_sec)
self.miss_count = 0
self.hit_count = 0
self.insert_queue = InsertQueue(db_path)
atexit.register(self.insert_queue.stop)
def new_clients(self, http_client_threads=2):
return [HTTPClientProcessor(self.request_queue, self.response_queue,
self.host, self.port)
for dummy in range(http_client_threads)]
def shortcode_to_int(self, shortcode):
return int.from_bytes(shortcode, byteorder='big', signed=False)
def new_shortcode(self):
while True:
if self.sequential or self.reverse_sequential:
s = '{:06x}'.format(self.seq_num)
shortcode = base64.b16decode(s.encode(), casefold=True)
if self.reverse_sequential:
self.seq_num -= 1
if self.seq_num < 0:
return None
else:
self.seq_num += 1
if self.seq_num > 0xffffff:
return None
else:
shortcode = os.urandom(3)
rows = self.db.execute('SELECT 1 FROM visibli_hex WHERE '
'shortcode = ? LIMIT 1', [self.shortcode_to_int(shortcode)])
if not len(list(rows)):
return shortcode
def run(self):
self.check_proxy_tor()
while True:
if not self.insert_queue.is_alive():
raise Exception('Insert queue died!')
shortcode = self.new_shortcode()
if shortcode is None:
break
shortcode_str = base64.b16encode(shortcode).lower().decode()
path = 'http://links.sharedby.co/links/{}'.format(shortcode_str)
headers = self.get_headers()
while True:
try:
self.request_queue.put_nowait((path, headers, shortcode))
except queue.Full:
self.read_responses()
else:
break
if self.session_count % 10 == 0:
_logger.info('Session={}, hit={}, total={}, {:.3f} u/s'.format(
self.session_count, self.hit_count,
self.session_count + self.total_count,
self.calc_avg()))
t = self.rate_func.get()
_logger.debug('Sleep {:.3f}'.format(t))
time.sleep(t)
self.read_responses()
_logger.info('Shutting down...')
time.sleep(30)
self.read_responses()
self.insert_queue.stop()
self.insert_queue.join()
def get_headers(self):
d = dict(self.headers)
d['User-Agent'] = random.choice(self.user_agent.strings)
return d
def read_responses(self):
while True:
try:
response, data, shortcode = self.response_queue.get(block=True,
timeout=0.05)
except queue.Empty:
break
self.session_count += 1
shortcode_str = base64.b16encode(shortcode).lower().decode()
try:
url = self.read_response(response, data)
except UnexpectedResult as e:
_logger.warn('Unexpected result %s', e)
if self.save_reports:
try:
self.write_report(e, shortcode_str, response, data)
except:
_logger.exception('Error writing report')
self.throttle(None, force=True)
continue
if not url:
self.add_no_url(shortcode)
self.miss_count += 1
else:
self.add_url(shortcode, url)
self.miss_count = 0
self.hit_count += 1
_logger.info('%s->%s...', shortcode_str,
url[:30] if url else '(none)')
self.throttle(response.status)
def read_response(self, response, data):
if response.getheader('Content-Encoding') == 'gzip':
_logger.debug('Got gzip data')
data = gzip.decompress(data)
if response.status == 301:
url = response.getheader('Location')
return url
elif response.status == 200:
match = re.search(br'<iframe id="[^"]+" src="([^"]+)">', data)
if not match:
raise UnexpectedResult('No iframe found')
url = match.group(1).decode()
url = html.parser.HTMLParser().unescape(url)
return url
elif response.status == 302:
location = response.getheader('Location')
# if location and 'sharedby' not in location \
# and 'visibli' not in location:
if location and location.startswith('http://yahoo.com'):
raise UnexpectedResult(
'Weird 302 redirect to {}'.format(location))
elif not location:
raise UnexpectedResult('No redirect location')
return
else:
raise UnexpectedResult('Unexpected status {}'.format(
response.status))
def throttle(self, status_code, force=False):
if force or 400 <= status_code <= 499 or 500 <= status_code <= 999 \
or self.miss_count > 2:
_logger.info('Throttle %d seconds', self.throttle_time)
time.sleep(self.throttle_time)
self.throttle_time *= 2
self.throttle_time = min(3600, self.throttle_time)
else:
self.throttle_time /= 2
self.throttle_time = min(600, self.throttle_time)
self.throttle_time = max(1, self.throttle_time)
def add_url(self, shortcode, url):
_logger.debug('Insert %s %s', shortcode, url)
self.insert_queue.add('INSERT OR IGNORE INTO visibli_hex VALUES (?, ?, ?)',
[self.shortcode_to_int(shortcode), url, None])
def add_no_url(self, shortcode):
_logger.debug('Mark no url %s', shortcode)
self.insert_queue.add('INSERT OR IGNORE INTO visibli_hex VALUES (?, ?, ?)',
[self.shortcode_to_int(shortcode), None, 1])
def get_count(self):
for row in self.db.execute('SELECT COUNT(ROWID) FROM visibli_hex '
'LIMIT 1'):
return int(row[0])
def calc_avg(self):
self.average_deque.append((self.session_count, time.time()))
try:
avg = ((self.session_count - self.average_deque[0][0])
/ (time.time() - self.average_deque[0][1]))
except ArithmeticError:
avg = 0
return avg
def check_proxy_tor(self):
http_client = http.client.HTTPConnection(self.host, self.port)
http_client.request('GET', 'http://check.torproject.org/',
headers={'Host': 'check.torproject.org'})
response = http_client.getresponse()
data = response.read()
_logger.debug('Check proxy got data=%s', data.decode())
if response.status != 200:
raise UnexpectedResult('Check tor page returned %d',
response.status)
if b'Congratulations. Your browser is configured to use Tor.' \
not in data:
raise UnexpectedResult('Not configured to use tor')
_logger.info('Using tor proxy')
def write_report(self, error, shortcode_str, response, data):
path = os.path.join(self.database_dir,
'report_{:.04f}'.format(time.time()))
_logger.debug('Writing report to %s', path)
with open(path, 'wt') as f:
f.write('Error ')
f.write(str(error))
f.write('\n')
f.write('Code ')
f.write(shortcode_str)
f.write('\n')
f.write(str(response.status))
f.write(response.reason)
f.write('\n')
f.write(str(response.getheaders()))
f.write('\n\nData\n\n')
f.write(str(data))
f.write('\n\nEnd Report\n')
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--sequential', action='store_true')
arg_parser.add_argument('--reverse-sequential', action='store_true')
arg_parser.add_argument('--save-reports', action='store_true')
arg_parser.add_argument('--average-rate', type=float, default=1.0)
arg_parser.add_argument('--quiet', action='store_true')
arg_parser.add_argument('--database-dir', default=os.getcwd())
arg_parser.add_argument('--log-dir', default=os.getcwd())
arg_parser.add_argument('--user-agent-file',
default=os.path.join(os.getcwd(), 'user-agents.txt'))
arg_parser.add_argument('--threads', type=int, default=2)
args = arg_parser.parse_args()
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
if not args.quiet:
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(
logging.Formatter('%(levelname)s %(message)s'))
root_logger.addHandler(console)
log_filename = os.path.join(args.log_dir, 'visibli_url_grab.log')
file_log = logging.handlers.RotatingFileHandler(log_filename,
maxBytes=1048576, backupCount=9)
file_log.setLevel(logging.DEBUG)
file_log.setFormatter(logging.Formatter(
'%(asctime)s %(name)s:%(lineno)d %(levelname)s %(message)s'))
root_logger.addHandler(file_log)
o = VisibliHexURLGrab(sequential=args.sequential,
reverse_sequential=args.reverse_sequential,
database_dir=args.database_dir,
avg_items_per_sec=args.average_rate,
user_agent_filename=args.user_agent_file,
http_client_threads=args.threads,
save_reports=args.save_reports,)
o.run()
| gpl-3.0 | 9,177,461,928,564,408,000 | 2,099,032,498,109,716,500 | 32.126984 | 83 | 0.56075 | false |
eiginn/coreemu | daemon/src/setup.py | 11 | 1148 | # Copyright (c)2010-2012 the Boeing Company.
# See the LICENSE file included in this distribution.
import os, glob
from distutils.core import setup, Extension
netns = Extension("netns", sources = ["netnsmodule.c", "netns.c"])
vcmd = Extension("vcmd",
sources = ["vcmdmodule.c",
"vnode_client.c",
"vnode_chnl.c",
"vnode_io.c",
"vnode_msg.c",
"vnode_cmd.c",
],
library_dirs = ["build/lib"],
libraries = ["ev"])
setup(name = "core-python-netns",
version = "1.0",
description = "Extension modules to support virtual nodes using " \
"Linux network namespaces",
ext_modules = [netns, vcmd],
url = "http://www.nrl.navy.mil/itd/ncs/products/core",
author = "Boeing Research & Technology",
author_email = "[email protected]",
license = "BSD",
long_description="Extension modules and utilities to support virtual " \
"nodes using Linux network namespaces")
| bsd-2-clause | 5,930,463,459,162,983,000 | 6,742,775,293,590,040,000 | 38.586207 | 78 | 0.533101 | false |
aavanian/bokeh | bokeh/tests/test_layouts.py | 5 | 2610 | import bokeh.layouts as lyt
import pytest
from bokeh.core.enums import SizingMode
from bokeh.plotting import figure
from bokeh.layouts import gridplot
from bokeh.models import Column, Row, Spacer
def test_gridplot_merge_tools_flat():
p1, p2, p3, p4 = figure(), figure(), figure(), figure()
lyt.gridplot([[p1, p2], [p3, p4]], merge_tools=True)
for p in p1, p2, p3, p4:
assert p.toolbar_location is None
def test_gridplot_merge_tools_with_None():
p1, p2, p3, p4 = figure(), figure(), figure(), figure()
lyt.gridplot([[p1, None, p2], [p3, p4, None]], merge_tools=True)
for p in p1, p2, p3, p4:
assert p.toolbar_location is None
def test_gridplot_merge_tools_nested():
p1, p2, p3, p4, p5, p6, p7 = figure(), figure(), figure(), figure(), figure(), figure(), figure()
r1 = lyt.row(p1, p2)
r2 = lyt.row(p3, p4)
c = lyt.column(lyt.row(p5), lyt.row(p6))
lyt.gridplot([[r1, r2], [c, p7]], merge_tools=True)
for p in p1, p2, p3, p4, p5, p6, p7:
assert p.toolbar_location is None
def test_gridplot_None():
def p():
p = figure()
p.circle([1, 2, 3], [4, 5, 6])
return p
g = gridplot([[p(), p()], [None, None], [p(), p()]])
assert isinstance(g, Column) and len(g.children) == 2
c = g.children[1]
assert isinstance(c, Column) and len(c.children) == 3
r = c.children[1]
assert isinstance(r, Row) and len(r.children) == 2
s0 = r.children[0]
assert isinstance(s0, Spacer) and s0.width == 0 and s0.height == 0
s1 = r.children[1]
assert isinstance(s1, Spacer) and s1.width == 0 and s1.height == 0
def test_layout_simple():
p1, p2, p3, p4 = figure(), figure(), figure(), figure()
grid = lyt.layout([[p1, p2], [p3, p4]], sizing_mode='fixed')
assert isinstance(grid, lyt.Column)
for row in grid.children:
assert isinstance(row, lyt.Row)
def test_layout_nested():
p1, p2, p3, p4, p5, p6 = figure(), figure(), figure(), figure(), figure(), figure()
grid = lyt.layout([[[p1, p1], [p2, p2]], [[p3, p4], [p5, p6]]], sizing_mode='fixed')
assert isinstance(grid, lyt.Column)
for row in grid.children:
assert isinstance(row, lyt.Row)
for col in row.children:
assert isinstance(col, lyt.Column)
@pytest.mark.parametrize('sizing_mode', SizingMode)
@pytest.mark.unit
def test_layout_sizing_mode(sizing_mode):
p1, p2, p3, p4 = figure(), figure(), figure(), figure()
lyt.layout([[p1, p2], [p3, p4]], sizing_mode=sizing_mode)
for p in p1, p2, p3, p4:
assert p1.sizing_mode == sizing_mode
| bsd-3-clause | -1,137,947,668,013,118,700 | -2,777,957,985,791,521,300 | 27.681319 | 101 | 0.60613 | false |
PetrDlouhy/django | tests/template_tests/filter_tests/test_date.py | 207 | 2534 | from datetime import datetime, time
from django.template.defaultfilters import date
from django.test import SimpleTestCase
from django.utils import timezone
from ..utils import setup
from .timezone_utils import TimezoneTestCase
class DateTests(TimezoneTestCase):
@setup({'date01': '{{ d|date:"m" }}'})
def test_date01(self):
output = self.engine.render_to_string('date01', {'d': datetime(2008, 1, 1)})
self.assertEqual(output, '01')
@setup({'date02': '{{ d|date }}'})
def test_date02(self):
output = self.engine.render_to_string('date02', {'d': datetime(2008, 1, 1)})
self.assertEqual(output, 'Jan. 1, 2008')
@setup({'date03': '{{ d|date:"m" }}'})
def test_date03(self):
"""
#9520: Make sure |date doesn't blow up on non-dates
"""
output = self.engine.render_to_string('date03', {'d': 'fail_string'})
self.assertEqual(output, '')
# ISO date formats
@setup({'date04': '{{ d|date:"o" }}'})
def test_date04(self):
output = self.engine.render_to_string('date04', {'d': datetime(2008, 12, 29)})
self.assertEqual(output, '2009')
@setup({'date05': '{{ d|date:"o" }}'})
def test_date05(self):
output = self.engine.render_to_string('date05', {'d': datetime(2010, 1, 3)})
self.assertEqual(output, '2009')
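    # Note (added): 'o' is the ISO-8601 week-numbering year, which can differ from
    # the calendar year near year boundaries -- 2008-12-29 and 2010-01-03 both fall
    # in ISO year 2009, which is what the two assertions above check.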
# Timezone name
@setup({'date06': '{{ d|date:"e" }}'})
def test_date06(self):
output = self.engine.render_to_string('date06', {'d': datetime(2009, 3, 12, tzinfo=timezone.get_fixed_timezone(30))})
self.assertEqual(output, '+0030')
@setup({'date07': '{{ d|date:"e" }}'})
def test_date07(self):
output = self.engine.render_to_string('date07', {'d': datetime(2009, 3, 12)})
self.assertEqual(output, '')
# #19370: Make sure |date doesn't blow up on a midnight time object
@setup({'date08': '{{ t|date:"H:i" }}'})
def test_date08(self):
output = self.engine.render_to_string('date08', {'t': time(0, 1)})
self.assertEqual(output, '00:01')
@setup({'date09': '{{ t|date:"H:i" }}'})
def test_date09(self):
output = self.engine.render_to_string('date09', {'t': time(0, 0)})
self.assertEqual(output, '00:00')
class FunctionTests(SimpleTestCase):
def test_date(self):
self.assertEqual(date(datetime(2005, 12, 29), "d F Y"), '29 December 2005')
def test_escape_characters(self):
self.assertEqual(date(datetime(2005, 12, 29), r'jS \o\f F'), '29th of December')
| bsd-3-clause | 5,212,996,066,239,570,000 | 2,442,378,535,675,898,000 | 34.690141 | 125 | 0.599842 | false |
icemac/pytest | testing/test_runner_xunit.py | 202 | 7133 | #
# test correct setup/teardowns at
# module, class, and instance level
def test_module_and_function_setup(testdir):
reprec = testdir.inline_runsource("""
modlevel = []
def setup_module(module):
assert not modlevel
module.modlevel.append(42)
def teardown_module(module):
modlevel.pop()
def setup_function(function):
function.answer = 17
def teardown_function(function):
del function.answer
def test_modlevel():
assert modlevel[0] == 42
assert test_modlevel.answer == 17
class TestFromClass:
def test_module(self):
assert modlevel[0] == 42
assert not hasattr(test_modlevel, 'answer')
""")
rep = reprec.matchreport("test_modlevel")
assert rep.passed
rep = reprec.matchreport("test_module")
assert rep.passed
def test_module_setup_failure_no_teardown(testdir):
reprec = testdir.inline_runsource("""
l = []
def setup_module(module):
l.append(1)
0/0
def test_nothing():
pass
def teardown_module(module):
l.append(2)
""")
reprec.assertoutcome(failed=1)
calls = reprec.getcalls("pytest_runtest_setup")
assert calls[0].item.module.l == [1]
def test_setup_function_failure_no_teardown(testdir):
reprec = testdir.inline_runsource("""
modlevel = []
def setup_function(function):
modlevel.append(1)
0/0
def teardown_function(module):
modlevel.append(2)
def test_func():
pass
""")
calls = reprec.getcalls("pytest_runtest_setup")
assert calls[0].item.module.modlevel == [1]
def test_class_setup(testdir):
reprec = testdir.inline_runsource("""
class TestSimpleClassSetup:
clslevel = []
def setup_class(cls):
cls.clslevel.append(23)
def teardown_class(cls):
cls.clslevel.pop()
def test_classlevel(self):
assert self.clslevel[0] == 23
class TestInheritedClassSetupStillWorks(TestSimpleClassSetup):
def test_classlevel_anothertime(self):
assert self.clslevel == [23]
def test_cleanup():
assert not TestSimpleClassSetup.clslevel
assert not TestInheritedClassSetupStillWorks.clslevel
""")
reprec.assertoutcome(passed=1+2+1)
def test_class_setup_failure_no_teardown(testdir):
reprec = testdir.inline_runsource("""
class TestSimpleClassSetup:
clslevel = []
def setup_class(cls):
0/0
def teardown_class(cls):
cls.clslevel.append(1)
def test_classlevel(self):
pass
def test_cleanup():
assert not TestSimpleClassSetup.clslevel
""")
reprec.assertoutcome(failed=1, passed=1)
def test_method_setup(testdir):
reprec = testdir.inline_runsource("""
class TestSetupMethod:
def setup_method(self, meth):
self.methsetup = meth
def teardown_method(self, meth):
del self.methsetup
def test_some(self):
assert self.methsetup == self.test_some
def test_other(self):
assert self.methsetup == self.test_other
""")
reprec.assertoutcome(passed=2)
def test_method_setup_failure_no_teardown(testdir):
reprec = testdir.inline_runsource("""
class TestMethodSetup:
clslevel = []
def setup_method(self, method):
self.clslevel.append(1)
0/0
def teardown_method(self, method):
self.clslevel.append(2)
def test_method(self):
pass
def test_cleanup():
assert TestMethodSetup.clslevel == [1]
""")
reprec.assertoutcome(failed=1, passed=1)
def test_method_generator_setup(testdir):
reprec = testdir.inline_runsource("""
class TestSetupTeardownOnInstance:
def setup_class(cls):
cls.classsetup = True
def setup_method(self, method):
self.methsetup = method
def test_generate(self):
assert self.classsetup
assert self.methsetup == self.test_generate
yield self.generated, 5
yield self.generated, 2
def generated(self, value):
assert self.classsetup
assert self.methsetup == self.test_generate
assert value == 5
""")
reprec.assertoutcome(passed=1, failed=1)
def test_func_generator_setup(testdir):
reprec = testdir.inline_runsource("""
import sys
def setup_module(mod):
print ("setup_module")
mod.x = []
def setup_function(fun):
print ("setup_function")
x.append(1)
def teardown_function(fun):
print ("teardown_function")
x.pop()
def test_one():
assert x == [1]
def check():
print ("check")
sys.stderr.write("e\\n")
assert x == [1]
yield check
assert x == [1]
""")
rep = reprec.matchreport("test_one", names="pytest_runtest_logreport")
assert rep.passed
def test_method_setup_uses_fresh_instances(testdir):
reprec = testdir.inline_runsource("""
class TestSelfState1:
memory = []
def test_hello(self):
self.memory.append(self)
def test_afterhello(self):
assert self != self.memory[0]
""")
reprec.assertoutcome(passed=2, failed=0)
def test_setup_that_skips_calledagain(testdir):
p = testdir.makepyfile("""
import pytest
def setup_module(mod):
pytest.skip("x")
def test_function1():
pass
def test_function2():
pass
""")
reprec = testdir.inline_run(p)
reprec.assertoutcome(skipped=2)
def test_setup_fails_again_on_all_tests(testdir):
p = testdir.makepyfile("""
import pytest
def setup_module(mod):
raise ValueError(42)
def test_function1():
pass
def test_function2():
pass
""")
reprec = testdir.inline_run(p)
reprec.assertoutcome(failed=2)
def test_setup_funcarg_setup_when_outer_scope_fails(testdir):
p = testdir.makepyfile("""
import pytest
def setup_module(mod):
raise ValueError(42)
def pytest_funcarg__hello(request):
raise ValueError("xyz43")
def test_function1(hello):
pass
def test_function2(hello):
pass
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*function1*",
"*ValueError*42*",
"*function2*",
"*ValueError*42*",
"*2 error*"
])
assert "xyz43" not in result.stdout.str()
| mit | -6,234,697,200,986,044,000 | 2,594,397,341,750,756,400 | 27.305556 | 74 | 0.554045 | false |
davidl1/hortonworks-extension | build/contrib/hod/testing/testTypes.py | 182 | 7386 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import unittest, os, sys, re, threading, time
myDirectory = os.path.realpath(sys.argv[0])
rootDirectory = re.sub("/testing/.*", "", myDirectory)
sys.path.append(rootDirectory)
from testing.lib import BaseTestSuite
excludes = ['']
import tempfile, shutil, getpass, random
from hodlib.Common.types import typeValidator
# All test-case classes should have the naming convention test_.*
class test_typeValidator(unittest.TestCase):
def setUp(self):
self.originalDir = os.getcwd()
self.validator = typeValidator(self.originalDir)
self.tempDir = tempfile.mkdtemp(dir='/tmp/hod-%s' % getpass.getuser(),
prefix='test_Types_typeValidator_tempDir')
self.tempFile = tempfile.NamedTemporaryFile(dir=self.tempDir)
# verification : error strings
self.errorStringsForVerify = {
'pos_int' : 0,
'uri' : '%s is an invalid uri',
'directory' : 0,
'file' : 0,
}
# verification : valid vals
self.verifyValidVals = [
('pos_int', 0),
('pos_int', 1),
('directory', self.tempDir),
('directory', '/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempDir)),
('file', self.tempFile.name),
('file', '/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempFile.name)),
('uri', 'file://localhost/' + self.tempDir),
('uri', 'file:///' + self.tempDir),
('uri', 'file:///tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempDir)),
('uri', 'file://localhost/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempDir)),
('uri', 'http://hadoop.apache.org/core/'),
('uri', self.tempDir),
('uri', '/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempDir)),
]
# generate an invalid uri
randomNum = random.random()
while os.path.exists('/%s' % randomNum):
# Just to be sure :)
randomNum = random.random()
invalidUri = 'file://localhost/%s' % randomNum
# verification : invalid vals
self.verifyInvalidVals = [
('pos_int', -1),
('uri', invalidUri),
('directory', self.tempFile.name),
('file', self.tempDir),
]
# normalization : vals
self.normalizeVals = [
('pos_int', 1, 1),
('pos_int', '1', 1),
('directory', self.tempDir, self.tempDir),
('directory', '/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempDir),
self.tempDir),
('file', self.tempFile.name, self.tempFile.name),
('file', '/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempFile.name),
self.tempFile.name),
('uri', 'file://localhost' + self.tempDir,
'file://' + self.tempDir),
('uri', 'file://127.0.0.1' + self.tempDir,
'file://' + self.tempDir),
('uri', 'http://hadoop.apache.org/core',
'http://hadoop.apache.org/core'),
('uri', self.tempDir, self.tempDir),
('uri', '/tmp/hod-%s/../../%s' % \
(getpass.getuser(), self.tempDir),
self.tempDir),
]
pass
# All testMethods have to have their names start with 'test'
def testnormalize(self):
for (type, originalVal, normalizedVal) in self.normalizeVals:
# print type, originalVal, normalizedVal,\
# self.validator.normalize(type, originalVal)
assert(self.validator.normalize(type, originalVal) == normalizedVal)
pass
def test__normalize(self):
# Special test for functionality of private method __normalizedPath
tmpdir = tempfile.mkdtemp(dir=self.originalDir) #create in self.originalDir
oldWd = os.getcwd()
os.chdir('/')
tmpdirName = re.sub(".*/","",tmpdir)
# print re.sub(".*/","",tmpdirName)
# print os.path.join(self.originalDir,tmpdir)
(type, originalVal, normalizedVal) = \
('file', tmpdirName, \
os.path.join(self.originalDir,tmpdirName))
assert(self.validator.normalize(type, originalVal) == normalizedVal)
os.chdir(oldWd)
os.rmdir(tmpdir)
pass
def testverify(self):
# test verify method
# test valid vals
for (type,value) in self.verifyValidVals:
valueInfo = { 'isValid' : 0, 'normalized' : 0, 'errorData' : 0 }
valueInfo = self.validator.verify(type,value)
# print type, value, valueInfo
assert(valueInfo['isValid'] == 1)
# test invalid vals
for (type,value) in self.verifyInvalidVals:
valueInfo = { 'isValid' : 0, 'normalized' : 0, 'errorData' : 0 }
valueInfo = self.validator.verify(type,value)
# print type, value, valueInfo
assert(valueInfo['isValid'] == 0)
if valueInfo['errorData'] != 0:
# if there is any errorData, check
assert(valueInfo['errorData'] == \
self.errorStringsForVerify[type] % value)
pass
def tearDown(self):
self.tempFile.close()
if os.path.exists(self.tempDir):
shutil.rmtree(self.tempDir)
pass
class TypesTestSuite(BaseTestSuite):
def __init__(self):
# suite setup
BaseTestSuite.__init__(self, __name__, excludes)
pass
def cleanUp(self):
# suite tearDown
pass
def RunTypesTests():
# modulename_suite
suite = TypesTestSuite()
testResult = suite.runTests()
suite.cleanUp()
return testResult
if __name__ == "__main__":
RunTypesTests()
| apache-2.0 | -7,131,840,934,524,163,000 | 2,063,480,678,289,027,000 | 40.033333 | 79 | 0.508259 | false |
v-iam/azure-sdk-for-python | azure-batch/azure/batch/models/certificate_list_options.py | 3 | 2161 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CertificateListOptions(Model):
"""Additional parameters for the Certificate_list operation.
:param filter: An OData $filter clause.
:type filter: str
:param select: An OData $select clause.
:type select: str
:param max_results: The maximum number of items to return in the response.
A maximum of 1000 certificates can be returned. Default value: 1000 .
:type max_results: int
:param timeout: The maximum time that the server can spend processing the
request, in seconds. The default is 30 seconds. Default value: 30 .
:type timeout: int
:param client_request_id: The caller-generated request identity, in the
form of a GUID with no decoration such as curly braces, e.g.
9C4D50EE-2D56-4CD3-8152-34347DC9F2B0.
:type client_request_id: str
:param return_client_request_id: Whether the server should return the
client-request-id in the response. Default value: False .
:type return_client_request_id: bool
:param ocp_date: The time the request was issued. Client libraries
typically set this to the current system clock time; set it explicitly if
you are calling the REST API directly.
:type ocp_date: datetime
"""
def __init__(self, filter=None, select=None, max_results=1000, timeout=30, client_request_id=None, return_client_request_id=False, ocp_date=None):
self.filter = filter
self.select = select
self.max_results = max_results
self.timeout = timeout
self.client_request_id = client_request_id
self.return_client_request_id = return_client_request_id
self.ocp_date = ocp_date
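# Example (added illustrative sketch, not part of the generated client): a minimal
# construction of the options object; the OData filter string here is made up.
def _example_certificate_list_options():
    opts = CertificateListOptions(filter="state eq 'active'", max_results=100)
    # select, client_request_id and ocp_date stay None; timeout defaults to 30
    return opts.filter, opts.max_results, opts.timeout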
| mit | -4,396,591,807,901,538,000 | -7,103,876,687,449,595,000 | 44.020833 | 150 | 0.665433 | false |
jsaponara/opentaxforms | opentaxforms/ut.py | 1 | 14660 | from __future__ import print_function
import logging
import os
import pkg_resources
import re
import six
import sys
from collections import (
namedtuple as ntuple,
defaultdict as ddict,
OrderedDict as odict)
from datetime import datetime
from os.path import join as pathjoin, exists
from pint import UnitRegistry
from pprint import pprint as pp, pformat as pf
from subprocess import Popen, PIPE
from sys import stdout, exc_info
try:
from cPickle import dump, load
except ImportError:
from pickle import dump, load
NL = '\n'
TAB = '\t'
quiet = False
Bbox = ntuple('Bbox', 'x0 y0 x1 y1')
def merge(bb1, bb2):
return Bbox(
min(bb1.x0, bb2.x0),
min(bb1.y0, bb2.y0),
max(bb1.x1, bb2.x1),
max(bb1.y1, bb2.y1))
def numerify(s):
try:
return int(''.join(d for d in s if d.isdigit()))
except ValueError:
return s
def compactify(multilineRegex):
# to avoid having to replace spaces in multilineRegex's with less readable
# '\s' etc no re.VERBOSE flag needed
r"""
line too long (folded):
titlePttn1=re.compile(r'(?:(\d\d\d\d) )?Form ([\w-]+(?: \w\w?)?)
(?: or ([\w-]+))?(?: ?\(?(?:Schedule ([\w-]+))\)?)?
(?: ?\((?:Rev|Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)
.+?\))?\s*$')
    re.VERBOSE with spaces removed (else they'll be ignored in VERBOSE mode):
pttn=re.compile(
r'''(?:(\d\d\d\d)\s)? # 2016
Form\s([\w-]+ # Form 1040
(?:\s\w\w?)?) # AS
(?:\sor\s([\w-]+))? # or 1040A
(?:\s\s?\(?(?:Schedule\s([\w-]+))\)?)? # (Schedule B)
(?:\s\s?\((?:Rev|Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec).+?\))?\s*$''',re.VERBOSE)
using compactify:
>>> anyMonth = 'Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec'
>>> compactify(
... '''(?:(\d\d\d\d) )? # 2016
... Form ([\w-]+ # Form 1040
... (?: \w\w?)?) # AS
... (?: or ([\w-]+))? # or 1040A
... (?: ?\(?(?:Schedule ([\w-]+))\)?)? # (Schedule B)
... (?: ?\((?:Rev|'''+anyMonth+''').+?\))?\s*$''')
'(?:(\\d\\d\\d\\d) )?Form ([\\w-]+(?: \\w\\w?)?)(?: or ([\\w-]+))?'
'(?: ?\\(?(?:Schedule ([\\w-]+))\\)?)?'
'(?: ?\\('
'(?:Rev|Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec).+?\\))?'
'\\s*$'
# todo what should compactify return for these?
# [but note this entire docstring is raw]
#>>> compactify(r'\ # comment')
#>>> compactify(r'\\ # comment')
#>>> compactify( '\ # comment')
#>>> compactify( '\\ # comment')
#print len(multilineRegex),
'[%s%s]'%(multilineRegex[0],multilineRegex[1])
"""
def crunch(seg):
return re.sub(' *#.*$', '', seg.lstrip())
segs = multilineRegex.split(NL)
return ''.join(crunch(seg) for seg in segs)
class NoSuchPickle(Exception):
pass
class PickleException(Exception):
pass
def pickle(data, pickleFilePrefix):
picklname = '%s.pickl' % (pickleFilePrefix)
with open(picklname, 'wb') as pickl:
dump(data, pickl)
def unpickle(pickleFilePrefix, default=None):
picklname = '%s.pickl' % (pickleFilePrefix)
try:
with open(picklname, 'rb') as pickl:
data = load(pickl)
except IOError as e:
clas, exc, tb = exc_info()
if e.errno == 2: # no such file
if default == 'raise':
                raise NoSuchPickle(exc.args).with_traceback(tb)
            else:
                data = default
        else:
            raise PickleException(exc.args).with_traceback(tb)
return data
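# Example (added sketch): a round trip through the pickle/unpickle helpers above.
# The 'demo_cache' prefix is hypothetical; it creates demo_cache.pickl in the cwd.
def _pickle_roundtrip_example():
    payload = {'form': '1040', 'year': 2016}
    pickle(payload, 'demo_cache')
    assert unpickle('demo_cache') == payload
    assert unpickle('missing_prefix', default={}) == {}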
def flattened(l):
# only works for single level of sublists
return [i for sublist in l for i in sublist]
def hasdups(l, key=None):
if key is None:
ll = l
else:
ll = [key(it) for it in l]
return any(it in ll[1 + i:] for i, it in enumerate(ll))
def uniqify(l):
'''uniqify in place'''
s = set()
idxs = [] # indexes of duplicate items
for i, item in enumerate(l):
if item in s:
idxs.append(i)
else:
s.add(item)
for i in reversed(idxs):
l.pop(i)
return l
def uniqify2(l):
    '''uniqify in place; probably faster for small lists'''
    # walk backwards so that popping an item does not shift the indexes
    # that are still to be visited
    for i in range(len(l) - 1, 0, -1):
        if l[i] in l[:i]:
            l.pop(i)
    return l
log = logging.getLogger()
defaultLoglevel = 'WARN'
alreadySetupLogging = False
def setupLogging(loggerId, args=None):
global alreadySetupLogging
if alreadySetupLogging:
log.warn('ignoring extra call to setupLogging')
fname = log.name
else:
if args:
loglevel = args.loglevel.upper()
else:
loglevel = defaultLoglevel
loglevel = getattr(logging, loglevel)
if not isinstance(loglevel, int):
allowedLogLevels = 'debug info warn warning error critical exception'
raise ValueError('Invalid log level: %s, allowedLogLevels are %s' % (
args.loglevel, allowedLogLevels))
fname = loggerId + '.log'
filehandler=logging.FileHandler(fname, mode='w', encoding='utf-8')
filehandler.setLevel(loglevel)
log.setLevel(loglevel)
log.addHandler(filehandler)
alreadySetupLogging = True
return fname
def unsetupLogging():
global alreadySetupLogging
alreadySetupLogging=False
log.handlers = []
defaultOutput = stdout
def logg(msg, outputs=None):
'''
log=setupLogging('test')
logg('just testing',[stdout,log.warn])
'''
if outputs is None:
outputs = [defaultOutput]
for o in outputs:
m = msg
if o == stdout:
o = stdout.write
m = msg + '\n'
if quiet and o == stdout.write:
continue
o(m)
def jj(*args, **kw):
'''
jj is a more flexible join(), handy for debug output
>>> jj(330,'info',None)
'330 info None'
'''
delim = kw.get('delim', ' ')
try:
return delim.join(str(x) for x in args)
except Exception:
return delim.join(six.text_type(x) for x in args)
def jdb(*args, **kw):
logg(jj(*args, **kw), [log.debug])
def run0(cmd):
try:
# shell is handy for executable path, etc
proc = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
out, err = proc.communicate()
except OSError as exc:
err = str(exc)
out = None
return out, err
def run(cmd, logprefix='run', loglevel='INFO'):
loglevel = getattr(logging, loglevel.upper(), None)
out, err = run0(cmd)
out, err = out.strip(), err.strip()
msg = '%s: command [%s] returned error [%s] and output [%s]' % (
logprefix, cmd, err, out)
if err:
log.error(msg)
raise Exception(msg)
else:
log.log(loglevel, msg)
return out, err
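# Example (added sketch): how run() might be invoked; 'echo hello' is an arbitrary
# command, assumes a POSIX shell, and the captured output comes back as bytes.
def _run_example():
    out, err = run('echo hello', logprefix='demo', loglevel='info')
    return out  # b'hello' on success; run() raises if anything arrives on stderr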
class Resource(object):
def __init__(self, pkgname, fpath=None):
self.pkgname = pkgname
self.fpath = fpath
def path(self):
return pkg_resources.resource_filename(self.pkgname, self.fpath)
def content(self):
return pkg_resources.resource_string(self.pkgname, self.fpath)
class CharEnum(object):
    # Unlike a real enum, there is no order guarantee; this is the simplest
    # variant, adapted from http://stackoverflow.com/questions/2676133/
@classmethod
def keys(cls):
return [k for k in cls.__dict__ if not k.startswith('_')]
@classmethod
def vals(cls):
return [cls.__dict__[k] for k in cls.keys()]
@classmethod
def items(cls):
return zip(cls.keys(), cls.vals())
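# Example (added sketch): a minimal CharEnum subclass; the names and values here
# are made up for illustration.
class _ExampleStatus(CharEnum):
    ok = 'o'
    failed = 'f'
# _ExampleStatus.keys() -> ['ok', 'failed'] and vals() -> ['o', 'f'] (order not guaranteed)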
class ChainablyUpdatableOrderedDict(odict):
'''
handy for ordered initialization
>>> d=ChainablyUpdatableOrderedDict()(a=0)(b=1)(c=2)
>>> assert d.keys()==['a','b','c']
'''
def __init__(self):
super(ChainablyUpdatableOrderedDict, self).__init__()
def __call__(self, **kw):
self.update(kw)
return self
class Bag(object):
# after alexMartelli at http://stackoverflow.com/questions/2597278
def __init__(self, *maps, **kw):
'''
>>> b=Bag(a=0)
>>> b.a=1
>>> b.b=0
>>> c=Bag(b)
'''
for mapp in maps:
getdict = None
if type(mapp) == dict:
getdict = lambda x: x
# def getdict(x): return x
elif type(mapp) == Bag:
getdict = lambda x: x.__dict__
# def getdict(x): return x.__dict__
elif type(mapp) == tuple:
mapp, getdict = mapp
if getdict is not None:
self.__dict__.update(getdict(mapp))
else:
mapp, getitems = self._getGetitems(mapp)
for k, v in getitems(mapp):
self.__dict__[k] = v
self.__dict__.update(kw)
def _getGetitems(self, mapp):
if type(mapp) == tuple:
mapp, getitems = mapp
else:
getitems = lambda m: m.items()
# def getitems(m): return m.items()
return mapp, getitems
def __getitem__(self, key):
return self.__dict__[key]
def __setitem__(self, key, val):
self.__dict__[key] = val
def __len__(self):
return len(self.__dict__)
def __call__(self, *keys):
'''slicing interface
gimmicky but useful, and doesnt pollute key namespace
>>> b=Bag(a=1,b=2)
>>> assert b('a','b')==(1,2)
'''
return tuple(self.__dict__[k] for k in keys)
def clear(self):
self.__dict__={}
def update(self, *maps):
'''
>>> b=Bag(a=1,b=2)
>>> b.update(Bag(a=1,b=1,c=0))
Bag({'a': 1, 'b': 1, 'c': 0})
'''
for mapp in maps:
mapp, getitems = self._getGetitems(mapp)
for k, v in getitems(mapp):
self.__dict__[k] = v
return self
def __add__(self, *maps):
self.__iadd__(*maps)
return self
def __iadd__(self, *maps):
'''
>>> b=Bag(a=1,b=2)
>>> b+=Bag(a=1,b=1,c=0)
>>> assert b('a','b','c')==(2,3,0)
>>> b=Bag(a='1',b='2')
>>> b+=Bag(a='1',b='1',c='0')
>>> assert b('a','b','c')==('11','21','0')
'''
# todo error for empty maps[0]
zero = type(list(maps[0].values())[0])()
for mapp in maps:
mapp, getitems = self._getGetitems(mapp)
for k, v in getitems(mapp):
self.__dict__.setdefault(k, zero)
self.__dict__[k] += v
return self
def __iter__(self):
return self.iterkeys()
def iterkeys(self):
return iter(self.__dict__.keys())
def keys(self):
return self.__dict__.keys()
def values(self):
return self.__dict__.values()
def items(self):
return self.__dict__.items()
def iteritems(self):
return self.__dict__.iteritems()
def get(self, key, dflt=None):
return self.__dict__.get(key, dflt)
def __str__(self):
return 'Bag(' + pf(self.__dict__) + ')'
def __repr__(self):
return self.__str__()
ureg = UnitRegistry()
# interactive use: from pint import UnitRegistry as ureg; ur=ureg();
# qq=ur.Quantity
qq = ureg.Quantity
def notequalpatch(self, o):
return not self.__eq__(o)
setattr(qq, '__ne__', notequalpatch)
assert qq(1, 'mm') == qq(1, 'mm')
assert not qq(1, 'mm') != qq(1, 'mm')
class Qnty(qq):
@classmethod
def fromstring(cls, s):
'''
>>> Qnty.fromstring('25.4mm')
<Quantity(25.4, 'millimeter')>
'''
if ' ' in s:
qnty, unit = s.split()
else:
m = re.match(r'([\d\.\-]+)(\w+)', s)
if m:
qnty, unit = m.groups()
else:
raise Exception('unsupported Qnty format [%s]' % (s))
if '.' in qnty:
qnty = float(qnty)
else:
qnty = int(qnty)
unit = {
'pt': 'printers_point',
'in': 'inch',
}.get(unit, unit)
return Qnty(qnty, unit)
def __hash__(self):
return hash(repr(self))
def playQnty():
# pagewidth=Qnty(page.cropbox[2]-page.cropbox[0],'printers_point')
a = Qnty.fromstring('2in')
b = Qnty.fromstring('1in')
print(Qnty(a - b, 'printers_point'))
print(Qnty.fromstring('72pt'))
# cumColWidths=[sum(columnWidths[0:i],Qnty(0,columnWidths[0].units)) for i
# in range(len(columnWidths))]
print(Qnty(0, a.units))
# maxh=max([Qnty.fromstring(c.attrib.get('h',c.attrib.get('minH'))) for c
# in cells])
print(max(a, b))
s = set()
s.update([a, b])
assert len(s) == 1
def nth(n):
'''
>>> nth(2)
'2nd'
>>> nth(21)
'21st'
>>> nth('22')
'22nd'
>>> nth(23)
'23rd'
>>> nth(24)
'24th'
>>> nth(12)
'12th'
'''
n = str(n)
suffix = 'th'
if n[-1] == '1' and n[-2:] != '11':
suffix = 'st'
elif n[-1] == '2' and n[-2:] != '12':
suffix = 'nd'
elif n[-1] == '3' and n[-2:] != '13':
suffix = 'rd'
return n + suffix
def skip(s, substr):
'''
>>> skip('0123456789','45')
'6789'
'''
idx = s.index(substr)
return s[idx + len(substr):]
def until(s, substr):
'''
>>> until('0123456789','45')
'0123'
'''
try:
idx = s.index(substr)
return s[:idx]
except ValueError:
return s
def ensure_dir(folder):
'''ensure that directory exists'''
if not exists(folder):
os.makedirs(folder)
def now(format=None):
dt = datetime.now()
if format is None:
return dt.isoformat()
return dt.strftime(format)
def readImgSize(fname, dirName):
from PIL import Image
with open(pathjoin(dirName,fname), 'rb') as fh:
img = Image.open(fh)
imgw, imgh = img.size
return imgw, imgh
def asciiOnly(s):
if s:
s=''.join(c for c in s if ord(c)<127)
return s
if __name__ == "__main__":
args = sys.argv[1:]
if any('T' in arg for arg in args):
verbose = any('v' in arg for arg in args)
import doctest
doctest.testmod(verbose=verbose)
| agpl-3.0 | 6,152,638,138,313,396,000 | -8,237,851,368,268,299,000 | 25.178571 | 107 | 0.512005 | false |
SnabbCo/neutron | neutron/openstack/common/rpc/impl_zmq.py | 6 | 26443 | # Copyright 2011 Cloudscaling Group, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import pprint
import re
import socket
import sys
import types
import uuid
import eventlet
import greenlet
from oslo.config import cfg
import six
from six import moves
from neutron.openstack.common import excutils
from neutron.openstack.common.gettextutils import _, _LE, _LI
from neutron.openstack.common import importutils
from neutron.openstack.common import jsonutils
from neutron.openstack.common.rpc import common as rpc_common
zmq = importutils.try_import('eventlet.green.zmq')
# Aliased here for convenience; these are not modified.
pformat = pprint.pformat
Timeout = eventlet.timeout.Timeout
LOG = rpc_common.LOG
RemoteError = rpc_common.RemoteError
RPCException = rpc_common.RPCException
zmq_opts = [
cfg.StrOpt('rpc_zmq_bind_address', default='*',
help='ZeroMQ bind address. Should be a wildcard (*), '
'an ethernet interface, or IP. '
'The "host" option should point or resolve to this '
'address.'),
# The module.Class to use for matchmaking.
cfg.StrOpt(
'rpc_zmq_matchmaker',
default=('neutron.openstack.common.rpc.'
'matchmaker.MatchMakerLocalhost'),
help='MatchMaker driver',
),
# The following port is unassigned by IANA as of 2012-05-21
cfg.IntOpt('rpc_zmq_port', default=9501,
help='ZeroMQ receiver listening port'),
cfg.IntOpt('rpc_zmq_contexts', default=1,
help='Number of ZeroMQ contexts, defaults to 1'),
cfg.IntOpt('rpc_zmq_topic_backlog', default=None,
help='Maximum number of ingress messages to locally buffer '
'per topic. Default is unlimited.'),
cfg.StrOpt('rpc_zmq_ipc_dir', default='/var/run/openstack',
help='Directory for holding IPC sockets'),
cfg.StrOpt('rpc_zmq_host', default=socket.gethostname(),
help='Name of this node. Must be a valid hostname, FQDN, or '
'IP address. Must match "host" option, if running Nova.')
]
CONF = cfg.CONF
CONF.register_opts(zmq_opts)
ZMQ_CTX = None # ZeroMQ Context, must be global.
matchmaker = None # memorized matchmaker object
def _serialize(data):
"""Serialization wrapper.
We prefer using JSON, but it cannot encode all types.
Error if a developer passes us bad data.
"""
try:
return jsonutils.dumps(data, ensure_ascii=True)
except TypeError:
with excutils.save_and_reraise_exception():
LOG.error(_LE("JSON serialization failed."))
def _deserialize(data):
"""Deserialization wrapper."""
LOG.debug("Deserializing: %s", data)
return jsonutils.loads(data)
class ZmqSocket(object):
"""A tiny wrapper around ZeroMQ.
Simplifies the send/recv protocol and connection management.
Can be used as a Context (supports the 'with' statement).
"""
def __init__(self, addr, zmq_type, bind=True, subscribe=None):
self.sock = _get_ctxt().socket(zmq_type)
self.addr = addr
self.type = zmq_type
self.subscriptions = []
# Support failures on sending/receiving on wrong socket type.
self.can_recv = zmq_type in (zmq.PULL, zmq.SUB)
self.can_send = zmq_type in (zmq.PUSH, zmq.PUB)
self.can_sub = zmq_type in (zmq.SUB, )
# Support list, str, & None for subscribe arg (cast to list)
do_sub = {
list: subscribe,
str: [subscribe],
type(None): []
}[type(subscribe)]
for f in do_sub:
self.subscribe(f)
str_data = {'addr': addr, 'type': self.socket_s(),
'subscribe': subscribe, 'bind': bind}
LOG.debug("Connecting to %(addr)s with %(type)s", str_data)
LOG.debug("-> Subscribed to %(subscribe)s", str_data)
LOG.debug("-> bind: %(bind)s", str_data)
try:
if bind:
self.sock.bind(addr)
else:
self.sock.connect(addr)
except Exception:
raise RPCException(_("Could not open socket."))
def socket_s(self):
"""Get socket type as string."""
t_enum = ('PUSH', 'PULL', 'PUB', 'SUB', 'REP', 'REQ', 'ROUTER',
'DEALER')
return dict(map(lambda t: (getattr(zmq, t), t), t_enum))[self.type]
def subscribe(self, msg_filter):
"""Subscribe."""
if not self.can_sub:
raise RPCException("Cannot subscribe on this socket.")
LOG.debug("Subscribing to %s", msg_filter)
try:
self.sock.setsockopt(zmq.SUBSCRIBE, msg_filter)
except Exception:
return
self.subscriptions.append(msg_filter)
def unsubscribe(self, msg_filter):
"""Unsubscribe."""
if msg_filter not in self.subscriptions:
return
self.sock.setsockopt(zmq.UNSUBSCRIBE, msg_filter)
self.subscriptions.remove(msg_filter)
def close(self):
if self.sock is None or self.sock.closed:
return
# We must unsubscribe, or we'll leak descriptors.
if self.subscriptions:
for f in self.subscriptions:
try:
self.sock.setsockopt(zmq.UNSUBSCRIBE, f)
except Exception:
pass
self.subscriptions = []
try:
# Default is to linger
self.sock.close()
except Exception:
# While this is a bad thing to happen,
# it would be much worse if some of the code calling this
            # were to fail. For now, let's log, and later evaluate
# if we can safely raise here.
LOG.error(_LE("ZeroMQ socket could not be closed."))
self.sock = None
def recv(self, **kwargs):
if not self.can_recv:
raise RPCException(_("You cannot recv on this socket."))
return self.sock.recv_multipart(**kwargs)
def send(self, data, **kwargs):
if not self.can_send:
raise RPCException(_("You cannot send on this socket."))
self.sock.send_multipart(data, **kwargs)
class ZmqClient(object):
"""Client for ZMQ sockets."""
def __init__(self, addr):
self.outq = ZmqSocket(addr, zmq.PUSH, bind=False)
    def cast(self, msg_id, topic, data, envelope):
        msg_id = msg_id or 0
        if not envelope:
            # Legacy wire format: [msg_id, topic, 'cast', serialized (context, msg)].
            self.outq.send(map(bytes,
                               (msg_id, topic, 'cast', _serialize(data))))
            return
        # Versioned wire format: the envelope dict is flattened into alternating
        # key/value frames after [msg_id, topic, 'impl_zmq_v2', marshalled context].
        rpc_envelope = rpc_common.serialize_msg(data[1], envelope)
        zmq_msg = moves.reduce(lambda x, y: x + y, rpc_envelope.items())
        self.outq.send(map(bytes,
                           (msg_id, topic, 'impl_zmq_v2', data[0]) + zmq_msg))
def close(self):
self.outq.close()
class RpcContext(rpc_common.CommonRpcContext):
"""Context that supports replying to a rpc.call."""
def __init__(self, **kwargs):
self.replies = []
super(RpcContext, self).__init__(**kwargs)
def deepcopy(self):
values = self.to_dict()
values['replies'] = self.replies
return self.__class__(**values)
def reply(self, reply=None, failure=None, ending=False):
if ending:
return
self.replies.append(reply)
@classmethod
def marshal(self, ctx):
ctx_data = ctx.to_dict()
return _serialize(ctx_data)
@classmethod
def unmarshal(self, data):
return RpcContext.from_dict(_deserialize(data))
class InternalContext(object):
"""Used by ConsumerBase as a private context for - methods."""
def __init__(self, proxy):
self.proxy = proxy
self.msg_waiter = None
def _get_response(self, ctx, proxy, topic, data):
"""Process a curried message and cast the result to topic."""
LOG.debug("Running func with context: %s", ctx.to_dict())
data.setdefault('version', None)
data.setdefault('args', {})
try:
result = proxy.dispatch(
ctx, data['version'], data['method'],
data.get('namespace'), **data['args'])
return ConsumerBase.normalize_reply(result, ctx.replies)
except greenlet.GreenletExit:
# ignore these since they are just from shutdowns
pass
except rpc_common.ClientException as e:
LOG.debug("Expected exception during message handling (%s)" %
e._exc_info[1])
return {'exc':
rpc_common.serialize_remote_exception(e._exc_info,
log_failure=False)}
except Exception:
LOG.error(_LE("Exception during message handling"))
return {'exc':
rpc_common.serialize_remote_exception(sys.exc_info())}
def reply(self, ctx, proxy,
msg_id=None, context=None, topic=None, msg=None):
"""Reply to a casted call."""
# NOTE(ewindisch): context kwarg exists for Grizzly compat.
# this may be able to be removed earlier than
# 'I' if ConsumerBase.process were refactored.
if type(msg) is list:
payload = msg[-1]
else:
payload = msg
response = ConsumerBase.normalize_reply(
self._get_response(ctx, proxy, topic, payload),
ctx.replies)
LOG.debug("Sending reply")
_multi_send(_cast, ctx, topic, {
'method': '-process_reply',
'args': {
'msg_id': msg_id, # Include for Folsom compat.
'response': response
}
}, _msg_id=msg_id)
class ConsumerBase(object):
"""Base Consumer."""
def __init__(self):
self.private_ctx = InternalContext(None)
@classmethod
def normalize_reply(self, result, replies):
#TODO(ewindisch): re-evaluate and document this method.
if isinstance(result, types.GeneratorType):
return list(result)
elif replies:
return replies
else:
return [result]
def process(self, proxy, ctx, data):
data.setdefault('version', None)
data.setdefault('args', {})
        # Methods starting with - are
        # processed internally (not a valid public method name).
method = data.get('method')
if not method:
LOG.error(_LE("RPC message did not include method."))
return
# Internal method
# uses internal context for safety.
if method == '-reply':
self.private_ctx.reply(ctx, proxy, **data['args'])
return
proxy.dispatch(ctx, data['version'],
data['method'], data.get('namespace'), **data['args'])
class ZmqBaseReactor(ConsumerBase):
"""A consumer class implementing a centralized casting broker (PULL-PUSH).
Used for RoundRobin requests.
"""
def __init__(self, conf):
super(ZmqBaseReactor, self).__init__()
self.proxies = {}
self.threads = []
self.sockets = []
self.subscribe = {}
self.pool = eventlet.greenpool.GreenPool(conf.rpc_thread_pool_size)
def register(self, proxy, in_addr, zmq_type_in,
in_bind=True, subscribe=None):
LOG.info(_LI("Registering reactor"))
if zmq_type_in not in (zmq.PULL, zmq.SUB):
raise RPCException("Bad input socktype")
# Items push in.
inq = ZmqSocket(in_addr, zmq_type_in, bind=in_bind,
subscribe=subscribe)
self.proxies[inq] = proxy
self.sockets.append(inq)
LOG.info(_LI("In reactor registered"))
def consume_in_thread(self):
@excutils.forever_retry_uncaught_exceptions
def _consume(sock):
LOG.info(_LI("Consuming socket"))
while True:
self.consume(sock)
for k in self.proxies.keys():
self.threads.append(
self.pool.spawn(_consume, k)
)
def wait(self):
for t in self.threads:
t.wait()
def close(self):
for s in self.sockets:
s.close()
for t in self.threads:
t.kill()
class ZmqProxy(ZmqBaseReactor):
"""A consumer class implementing a topic-based proxy.
Forwards to IPC sockets.
"""
def __init__(self, conf):
super(ZmqProxy, self).__init__(conf)
pathsep = set((os.path.sep or '', os.path.altsep or '', '/', '\\'))
self.badchars = re.compile(r'[%s]' % re.escape(''.join(pathsep)))
self.topic_proxy = {}
def consume(self, sock):
ipc_dir = CONF.rpc_zmq_ipc_dir
data = sock.recv(copy=False)
topic = data[1].bytes
if topic.startswith('fanout~'):
sock_type = zmq.PUB
topic = topic.split('.', 1)[0]
elif topic.startswith('zmq_replies'):
sock_type = zmq.PUB
else:
sock_type = zmq.PUSH
if topic not in self.topic_proxy:
def publisher(waiter):
LOG.info(_LI("Creating proxy for topic: %s"), topic)
try:
# The topic is received over the network,
# don't trust this input.
if self.badchars.search(topic) is not None:
emsg = _("Topic contained dangerous characters.")
LOG.warn(emsg)
raise RPCException(emsg)
out_sock = ZmqSocket("ipc://%s/zmq_topic_%s" %
(ipc_dir, topic),
sock_type, bind=True)
except RPCException:
waiter.send_exception(*sys.exc_info())
return
self.topic_proxy[topic] = eventlet.queue.LightQueue(
CONF.rpc_zmq_topic_backlog)
self.sockets.append(out_sock)
# It takes some time for a pub socket to open,
# before we can have any faith in doing a send() to it.
if sock_type == zmq.PUB:
eventlet.sleep(.5)
waiter.send(True)
while(True):
data = self.topic_proxy[topic].get()
out_sock.send(data, copy=False)
wait_sock_creation = eventlet.event.Event()
eventlet.spawn(publisher, wait_sock_creation)
try:
wait_sock_creation.wait()
except RPCException:
LOG.error(_LE("Topic socket file creation failed."))
return
try:
self.topic_proxy[topic].put_nowait(data)
except eventlet.queue.Full:
LOG.error(_LE("Local per-topic backlog buffer full for topic "
"%(topic)s. Dropping message.") % {'topic': topic})
def consume_in_thread(self):
"""Runs the ZmqProxy service."""
ipc_dir = CONF.rpc_zmq_ipc_dir
consume_in = "tcp://%s:%s" % \
(CONF.rpc_zmq_bind_address,
CONF.rpc_zmq_port)
consumption_proxy = InternalContext(None)
try:
os.makedirs(ipc_dir)
except os.error:
if not os.path.isdir(ipc_dir):
with excutils.save_and_reraise_exception():
LOG.error(_LE("Required IPC directory does not exist at"
" %s") % (ipc_dir, ))
try:
self.register(consumption_proxy,
consume_in,
zmq.PULL)
except zmq.ZMQError:
if os.access(ipc_dir, os.X_OK):
with excutils.save_and_reraise_exception():
LOG.error(_LE("Permission denied to IPC directory at"
" %s") % (ipc_dir, ))
with excutils.save_and_reraise_exception():
LOG.error(_LE("Could not create ZeroMQ receiver daemon. "
"Socket may already be in use."))
super(ZmqProxy, self).consume_in_thread()
def unflatten_envelope(packenv):
"""Unflattens the RPC envelope.
Takes a list and returns a dictionary.
i.e. [1,2,3,4] => {1: 2, 3: 4}
"""
i = iter(packenv)
h = {}
try:
while True:
k = six.next(i)
h[k] = six.next(i)
except StopIteration:
return h
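# Example (added sketch): the pairing with ZmqClient.cast() above, which flattens
# an envelope dict into alternating key/value frames before sending.
def _envelope_roundtrip_example():
    envelope = {'oslo.version': '2.0', 'oslo.message': '{"method": "ping"}'}
    flattened = moves.reduce(lambda x, y: x + y, envelope.items())
    return unflatten_envelope(flattened) == envelope  # True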
class ZmqReactor(ZmqBaseReactor):
"""A consumer class implementing a consumer for messages.
Can also be used as a 1:1 proxy
"""
def __init__(self, conf):
super(ZmqReactor, self).__init__(conf)
def consume(self, sock):
#TODO(ewindisch): use zero-copy (i.e. references, not copying)
data = sock.recv()
LOG.debug("CONSUMER RECEIVED DATA: %s", data)
proxy = self.proxies[sock]
if data[2] == 'cast': # Legacy protocol
packenv = data[3]
ctx, msg = _deserialize(packenv)
request = rpc_common.deserialize_msg(msg)
ctx = RpcContext.unmarshal(ctx)
elif data[2] == 'impl_zmq_v2':
packenv = data[4:]
msg = unflatten_envelope(packenv)
request = rpc_common.deserialize_msg(msg)
# Unmarshal only after verifying the message.
ctx = RpcContext.unmarshal(data[3])
else:
LOG.error(_LE("ZMQ Envelope version unsupported or unknown."))
return
self.pool.spawn_n(self.process, proxy, ctx, request)
class Connection(rpc_common.Connection):
"""Manages connections and threads."""
def __init__(self, conf):
self.topics = []
self.reactor = ZmqReactor(conf)
def create_consumer(self, topic, proxy, fanout=False):
# Register with matchmaker.
_get_matchmaker().register(topic, CONF.rpc_zmq_host)
# Subscription scenarios
if fanout:
sock_type = zmq.SUB
subscribe = ('', fanout)[type(fanout) == str]
topic = 'fanout~' + topic.split('.', 1)[0]
else:
sock_type = zmq.PULL
subscribe = None
topic = '.'.join((topic.split('.', 1)[0], CONF.rpc_zmq_host))
if topic in self.topics:
LOG.info(_LI("Skipping topic registration. Already registered."))
return
# Receive messages from (local) proxy
inaddr = "ipc://%s/zmq_topic_%s" % \
(CONF.rpc_zmq_ipc_dir, topic)
LOG.debug("Consumer is a zmq.%s",
['PULL', 'SUB'][sock_type == zmq.SUB])
self.reactor.register(proxy, inaddr, sock_type,
subscribe=subscribe, in_bind=False)
self.topics.append(topic)
def close(self):
_get_matchmaker().stop_heartbeat()
for topic in self.topics:
_get_matchmaker().unregister(topic, CONF.rpc_zmq_host)
self.reactor.close()
self.topics = []
def wait(self):
self.reactor.wait()
def consume_in_thread(self):
_get_matchmaker().start_heartbeat()
self.reactor.consume_in_thread()
def _cast(addr, context, topic, msg, timeout=None, envelope=False,
_msg_id=None):
timeout_cast = timeout or CONF.rpc_cast_timeout
payload = [RpcContext.marshal(context), msg]
with Timeout(timeout_cast, exception=rpc_common.Timeout):
try:
conn = ZmqClient(addr)
# assumes cast can't return an exception
conn.cast(_msg_id, topic, payload, envelope)
except zmq.ZMQError:
raise RPCException("Cast failed. ZMQ Socket Exception")
finally:
if 'conn' in vars():
conn.close()
def _call(addr, context, topic, msg, timeout=None,
envelope=False):
# timeout_response is how long we wait for a response
timeout = timeout or CONF.rpc_response_timeout
# The msg_id is used to track replies.
msg_id = uuid.uuid4().hex
# Replies always come into the reply service.
reply_topic = "zmq_replies.%s" % CONF.rpc_zmq_host
LOG.debug("Creating payload")
# Curry the original request into a reply method.
mcontext = RpcContext.marshal(context)
payload = {
'method': '-reply',
'args': {
'msg_id': msg_id,
'topic': reply_topic,
# TODO(ewindisch): safe to remove mcontext in I.
'msg': [mcontext, msg]
}
}
LOG.debug("Creating queue socket for reply waiter")
# Messages arriving async.
# TODO(ewindisch): have reply consumer with dynamic subscription mgmt
with Timeout(timeout, exception=rpc_common.Timeout):
try:
msg_waiter = ZmqSocket(
"ipc://%s/zmq_topic_zmq_replies.%s" %
(CONF.rpc_zmq_ipc_dir,
CONF.rpc_zmq_host),
zmq.SUB, subscribe=msg_id, bind=False
)
LOG.debug("Sending cast")
_cast(addr, context, topic, payload, envelope)
LOG.debug("Cast sent; Waiting reply")
# Blocks until receives reply
msg = msg_waiter.recv()
LOG.debug("Received message: %s", msg)
LOG.debug("Unpacking response")
if msg[2] == 'cast': # Legacy version
raw_msg = _deserialize(msg[-1])[-1]
elif msg[2] == 'impl_zmq_v2':
rpc_envelope = unflatten_envelope(msg[4:])
raw_msg = rpc_common.deserialize_msg(rpc_envelope)
else:
raise rpc_common.UnsupportedRpcEnvelopeVersion(
_("Unsupported or unknown ZMQ envelope returned."))
responses = raw_msg['args']['response']
# ZMQError trumps the Timeout error.
except zmq.ZMQError:
raise RPCException("ZMQ Socket Error")
except (IndexError, KeyError):
raise RPCException(_("RPC Message Invalid."))
finally:
if 'msg_waiter' in vars():
msg_waiter.close()
# It seems we don't need to do all of the following,
# but perhaps it would be useful for multicall?
# One effect of this is that we're checking all
# responses for Exceptions.
for resp in responses:
if isinstance(resp, types.DictType) and 'exc' in resp:
raise rpc_common.deserialize_remote_exception(CONF, resp['exc'])
return responses[-1]
def _multi_send(method, context, topic, msg, timeout=None,
envelope=False, _msg_id=None):
"""Wraps the sending of messages.
Dispatches to the matchmaker and sends message to all relevant hosts.
"""
conf = CONF
LOG.debug("%(msg)s" % {'msg': ' '.join(map(pformat, (topic, msg)))})
queues = _get_matchmaker().queues(topic)
LOG.debug("Sending message(s) to: %s", queues)
# Don't stack if we have no matchmaker results
if not queues:
LOG.warn(_("No matchmaker results. Not casting."))
# While not strictly a timeout, callers know how to handle
# this exception and a timeout isn't too big a lie.
raise rpc_common.Timeout(_("No match from matchmaker."))
# This supports brokerless fanout (addresses > 1)
for queue in queues:
(_topic, ip_addr) = queue
_addr = "tcp://%s:%s" % (ip_addr, conf.rpc_zmq_port)
if method.__name__ == '_cast':
eventlet.spawn_n(method, _addr, context,
_topic, msg, timeout, envelope,
_msg_id)
return
return method(_addr, context, _topic, msg, timeout,
envelope)
def create_connection(conf, new=True):
return Connection(conf)
def multicall(conf, *args, **kwargs):
"""Multiple calls."""
return _multi_send(_call, *args, **kwargs)
def call(conf, *args, **kwargs):
"""Send a message, expect a response."""
data = _multi_send(_call, *args, **kwargs)
return data[-1]
def cast(conf, *args, **kwargs):
"""Send a message expecting no reply."""
_multi_send(_cast, *args, **kwargs)
def fanout_cast(conf, context, topic, msg, **kwargs):
"""Send a message to all listening and expect no reply."""
# NOTE(ewindisch): fanout~ is used because it avoid splitting on .
# and acts as a non-subtle hint to the matchmaker and ZmqProxy.
_multi_send(_cast, context, 'fanout~' + str(topic), msg, **kwargs)
def notify(conf, context, topic, msg, envelope):
"""Send notification event.
Notifications are sent to topic-priority.
This differs from the AMQP drivers which send to topic.priority.
"""
# NOTE(ewindisch): dot-priority in rpc notifier does not
# work with our assumptions.
topic = topic.replace('.', '-')
cast(conf, context, topic, msg, envelope=envelope)
def cleanup():
"""Clean up resources in use by implementation."""
global ZMQ_CTX
if ZMQ_CTX:
ZMQ_CTX.term()
ZMQ_CTX = None
global matchmaker
matchmaker = None
def _get_ctxt():
if not zmq:
raise ImportError("Failed to import eventlet.green.zmq")
global ZMQ_CTX
if not ZMQ_CTX:
ZMQ_CTX = zmq.Context(CONF.rpc_zmq_contexts)
return ZMQ_CTX
def _get_matchmaker(*args, **kwargs):
global matchmaker
if not matchmaker:
mm = CONF.rpc_zmq_matchmaker
if mm.endswith('matchmaker.MatchMakerRing'):
mm.replace('matchmaker', 'matchmaker_ring')
LOG.warn(_('rpc_zmq_matchmaker = %(orig)s is deprecated; use'
' %(new)s instead') % dict(
orig=CONF.rpc_zmq_matchmaker, new=mm))
matchmaker = importutils.import_object(mm, *args, **kwargs)
return matchmaker
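# Example (added sketch): driver-level usage in broad strokes. The topic name and
# the proxy/context arguments are placeholders, and this assumes a configured
# matchmaker plus a running receiver, so it is illustrative rather than runnable
# in isolation.
def _zmq_driver_usage_sketch(proxy, context):
    conn = create_connection(CONF)
    conn.create_consumer('demo_topic', proxy)
    conn.consume_in_thread()
    cast(CONF, context, 'demo_topic', {'method': 'ping', 'args': {}})
    return conn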
| apache-2.0 | -5,405,163,915,048,306,000 | 4,849,124,275,688,035,000 | 31.326406 | 78 | 0.567901 | false |
l0b0/cds-invenio-vengmark | modules/bibharvest/lib/oai_repository_admin.py | 4 | 30974 | ## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""CDS Invenio OAI Repository Administrator Interface."""
__revision__ = "$Id$"
import cgi
import os
from invenio.config import \
CFG_SITE_LANG, \
CFG_TMPDIR, \
CFG_SITE_URL
import invenio.access_control_engine as access_manager
from invenio.urlutils import create_html_link
from invenio.dbquery import run_sql
from invenio.oai_repository_updater import parse_set_definition
from invenio.messages import gettext_set_language
import invenio.template
bibharvest_templates = invenio.template.load('bibharvest')
tmppath = CFG_TMPDIR + '/oairepositoryadmin.' + str(os.getpid())
guideurl = "help/admin/oai-admin-guide"
oai_rep_admin_url = CFG_SITE_URL + \
"/admin/bibharvest/oairepositoryadmin.py"
def getnavtrail(previous = '', ln = CFG_SITE_LANG):
"""Get navtrail"""
return bibharvest_templates.tmpl_getnavtrail(previous = previous, ln = ln)
def perform_request_index(ln=CFG_SITE_LANG):
"""OAI Repository admin index"""
out = '''<p>Define below the sets to expose through the OAI harvesting
protocol. <br /> You will have to run the
<a href="%(siteurl)s/help/admin/oai-admin-guide?ln=%(ln)s#3.2"><code>oairepositoryupdater</code></a>
utility to apply the settings you have defined here.</p>''' % {'siteurl': CFG_SITE_URL,
'ln': ln}
titlebar = bibharvest_templates.tmpl_draw_titlebar(ln = ln,
title = "OAI repository",
guideurl = guideurl,
extraname = "add new OAI set",
extraurl = "admin/bibharvest/oairepositoryadmin.py/addset")
header = ['id', 'setSpec',
'setName', 'collection',
'p1', 'f1', 'm1', 'op1',
'p2', 'f2', 'm2', 'op2',
'p3', 'f3', 'm3', '', '']
oai_set = get_oai_set()
sets = []
for (id, setSpec, setName, setCollection, \
setDescription, p1, f1, m1, p2, f2, m2, \
p3, f3, m3, op1, op2) in oai_set:
del_request = '<a href="' + CFG_SITE_URL + "/" + \
"admin/bibharvest/oairepositoryadmin.py/delset?ln=" + \
ln + "&oai_set_id=" + str(id) + '">delete</a>'
edit_request = '<a href="' + CFG_SITE_URL + "/" + \
"admin/bibharvest/oairepositoryadmin.py/editset?ln=" + \
ln + "&oai_set_id=" + str(id) + '">edit</a>'
sets.append([id, cgi.escape(setSpec), cgi.escape(setName),
cgi.escape(setCollection),
cgi.escape(p1), f1, m1, op1,
cgi.escape(p2), f2, m2, op2,
cgi.escape(p3), f3, m3,
del_request, edit_request])
add_request = '<a href="' + CFG_SITE_URL + "/" + \
"admin/bibharvest/oairepositoryadmin.py/addset?ln=" + \
ln + '">Add new OAI set definition</a>'
sets.append(['', add_request, '', '', '', '', '',
'', '', '', '', '', '', '', '', '', ''])
out += transform_tuple(header=header, tuple=sets)
out += "<br /><br />"
return out
def perform_request_addset(oai_set_name='', oai_set_spec='',
oai_set_collection='',
oai_set_description='',
oai_set_definition='', oai_set_reclist='',
oai_set_p1='', oai_set_f1='',oai_set_m1='',
oai_set_p2='', oai_set_f2='',
oai_set_m2='', oai_set_p3='',
oai_set_f3='', oai_set_m3='',
oai_set_op1='a', oai_set_op2='a',
ln=CFG_SITE_LANG, func=0):
"""add a new OAI set"""
_ = gettext_set_language(ln)
out = ""
if func in ["0", 0]:
text = input_form(oai_set_name, oai_set_spec,
oai_set_collection, oai_set_description,
oai_set_definition, oai_set_reclist,
oai_set_p1, oai_set_f1,oai_set_m1,
oai_set_p2, oai_set_f2,oai_set_m2,
oai_set_p3, oai_set_f3, oai_set_m3,
oai_set_op1, oai_set_op2, ln=ln)
out = createform(action="addset",
text=text,
ln=ln,
button="Add new OAI set definition line",
func=1)
lnargs = [["ln", ln]]
if func in ["1", 1]:
out += "<br />"
res = add_oai_set(oai_set_name, oai_set_spec,
oai_set_collection, oai_set_description,
oai_set_definition, oai_set_reclist,
oai_set_p1, oai_set_f1, oai_set_m1,
oai_set_p2, oai_set_f2, oai_set_m2,
oai_set_p3, oai_set_f3, oai_set_m3,
oai_set_op1, oai_set_op2)
if res[0] == 1:
out += bibharvest_templates.tmpl_print_info(ln,
"OAI set definition %s added." % \
cgi.escape(oai_set_name))
out += "<br />"
out += "<br /><br />"
out += create_html_link(urlbase=oai_rep_admin_url + \
"/index",
urlargd={'ln': ln},
link_label=_("Return to main selection"))
return nice_box("", out)
def perform_request_editset(oai_set_id=None, oai_set_name='',
oai_set_spec='', oai_set_collection='',
oai_set_description='',
oai_set_definition='', oai_set_reclist='',
oai_set_p1='', oai_set_f1='',
oai_set_m1='', oai_set_p2='',
oai_set_f2='', oai_set_m2='',
oai_set_p3='', oai_set_f3='',
oai_set_m3='', oai_set_op1='a',
oai_set_op2='a', ln=CFG_SITE_LANG,
func=0):
"""creates html form to edit an OAI set."""
_ = gettext_set_language(ln)
if oai_set_id is None:
return "No OAI set ID selected."
out = ""
if func in [0, "0"]:
oai_set = get_oai_set(oai_set_id)
if not oai_set:
return "ERROR: oai_set_id %s seems invalid" % oai_set_id
oai_set_spec = oai_set[0][1]
oai_set_name = oai_set[0][2]
oai_set_collection = oai_set[0][3]
oai_set_description = oai_set[0][4]
oai_set_definition = ''
oai_set_reclist = ''
oai_set_p1 = oai_set[0][5]
oai_set_f1 = oai_set[0][6]
oai_set_m1 = oai_set[0][7]
oai_set_p2 = oai_set[0][8]
oai_set_f2 = oai_set[0][9]
oai_set_m2 = oai_set[0][10]
oai_set_p3 = oai_set[0][11]
oai_set_f3 = oai_set[0][12]
oai_set_m3 = oai_set[0][13]
oai_set_op1 = oai_set[0][14]
oai_set_op2 = oai_set[0][15]
text = input_form(oai_set_name,
oai_set_spec,
oai_set_collection,
oai_set_description,
oai_set_definition,
oai_set_reclist,
oai_set_p1,
oai_set_f1,
oai_set_m1,
oai_set_p2,
oai_set_f2,
oai_set_m2,
oai_set_p3,
oai_set_f3,
oai_set_m3,
oai_set_op1,
oai_set_op2,
ln=ln)
out += extended_input_form(action="editset",
text=text,
button="Modify",
oai_set_id=oai_set_id,
ln=ln,
func=1)
if func in [1, "1"]:
res = modify_oai_set(oai_set_id,
oai_set_name,
oai_set_spec,
oai_set_collection,
oai_set_description,
oai_set_p1,
oai_set_f1,
oai_set_m1,
oai_set_p2,
oai_set_f2,
oai_set_m2,
oai_set_p3,
oai_set_f3,
oai_set_m3,
oai_set_op1,
oai_set_op2)
out += "<br />"
if res[0] == 1:
out += bibharvest_templates.tmpl_print_info(ln,
"OAI set definition #%s edited." % oai_set_id)
out += "<br />"
else:
out += bibharvest_templates.tmpl_print_warning(ln,
"A problem was encountered: <br/>" + cgi.escape(res[1]))
out += "<br />"
out += "<br />"
out += create_html_link(urlbase=oai_rep_admin_url + \
"/index",
urlargd={'ln': ln},
link_label=_("Return to main selection"))
return nice_box("", out)
def perform_request_delset(oai_set_id=None, ln=CFG_SITE_LANG,
callback='yes', func=0):
"""creates html form to delete an OAI set"""
_ = gettext_set_language(ln)
out = ""
if oai_set_id:
oai_set = get_oai_set(oai_set_id)
if not oai_set:
return "ERROR: oai_set_id %s seems invalid" % oai_set_id
nameset = (oai_set[0][1])
pagetitle = """Delete OAI set: %s""" % cgi.escape(nameset)
if func in ["0", 0]:
oai_set = get_oai_set(oai_set_id)
oai_set_spec = oai_set[0][1]
oai_set_name = oai_set[0][2]
oai_set_collection = oai_set[0][3]
oai_set_description = oai_set[0][4]
oai_set_definition = ''
oai_set_reclist = ''
oai_set_p1 = oai_set[0][5]
oai_set_f1 = oai_set[0][6]
oai_set_m1 = oai_set[0][7]
oai_set_p2 = oai_set[0][8]
oai_set_f2 = oai_set[0][9]
oai_set_m2 = oai_set[0][10]
oai_set_p3 = oai_set[0][11]
oai_set_f3 = oai_set[0][12]
oai_set_m3 = oai_set[0][13]
oai_set_op1 = oai_set[0][14]
oai_set_op2 = oai_set[0][15]
if oai_set:
question = """Do you want to delete the OAI definition #%s?""" % oai_set_id
text = bibharvest_templates.tmpl_print_info(ln, question)
text += "<br /><br /><br />"
text += pagebody_text(
cgi.escape("%s-%s-%s-%s-%s-%s-%s-%s-%s-%s-%s-%s-%s-%s" % \
(oai_set_spec,
oai_set_name,
oai_set_collection,
oai_set_p1,
oai_set_f1,
oai_set_m1,
oai_set_op1,
oai_set_p2,
oai_set_f2,
oai_set_m2,
oai_set_op2,
oai_set_p3,
oai_set_f3,
oai_set_m3)))
out += createform(action="delset",
text=text,
button="Delete",
oai_set_id=oai_set_id,
func=1)
else:
return bibharvest_templates.tmpl_print_info(ln, "OAI set does not exist.")
elif func in ["1", 1]:
res = delete_oai_set(oai_set_id)
if res[0] == 1:
out += bibharvest_templates.tmpl_print_info(ln, "OAI set definition #%s deleted." % oai_set_id)
out += "<br />"
else:
pass
out += "<br /><br />"
out += create_html_link(urlbase=oai_rep_admin_url + \
"/index",
urlargd={'ln': ln},
link_label=_("Return to main selection"))
return nice_box("", out)
def get_oai_set(id=''):
"""Returns a row parameters for a given id"""
sets = []
sql = "SELECT id, setSpec, setName, setCollection, setDescription, p1,f1,m1, p2,f2,m2, p3,f3,m3, setDefinition FROM oaiREPOSITORY"
try:
if id:
sql += " WHERE id=%s" % id
sql += " ORDER BY setSpec asc"
res = run_sql(sql)
for row in res:
set = ['']*16
set[0] = row[0]
set[1] = row[1]
set[2] = row[2]
params = parse_set_definition(row[14])
set[3] = params.get('c', '')
set[5] = params.get('p1', '')
set[6] = params.get('f1', '')
set[7] = params.get('m1', '')
set[8] = params.get('p2', '')
set[9] = params.get('f2', '')
set[10] = params.get('m2', '')
set[11] = params.get('p3', '')
set[12] = params.get('f3', '')
set[13] = params.get('m3', '')
set[14] = params.get('op1', 'a')
set[15] = params.get('op2', 'a')
sets.append(set)
return sets
except StandardError, e:
return str(e)
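# Illustrative note (derived from the destructuring in perform_request_index
# above, not part of the original file): each row returned by get_oai_set()
# is a 16-element list laid out as
#   [id, setSpec, setName, setCollection, setDescription (left empty),
#    p1, f1, m1, p2, f2, m2, p3, f3, m3, op1, op2]
# which is why callers index e.g. oai_set[0][5] to read p1.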
def modify_oai_set(oai_set_id, oai_set_name, oai_set_spec,
oai_set_collection, oai_set_description,
oai_set_p1, oai_set_f1,oai_set_m1, oai_set_p2,
oai_set_f2, oai_set_m2, oai_set_p3, oai_set_f3,
oai_set_m3, oai_set_op1, oai_set_op2):
"""Modifies a row's parameters"""
try:
set_definition = 'c=' + oai_set_collection + ';' + \
'p1=' + oai_set_p1 + ';' + \
'f1=' + oai_set_f1 + ';' + \
'm1=' + oai_set_m1 + ';' + \
'op1='+ oai_set_op1 + ';' + \
'p2=' + oai_set_p2 + ';' + \
'f2=' + oai_set_f2 + ';' + \
'm2=' + oai_set_m2 + ';' + \
'op2='+ oai_set_op2 + ';' + \
'p3=' + oai_set_p3 + ';' + \
'f3=' + oai_set_f3 + ';' + \
'm3=' + oai_set_m3 + ';'
res = run_sql("""UPDATE oaiREPOSITORY SET
setName=%s,
setSpec=%s,
setCollection=%s,
setDescription=%s,
setDefinition=%s,
p1=%s,
f1=%s,
m1=%s,
p2=%s,
f2=%s,
m2=%s,
p3=%s,
f3=%s,
m3=%s
WHERE id=%s""",
(oai_set_name,
oai_set_spec,
oai_set_collection,
oai_set_description,
set_definition,
oai_set_p1,
oai_set_f1,
oai_set_m1,
oai_set_p2,
oai_set_f2,
oai_set_m2,
oai_set_p3,
oai_set_f3,
oai_set_m3,
oai_set_id))
return (1, "")
except StandardError, e:
return (0, str(e))
def add_oai_set(oai_set_name, oai_set_spec, oai_set_collection,
oai_set_description, oai_set_definition,
oai_set_reclist, oai_set_p1, oai_set_f1,oai_set_m1,
oai_set_p2, oai_set_f2,oai_set_m2, oai_set_p3,
oai_set_f3, oai_set_m3, oai_set_op1, oai_set_op2):
"""Add a definition into the OAI Repository"""
try:
set_definition = 'c=' + oai_set_collection + ';' + \
'p1=' + oai_set_p1 + ';' + \
'f1=' + oai_set_f1 + ';' + \
'm1=' + oai_set_m1 + ';' + \
'op1='+ oai_set_op1 + ';' + \
'p2=' + oai_set_p2 + ';' + \
'f2=' + oai_set_f2 + ';' + \
'm2=' + oai_set_m2 + ';' + \
'op2='+ oai_set_op2 + ';' + \
'p3=' + oai_set_p3 + ';' + \
'f3=' + oai_set_f3 + ';' + \
'm3=' + oai_set_m3 + ';'
res = run_sql("""INSERT INTO oaiREPOSITORY (id, setName, setSpec,
setCollection, setDescription, setDefinition,
setRecList, p1, f1, m1, p2, f2, m2, p3, f3, m3)
VALUES (0, %s, %s, %s, %s, %s, NULL, %s, %s, %s,
%s, %s, %s, %s, %s, %s)""",
(oai_set_name, oai_set_spec, oai_set_collection,
oai_set_description, set_definition, oai_set_p1,
oai_set_f1, oai_set_m1, oai_set_p2, oai_set_f2,
oai_set_m2, oai_set_p3, oai_set_f3, oai_set_m3))
return (1, "")
except StandardError, e:
return (0, e)
def delete_oai_set(oai_set_id):
""""""
try:
res = run_sql("DELETE FROM oaiREPOSITORY WHERE id=%s" % oai_set_id)
return (1, "")
except StandardError, e:
return (0, e)
def drop_down_menu(boxname, content):
"""
Returns the code of a drop down menu.
Parameters:
boxname - *str* name of the input form
content - *list(tuple3)* the content of the list. List of items
as tuple3 with:
- *str* value of the item
- *bool* if item is selected of not
- *str* label of the item (displayed value)
"""
text = "<select name=\"%s\">" % boxname
for (value, selectedflag, txt) in content:
text += "<option value=\""
text += "%s\"" % value
if selectedflag:
text += ' selected="selected"'
text += ">%s</option>" % txt
text += "</select>"
return text
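# Illustrative usage sketch (hypothetical values, not part of the original
# module): the ``content`` argument is a list of [value, selectedflag, label]
# triples, so
#   drop_down_menu("oai_set_f1",
#                  [['title', '', 'Title'],
#                   ['author', 'selected', 'Author']])
# renders a <select name="oai_set_f1"> with the "Author" option pre-selected.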
def create_drop_down_menu_content(sql):
"""
Create the content to be used in the drop_down_menu(..) function
from an SQL statement
"""
content = []
res = run_sql(sql)
for item in res:
tmp_list = []
tmp_list.append(item)
tmp_list.append("")
tmp_list.append(item)
content.append(tmp_list)
return content
def createform(action="", text="", button="func", cnfrm='', **hidden):
""""""
out = '<form action="%s" method="post">\n' % (action, )
out += text
if cnfrm:
out += ' <input type="checkbox" name="func" value="1"/>'
for key in hidden.keys():
if type(hidden[key]) is list:
for value in hidden[key]:
out += ' <input type="hidden" name="%s" value="%s"/>\n' % (key, value)
else:
out += ' <input type="hidden" name="%s" value="%s"/>\n' % (key, hidden[key])
out += ' <input class="adminbutton" type="submit" value="%s"/>\n' % (button, )
out += '</form>\n'
return out
def input_text(ln, title, name, value):
""""""
if name is None:
name = ""
if value is None:
value = ""
text = """<table><tr><td width="100%%"><span class="adminlabel">%s</span></td>""" % title
text += """<td align="left">
<input class="admin_w200" type="text" name="%s" value="%s" />
</td></tr></table>""" % \
(cgi.escape(name, 1), cgi.escape(value, 1))
return text
def pagebody_text(title):
""""""
text = """<span class="admintd">%s</span>""" % title
return text
def bar_text(title):
""""""
text = """<span class="adminlabel">%s</span>""" % title
return text
def input_form(oai_set_name, oai_set_spec, oai_set_collection,
oai_set_description, oai_set_definition,
oai_set_reclist, oai_set_p1, oai_set_f1,oai_set_m1,
oai_set_p2, oai_set_f2,oai_set_m2, oai_set_p3,
oai_set_f3, oai_set_m3, oai_set_op1, oai_set_op2,
ln=CFG_SITE_LANG):
"""returns the standard settings form"""
modes = {
'r' : 'Regular Expression',
'a' : 'All of the words',
'y' : 'Any of the words',
'e' : 'Exact phrase',
'p' : 'Partial phrase'
}
mode_dropdown = [['r', '', modes['r']],
['e', '', modes['e']],
['p', '', modes['p']],
['a', '', modes['a']],
['y', '', modes['y']],
['', '', '']]
operators = {
'a' : 'AND',
'o' : 'OR',
'n' : 'AND NOT',
}
mode_operators_1 = [['a', '', operators['a']],
['o', '', operators['o']],
['n', '', operators['n']],
['a', '', '']]
mode_operators_2 = [['a', '', operators['a']],
['o', '', operators['o']],
['n', '', operators['n']],
['a', '', '']]
text = "<br />"
text += "<table><tr><td>"
text += input_text(ln = ln, title = "OAI Set spec:",
name = "oai_set_spec", value = oai_set_spec)
text += '</td><td colspan="3"><small><small><em>Optional: leave blank if not needed</em> [<a href="http://www.openarchives.org/OAI/openarchivesprotocol.html#Set" target="_blank">?</a>]</small></small>'
text += "</td></tr><tr><td>"
text += input_text(ln = ln,
title = "OAI Set name:",
name = "oai_set_name", value = oai_set_name)
text += '</td><td colspan="3"><small><small><em>Optional: leave blank if not needed</em> [<a href="http://www.openarchives.org/OAI/openarchivesprotocol.html#Set" target="_blank">?</a>]</small></small>'
text += "</td></tr><tr><td> </td></tr><tr><td>"
text += '</td></tr><tr><td colspan="4">Choose below the search query that defines the records that belong to this set:</td></tr><tr><td>'
text += "</td></tr><tr><td> </td></tr><tr><td>"
# text += input_text(ln = ln, title = "OAI Set description", name = "oai_set_description", value = oai_set_description)
#text += "</td><td colspan=2>"
#menu = create_drop_down_menu_content("SELECT distinct(name) from collection")
#menu.append(['','',''])
#if (oai_set_collection):
# menu.append([oai_set_collection,'selected',oai_set_collection])
#else:
# menu.append(['','selected','Collection'])
text += input_text(ln = ln, title = "Collection(s):",
name="oai_set_collection",
value=oai_set_collection)
#text += drop_down_menu("oai_set_collection", menu)
text += '</td><td colspan="3"><small><small>Eg:</small> <code>Published Articles, Preprints, Theses</code><br/><small><em>(collections <b>identifiers</b>, not collections names/translations).</em></small></small></td></tr><tr><td>'
text += input_text(ln = ln, title = "Phrase:", name =
"oai_set_p1", value = oai_set_p1)
text += "</td><td>"
fields = create_drop_down_menu_content("SELECT distinct(code) from field")
fields.append(['', '', ''])
if (oai_set_f1):
fields.append([oai_set_f1, 'selected', oai_set_f1])
else:
fields.append(['', 'selected', 'Field'])
if (oai_set_m1):
mode_dropdown_m1 = [[oai_set_m1, 'selected', modes[oai_set_m1]]]
else:
mode_dropdown_m1 = [['', 'selected', 'Mode']]
text += drop_down_menu("oai_set_f1", fields)
text += "</td><td>"
text += drop_down_menu("oai_set_m1", mode_dropdown + mode_dropdown_m1)
text += "</td><td>"
if (oai_set_op1):
mode_operators_1.append([oai_set_op1, 'selected', operators[oai_set_op1]])
else:
mode_operators_1.append(['', 'selected', 'Operators'])
text += drop_down_menu("oai_set_op1", mode_operators_1)
text += "</td></tr><tr><td>"
text += input_text(ln = ln, title = "Phrase:", name = "oai_set_p2", value = oai_set_p2)
text += "</td><td>"
fields = create_drop_down_menu_content("SELECT distinct(code) from field")
fields.append(['', '', ''])
if (oai_set_f2):
fields.append([oai_set_f2, 'selected', oai_set_f2])
else:
fields.append(['', 'selected', 'Field'])
if (oai_set_m2):
mode_dropdown_m2 = [[oai_set_m2, 'selected', modes[oai_set_m2]]]
else:
mode_dropdown_m2 = [['', 'selected', 'Mode']]
text += drop_down_menu("oai_set_f2", fields)
text += "</td><td>"
text += drop_down_menu("oai_set_m2", mode_dropdown + mode_dropdown_m2)
text += "</td><td>"
if (oai_set_op2):
mode_operators_2.append([oai_set_op2, 'selected', operators[oai_set_op2]])
else:
mode_operators_2.append(['', 'selected', 'Operators'])
text += drop_down_menu("oai_set_op2", mode_operators_2)
text += "</td></tr><tr><td>"
text += input_text(ln = ln, title = "Phrase:", name = "oai_set_p3", value = oai_set_p3)
text += "</td><td>"
fields = create_drop_down_menu_content("SELECT distinct(code) from field")
fields.append(['', '', ''])
if (oai_set_f3):
fields.append([oai_set_f3, 'selected', oai_set_f3])
else:
fields.append(['', 'selected', 'Field'])
if (oai_set_m3):
mode_dropdown_m3 = [[oai_set_m3, 'selected', modes[oai_set_m3]]]
else:
mode_dropdown_m3 = [['', 'selected', 'Mode']]
text += drop_down_menu("oai_set_f3", fields)
text += "</td><td>"
text += drop_down_menu("oai_set_m3", mode_dropdown + mode_dropdown_m3)
text += "</td></tr></table>"
return text
def check_user(req, role, adminarea=2, authorized=0):
""""""
(auth_code, auth_message) = access_manager.acc_authorize_action(req, role)
if not authorized and auth_code != 0:
return ("false", auth_message)
return ("", auth_message)
def transform_tuple(header, tuple, start='', end='', extracolumn=''):
""""""
align = []
try:
firstrow = tuple[0]
if type(firstrow) in [int, long]:
align = ['admintdright']
elif type(firstrow) in [str, dict]:
align = ['admintdleft']
else:
for item in firstrow:
if type(item) is int:
align.append('admintdright')
else:
align.append('admintdleft')
except IndexError:
firstrow = []
tblstr = ''
for h in header:
tblstr += ' <th class="adminheader">%s</th>\n' % (h, )
if tblstr: tblstr = ' <tr>\n%s\n </tr>\n' % (tblstr, )
tblstr = start + '<table class="admin_wvar_nomargin">\n' + tblstr
try:
extra = '<tr>'
if type(firstrow) not in [int, long, str, dict]:
for i in range(len(firstrow)): extra += '<td class="%s">%s</td>\n' % (align[i], firstrow[i])
else:
extra += ' <td class="%s">%s</td>\n' % (align[0], firstrow)
#extra += '<td rowspan="%s" style="vertical-align: top">\n%s\n</td>\n</tr>\n' % (len(tuple), extracolumn)
extra += '</tr>\n'
except IndexError:
extra = ''
tblstr += extra
j = 1
for row in tuple[1:]:
style = ''
if j % 2:
style = ' style="background-color: rgb(235, 247, 255);"'
j += 1
tblstr += ' <tr%s>\n' % style
if type(row) not in [int, long, str, dict]:
for i in range(len(row)): tblstr += '<td class="%s" style="padding:5px 10px;">%s</td>\n' % (align[i], row[i])
else:
tblstr += ' <td class="%s" style="padding:5px 10px;">%s</td>\n' % (align[0], row)
tblstr += ' </tr> \n'
tblstr += '</table> \n '
tblstr += end
return tblstr
def nice_box(header='', content='', cls="admin_wvar"):
"""
Embed the content into a box with given header
Parameters:
header - *str* header of the box
datalist - *str* the content of the box
cls - *str* the class of the box
"""
out = '''
<table class="%s" width="95%%">
<thead>
<tr>
<th class="adminheaderleft" colspan="1">%s</th>
</tr>
</thead>
<tbody>
<tr>
<td style="vertical-align: top; margin-top: 5px; width: 100%%;">
%s
</td>
</tr>
</tbody>
</table>
''' % (cls, header, content)
return out
def extended_input_form(action="", text="", button="func", cnfrm='',
**hidden):
""""""
out = '<form action="%s" method="post">\n' % (action, )
out += '<table>\n<tr><td style="vertical-align: top">'
out += text
if cnfrm:
out += ' <input type="checkbox" name="func" value="1"/>'
for key in hidden.keys():
if type(hidden[key]) is list:
for value in hidden[key]:
out += ' <input type="hidden" name="%s" value="%s"/>\n' % (key, value)
else:
out += ' <input type="hidden" name="%s" value="%s"/>\n' % (key, hidden[key])
out += '</td><td style="vertical-align: bottom">'
out += ' <input class="adminbutton" type="submit" value="%s"/>\n' % (button, )
out += '</td></tr></table>'
out += '</form>\n'
return out
| gpl-2.0 | -2,565,258,604,585,858,000 | 2,682,983,186,729,138,000 | 36.958333 | 235 | 0.449635 | false |
sbailey/redrock | py/redrock/fitz.py | 1 | 7113 | """
redrock.fitz
============
Functions for fitting minima of chi^2 results.
"""
from __future__ import absolute_import, division, print_function
import numpy as np
import scipy.constants
import scipy.special
from . import constants
from .rebin import rebin_template
from .zscan import calc_zchi2_one, spectral_data
from .zwarning import ZWarningMask as ZW
from .utils import transmission_Lyman
def get_dv(z, zref):
"""Returns velocity difference in km/s for two redshifts
Args:
z (float): redshift for comparison.
zref (float): reference redshift.
Returns:
(float): the velocity difference.
"""
c = (scipy.constants.speed_of_light/1000.) #- km/s
dv = c * (z - zref) / (1.0 + zref)
return dv
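# Worked example (illustrative numbers only): with c ~ 299792.458 km/s,
# get_dv(z=1.001, zref=1.0) = c * 0.001 / 2.0 ~ 150 km/s, i.e. a redshift
# offset of 1e-3 at z=1 corresponds to roughly 150 km/s.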
def find_minima(x):
"""Return indices of local minima of x, including edges.
The indices are sorted small to large.
Note:
this is somewhat conservative in the case of repeated values:
find_minima([1,1,1,2,2,2]) -> [0,1,2,4,5]
Args:
x (array-like): The data array.
Returns:
(array): The indices.
"""
x = np.asarray(x)
ii = np.where(np.r_[True, x[1:]<=x[:-1]] & np.r_[x[:-1]<=x[1:], True])[0]
jj = np.argsort(x[ii])
return ii[jj]
def minfit(x, y):
"""Fits y = y0 + ((x-x0)/xerr)**2
See redrock.zwarning.ZWarningMask.BAD_MINFIT for zwarn failure flags
Args:
x (array): x values.
y (array): y values.
Returns:
(tuple): (x0, xerr, y0, zwarn) where zwarn=0 is good fit.
"""
if len(x) < 3:
return (-1,-1,-1,ZW.BAD_MINFIT)
try:
#- y = a x^2 + b x + c
a,b,c = np.polyfit(x,y,2)
except np.linalg.LinAlgError:
return (-1,-1,-1,ZW.BAD_MINFIT)
if a == 0.0:
return (-1,-1,-1,ZW.BAD_MINFIT)
#- recast as y = y0 + ((x-x0)/xerr)^2
x0 = -b / (2*a)
y0 = -(b**2) / (4*a) + c
zwarn = 0
if (x0 <= np.min(x)) or (np.max(x) <= x0):
zwarn |= ZW.BAD_MINFIT
if (y0<=0.):
zwarn |= ZW.BAD_MINFIT
if a > 0.0:
xerr = 1 / np.sqrt(a)
else:
xerr = 1 / np.sqrt(-a)
zwarn |= ZW.BAD_MINFIT
return (x0, xerr, y0, zwarn)
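# Illustrative sketch (hypothetical values, not from the redrock test suite):
# three points sampled around a parabolic chi^2 minimum recover its location,
# width and depth:
#   x = np.array([0.9, 1.0, 1.1])
#   y = np.array([4.0, 1.0, 4.0])        # y = 1 + ((x - 1.0) / 0.0577)**2
#   x0, xerr, y0, zwarn = minfit(x, y)   # x0 ~ 1.0, xerr ~ 0.058, y0 ~ 1.0, zwarn == 0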
def fitz(zchi2, redshifts, spectra, template, nminima=3, archetype=None):
"""Refines redshift measurement around up to nminima minima.
TODO:
if there are fewer than nminima minima, consider padding.
Args:
zchi2 (array): chi^2 values for each redshift.
redshifts (array): the redshift values.
spectra (list): list of Spectrum objects at different wavelengths
grids.
template (Template): the template for this fit.
        nminima (int): the number of minima to consider.
        archetype (optional): if not None, used to refit the best model at
            each minimum via its get_best_archetype() method.
Returns:
Table: the fit parameters for the minima.
"""
assert len(zchi2) == len(redshifts)
nbasis = template.nbasis
# Build dictionary of wavelength grids
dwave = { s.wavehash:s.wave for s in spectra }
    if archetype is not None:
# TODO: set this as a parameter
deg_legendre = 3
wave = np.concatenate([ w for w in dwave.values() ])
wave_min = wave.min()
wave_max = wave.max()
legendre = { hs:np.array([scipy.special.legendre(i)( (w-wave_min)/(wave_max-wave_min)*2.-1. ) for i in range(deg_legendre)]) for hs, w in dwave.items() }
(weights, flux, wflux) = spectral_data(spectra)
results = list()
for imin in find_minima(zchi2):
if len(results) == nminima:
break
        #- Skip this minimum if it is within constants.max_velo_diff km/s of a
        #  previously found minimum (dv is in km/s)
zprev = np.array([tmp['z'] for tmp in results])
dv = get_dv(z=redshifts[imin],zref=zprev)
if np.any(np.abs(dv) < constants.max_velo_diff):
continue
#- Sample more finely around the minimum
ilo = max(0, imin-1)
ihi = min(imin+1, len(zchi2)-1)
zz = np.linspace(redshifts[ilo], redshifts[ihi], 15)
nz = len(zz)
zzchi2 = np.zeros(nz, dtype=np.float64)
zzcoeff = np.zeros((nz, nbasis), dtype=np.float64)
for i, z in enumerate(zz):
binned = rebin_template(template, z, dwave)
for k in list(dwave.keys()):
T = transmission_Lyman(z,dwave[k])
for vect in range(binned[k].shape[1]):
binned[k][:,vect] *= T
zzchi2[i], zzcoeff[i] = calc_zchi2_one(spectra, weights, flux,
wflux, binned)
#- fit parabola to 3 points around minimum
i = min(max(np.argmin(zzchi2),1), len(zz)-2)
zmin, sigma, chi2min, zwarn = minfit(zz[i-1:i+2], zzchi2[i-1:i+2])
try:
binned = rebin_template(template, zmin, dwave)
for k in list(dwave.keys()):
T = transmission_Lyman(zmin,dwave[k])
for vect in range(binned[k].shape[1]):
binned[k][:,vect] *= T
coeff = calc_zchi2_one(spectra, weights, flux, wflux,
binned)[1]
except ValueError as err:
if zmin<redshifts[0] or redshifts[-1]<zmin:
#- beyond redshift range can be invalid for template
coeff = np.zeros(template.nbasis)
zwarn |= ZW.Z_FITLIMIT
zwarn |= ZW.BAD_MINFIT
else:
#- Unknown problem; re-raise error
raise err
zbest = zmin
zerr = sigma
#- Initial minimum or best fit too close to edge of redshift range
if zbest < redshifts[1] or zbest > redshifts[-2]:
zwarn |= ZW.Z_FITLIMIT
if zmin < redshifts[1] or zmin > redshifts[-2]:
zwarn |= ZW.Z_FITLIMIT
#- parabola minimum outside fit range; replace with min of scan
if zbest < zz[0] or zbest > zz[-1]:
zwarn |= ZW.BAD_MINFIT
            imin = np.where(zzchi2 == np.min(zzchi2))[0][0]
zbest = zz[imin]
chi2min = zzchi2[imin]
#- Skip this better defined minimum if it is within
#- constants.max_velo_diff km/s of a previous one
zprev = np.array([tmp['z'] for tmp in results])
dv = get_dv(z=zbest, zref=zprev)
if np.any(np.abs(dv) < constants.max_velo_diff):
continue
if archetype is None:
results.append(dict(z=zbest, zerr=zerr, zwarn=zwarn,
chi2=chi2min, zz=zz, zzchi2=zzchi2,
coeff=coeff))
else:
chi2min, coeff, fulltype = archetype.get_best_archetype(spectra,weights,flux,wflux,dwave,zbest,legendre)
results.append(dict(z=zbest, zerr=zerr, zwarn=zwarn,
chi2=chi2min, zz=zz, zzchi2=zzchi2,
coeff=coeff, fulltype=fulltype))
#- Sort results by chi2min; detailed fits may have changed order
ii = np.argsort([tmp['chi2'] for tmp in results])
results = [results[i] for i in ii]
#- Convert list of dicts -> Table
from astropy.table import Table
results = Table(results)
assert len(results) > 0
return results
| bsd-3-clause | -7,828,482,382,180,889,000 | 6,344,688,014,880,422,000 | 28.392562 | 161 | 0.566568 | false |
ludobox/ludobox | server/ludobox/history.py | 2 | 5286 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Record and manage file changes and keep track of history.
Key concepts are :
- events : everytime somethin is changed, we use this event
- history : the whole thread of events that applies to a page
For each event, a unique SHA id is created (like git https://stackoverflow.com/questions/29106996/git-what-is-a-git-commit-id )
"""
import hashlib
import time
import json
from flask import current_app
from jsonpatch import make_patch, JsonPatch
# TODO : implement state changes (draft -> reviewed, etc.)
event_types = ["create", "update", "delete", "change_state"]
# hashing changes to create an id
sha_1 = hashlib.sha1()
def new_event(event_type, content, user=None):
if event_type not in event_types:
raise ValueError(
"Event type should be one of the following %s"%", ".join(event_types))
if type(content) is not dict:
raise ValueError(
"Event content should be a JSON-compatible object.")
# timestamp
ts = int(time.time())
# generate unique ID using the whole content
sha_1.update("%s - %s - %s - %s"%(event_type, content, user, ts) )
sha_id = sha_1.hexdigest()
return {
"type" : event_type,
"content" : content,
"user" : user,
"id" : sha_id,
"ts" : ts
}
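# Illustrative sketch (hypothetical content, not part of the original module):
#   event = new_event("create", {"title": "My Game"}, user="alice")
# returns a dict of the form
#   {"type": "create", "content": {"title": "My Game"}, "user": "alice",
#    "id": "<40-character sha1 hex digest>", "ts": <unix timestamp>}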
def is_valid_event(event):
assert type(event) is dict
assert type(event["id"]) is str or unicode
assert len(event["id"]) is 40
assert type(event["content"]) is dict
assert type(event["ts"]) is int
assert event["type"] in event_types
return True
def add_event_to_history(content_previous_version, event):
"""
Does 3 things :
- create threaded history of events if empty
- add current event to history
- replace old content by the new
"""
assert is_valid_event(event)
# immutable: clone original reference
content_with_updated_history = content_previous_version.copy()
# init history if empty
if "history" not in content_with_updated_history.keys():
content_with_updated_history["history"] = []
# re-apply changes and store last version
if event["type"] == "update":
content_with_updated_history = apply_update_patch(content_with_updated_history, event)
elif event["type"] == "change_state":
new_state = event["content"]["to"]
content_with_updated_history["state"] = new_state
# add event to history
content_with_updated_history["history"].append(event)
current_app.logger.debug("Event : %s - %s"%(event["type"], content_with_updated_history))
return content_with_updated_history
def make_create_event(content, user=None):
# make sure there is no prior history
if "history" in content.keys() and len(content["history"]) !=0:
raise ValueError("You are trying to use the CREATE action on a game that already has an history.")
    # check if there are actual changes
if content is None or len(content.keys()) == 0:
return None
# create a new event and add it to history
event = new_event("create", content.copy(), user)
return event
def make_update_event(old_content, new_content, user=None):
# make things immutable
new = new_content.copy()
old = old_content.copy()
# ignore keys we don't want to track in the history events
ignored_keys = ["history", "files", "errors", "has_errors"]
for k in ignored_keys:
new.pop(k, None)
old.pop(k, None)
# create json diff
patch = make_patch(new, old)
    # check if there are actual changes
    if not len(list(patch)):
return None
# create a new event and add it to history
event = new_event("update", { "changes" : list(patch) }, user)
return event
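# Illustrative sketch (hypothetical values): an update event stores only the
# JSON-patch operations between the two versions, e.g. changing the title of
# a game yields an event whose content looks like
#   {"changes": [{"op": "replace", "path": "/title", "value": "..."}]}
# (ignored keys such as "history" and "files" never appear in the diff).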
def make_update_state_event(old_content, updated_content_state, user=None):
"""Store an event reflecting content update"""
original_state = old_content["state"]
state_change = { "from" : original_state, "to" : updated_content_state}
# create a new event and add it to history
event = new_event("change_state", state_change, user)
return event
def apply_update_patch(content, event):
"""Apply JSON diff patches to content"""
patch = JsonPatch(event["content"]["changes"])
final_content = patch.apply(content)
return final_content
def apply_history(history, selected_id):
"""
Re-apply the chain of events from the history until selected id
returns the content *without* the history
"""
# check the hash format
assert type(selected_id) is str
    assert len(selected_id) == 40
# filter history
final_content = {}
# run again the course of events
for event in history:
if not is_valid_event(event) :
raise ValueError("Event does not follow a proper format.")
# check event type
if event["type"] == "create": # init with full content
final_content = event["content"]
elif event["type"] == "update":
final_content = apply_update_patch(final_content, event)
elif event["type"] == "change_state":
new_state = event["content"]["to"]
# run until last is
if event["id"] == selected_id :
return final_content
| agpl-3.0 | 2,758,439,085,201,736,000 | -6,844,218,225,425,901,000 | 29.37931 | 127 | 0.644344 | false |
sysadmind/ansible-modules-extras | cloud/openstack/os_user_role.py | 24 | 6078 | #!/usr/bin/python
# Copyright (c) 2016 IBM
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
from distutils.version import StrictVersion
DOCUMENTATION = '''
---
module: os_user_role
short_description: Associate OpenStack Identity users and roles
extends_documentation_fragment: openstack
author: "Monty Taylor (@emonty), David Shrewsbury (@Shrews)"
version_added: "2.1"
description:
- Grant and revoke roles in either project or domain context for
OpenStack Identity Users.
options:
role:
description:
- Name or ID for the role.
required: true
user:
description:
- Name or ID for the user. If I(user) is not specified, then
I(group) is required. Both may not be specified.
required: false
default: null
group:
description:
- Name or ID for the group. Valid only with keystone version 3.
If I(group) is not specified, then I(user) is required. Both
may not be specified.
required: false
default: null
project:
description:
        - Name or ID of the project to scope the role association to.
If you are using keystone version 2, then this value is required.
required: false
default: null
domain:
description:
- ID of the domain to scope the role association to. Valid only with
keystone version 3, and required if I(project) is not specified.
required: false
default: null
state:
description:
- Should the roles be present or absent on the user.
choices: [present, absent]
default: present
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Grant an admin role on the user admin in the project project1
- os_user_role:
cloud: mycloud
user: admin
role: admin
project: project1
# Revoke the admin role from the user barney in the newyork domain
- os_user_role:
cloud: mycloud
state: absent
user: barney
role: admin
domain: newyork
'''
RETURN = '''
#
'''
def _system_state_change(state, assignment):
if state == 'present' and not assignment:
return True
elif state == 'absent' and assignment:
return True
return False
def _build_kwargs(user, group, project, domain):
kwargs = {}
if user:
kwargs['user'] = user
if group:
kwargs['group'] = group
if project:
kwargs['project'] = project
if domain:
kwargs['domain'] = domain
return kwargs
def main():
argument_spec = openstack_full_argument_spec(
role=dict(required=True),
user=dict(required=False),
group=dict(required=False),
project=dict(required=False),
domain=dict(required=False),
state=dict(default='present', choices=['absent', 'present']),
)
module_kwargs = openstack_module_kwargs(
required_one_of=[
['user', 'group']
])
module = AnsibleModule(argument_spec,
supports_check_mode=True,
**module_kwargs)
# role grant/revoke API introduced in 1.5.0
if not HAS_SHADE or (StrictVersion(shade.__version__) < StrictVersion('1.5.0')):
module.fail_json(msg='shade 1.5.0 or higher is required for this module')
role = module.params.pop('role')
user = module.params.pop('user')
group = module.params.pop('group')
project = module.params.pop('project')
domain = module.params.pop('domain')
state = module.params.pop('state')
try:
cloud = shade.operator_cloud(**module.params)
filters = {}
r = cloud.get_role(role)
if r is None:
module.fail_json(msg="Role %s is not valid" % role)
filters['role'] = r['id']
if user:
u = cloud.get_user(user)
if u is None:
module.fail_json(msg="User %s is not valid" % user)
filters['user'] = u['id']
if group:
g = cloud.get_group(group)
if g is None:
module.fail_json(msg="Group %s is not valid" % group)
filters['group'] = g['id']
if project:
p = cloud.get_project(project)
if p is None:
module.fail_json(msg="Project %s is not valid" % project)
filters['project'] = p['id']
if domain:
d = cloud.get_domain(domain)
if d is None:
module.fail_json(msg="Domain %s is not valid" % domain)
filters['domain'] = d['id']
assignment = cloud.list_role_assignments(filters=filters)
if module.check_mode:
module.exit_json(changed=_system_state_change(state, assignment))
changed = False
if state == 'present':
if not assignment:
kwargs = _build_kwargs(user, group, project, domain)
cloud.grant_role(role, **kwargs)
changed = True
elif state == 'absent':
if assignment:
kwargs = _build_kwargs(user, group, project, domain)
cloud.revoke_role(role, **kwargs)
                changed = True
module.exit_json(changed=changed)
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e))
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 | 3,828,329,249,938,366,500 | 2,514,775,545,269,786,600 | 28.221154 | 84 | 0.610727 | false |
potsmaster/cinder | cinder/volume/drivers/dothill/dothill_client.py | 1 | 12318 | # Copyright 2014 Objectif Libre
# Copyright 2015 DotHill Systems
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from hashlib import md5
import math
import time
from lxml import etree
from oslo_log import log as logging
import requests
import six
from cinder import exception
from cinder.i18n import _LE
LOG = logging.getLogger(__name__)
class DotHillClient(object):
def __init__(self, host, login, password, protocol, ssl_verify):
self._login = login
self._password = password
self._base_url = "%s://%s/api" % (protocol, host)
self._session_key = None
self.ssl_verify = ssl_verify
def _get_auth_token(self, xml):
"""Parse an XML authentication reply to extract the session key."""
self._session_key = None
tree = etree.XML(xml)
if tree.findtext(".//PROPERTY[@name='response-type']") == "success":
self._session_key = tree.findtext(".//PROPERTY[@name='response']")
def login(self):
"""Authenticates the service on the device."""
hash_ = "%s_%s" % (self._login, self._password)
if six.PY3:
hash_ = hash_.encode('utf-8')
hash_ = md5(hash_)
digest = hash_.hexdigest()
url = self._base_url + "/login/" + digest
try:
xml = requests.get(url, verify=self.ssl_verify)
except requests.exceptions.RequestException:
raise exception.DotHillConnectionError
self._get_auth_token(xml.text.encode('utf8'))
if self._session_key is None:
raise exception.DotHillAuthenticationError
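    # Illustrative note (sketch of the request made by login() above, with
    # made-up credentials): for ("manage", "!manage") the client computes
    # md5("manage_!manage") and issues
    #   GET <protocol>://<host>/api/login/<hex digest>
    # then stores the session key parsed from the XML reply.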
def _assert_response_ok(self, tree):
"""Parses the XML returned by the device to check the return code.
Raises a DotHillRequestError error if the return code is not 0.
"""
return_code = tree.findtext(".//PROPERTY[@name='return-code']")
if return_code and return_code != '0':
raise exception.DotHillRequestError(
message=tree.findtext(".//PROPERTY[@name='response']"))
elif not return_code:
raise exception.DotHillRequestError(message="No status found")
def _build_request_url(self, path, *args, **kargs):
url = self._base_url + path
if kargs:
url += '/' + '/'.join(["%s/%s" % (k.replace('_', '-'), v)
for (k, v) in kargs.items()])
if args:
url += '/' + '/'.join(args)
return url
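    # Illustrative example (hypothetical arguments, not from the driver tests):
    #   _build_request_url("/create/volume", "vol1", size="10GB", pool="A")
    # yields a URL of the form
    #   <base>/create/volume/size/10GB/pool/A/vol1
    # (keyword segments come first, in dict order, with '_' mapped to '-').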
def _request(self, path, *args, **kargs):
"""Performs an HTTP request on the device.
Raises a DotHillRequestError if the device returned but the status is
not 0. The device error message will be used in the exception message.
If the status is OK, returns the XML data for further processing.
"""
url = self._build_request_url(path, *args, **kargs)
headers = {'dataType': 'api', 'sessionKey': self._session_key}
try:
xml = requests.get(url, headers=headers, verify=self.ssl_verify)
tree = etree.XML(xml.text.encode('utf8'))
except Exception:
raise exception.DotHillConnectionError
if path == "/show/volumecopy-status":
return tree
self._assert_response_ok(tree)
return tree
def logout(self):
url = self._base_url + '/exit'
try:
requests.get(url, verify=self.ssl_verify)
return True
except Exception:
return False
def create_volume(self, name, size, backend_name, backend_type):
# NOTE: size is in this format: [0-9]+GB
path_dict = {'size': size}
if backend_type == "linear":
path_dict['vdisk'] = backend_name
else:
path_dict['pool'] = backend_name
self._request("/create/volume", name, **path_dict)
return None
def delete_volume(self, name):
self._request("/delete/volumes", name)
def extend_volume(self, name, added_size):
self._request("/expand/volume", name, size=added_size)
def create_snapshot(self, volume_name, snap_name):
self._request("/create/snapshots", snap_name, volumes=volume_name)
def delete_snapshot(self, snap_name):
self._request("/delete/snapshot", "cleanup", snap_name)
def backend_exists(self, backend_name, backend_type):
try:
if backend_type == "linear":
path = "/show/vdisks"
else:
path = "/show/pools"
self._request(path, backend_name)
return True
except exception.DotHillRequestError:
return False
def _get_size(self, size):
return int(math.ceil(float(size) * 512 / (10 ** 9)))
def backend_stats(self, backend_name, backend_type):
stats = {'free_capacity_gb': 0,
'total_capacity_gb': 0}
prop_list = []
if backend_type == "linear":
path = "/show/vdisks"
prop_list = ["size-numeric", "freespace-numeric"]
else:
path = "/show/pools"
prop_list = ["total-size-numeric", "total-avail-numeric"]
tree = self._request(path, backend_name)
size = tree.findtext(".//PROPERTY[@name='%s']" % prop_list[0])
if size:
stats['total_capacity_gb'] = self._get_size(size)
size = tree.findtext(".//PROPERTY[@name='%s']" % prop_list[1])
if size:
stats['free_capacity_gb'] = self._get_size(size)
return stats
def list_luns_for_host(self, host):
tree = self._request("/show/host-maps", host)
return [int(prop.text) for prop in tree.xpath(
"//PROPERTY[@name='lun']")]
def _get_first_available_lun_for_host(self, host):
luns = self.list_luns_for_host(host)
lun = 1
while True:
if lun not in luns:
return lun
lun += 1
def map_volume(self, volume_name, connector, connector_element):
if connector_element == 'wwpns':
lun = self._get_first_available_lun_for_host(connector['wwpns'][0])
host = ",".join(connector['wwpns'])
else:
host = connector['initiator']
host_status = self._check_host(host)
if host_status != 0:
hostname = self._safe_hostname(connector['host'])
self._request("/create/host", hostname, id=host)
lun = self._get_first_available_lun_for_host(host)
self._request("/map/volume",
volume_name,
lun=str(lun),
host=host,
access="rw")
return lun
def unmap_volume(self, volume_name, connector, connector_element):
if connector_element == 'wwpns':
host = ",".join(connector['wwpns'])
else:
host = connector['initiator']
self._request("/unmap/volume", volume_name, host=host)
def get_active_target_ports(self):
ports = []
tree = self._request("/show/ports")
for obj in tree.xpath("//OBJECT[@basetype='port']"):
port = {prop.get('name'): prop.text
for prop in obj.iter("PROPERTY")
if prop.get('name') in
["port-type", "target-id", "status"]}
if port['status'] == 'Up':
ports.append(port)
return ports
def get_active_fc_target_ports(self):
return [port['target-id'] for port in self.get_active_target_ports()
if port['port-type'] == "FC"]
def get_active_iscsi_target_iqns(self):
return [port['target-id'] for port in self.get_active_target_ports()
if port['port-type'] == "iSCSI"]
def copy_volume(self, src_name, dest_name, same_bknd, dest_bknd_name):
self._request("/volumecopy",
dest_name,
dest_vdisk=dest_bknd_name,
source_volume=src_name,
prompt='yes')
if same_bknd == 0:
return
count = 0
while True:
tree = self._request("/show/volumecopy-status")
return_code = tree.findtext(".//PROPERTY[@name='return-code']")
if return_code == '0':
status = tree.findtext(".//PROPERTY[@name='progress']")
progress = False
if status:
progress = True
LOG.debug("Volume copy is in progress: %s", status)
if not progress:
LOG.debug("Volume copy completed: %s", status)
break
else:
if count >= 5:
LOG.error(_LE('Error in copying volume: %s'), src_name)
raise exception.DotHillRequestError
break
time.sleep(1)
count += 1
time.sleep(5)
def _check_host(self, host):
host_status = -1
tree = self._request("/show/hosts")
for prop in tree.xpath("//PROPERTY[@name='host-id' and text()='%s']"
% host):
host_status = 0
return host_status
def _safe_hostname(self, hostname):
"""Modify an initiator name to match firmware requirements.
Initiator name cannot include certain characters and cannot exceed
15 bytes in 'T' firmware (32 bytes in 'G' firmware).
"""
for ch in [',', '"', '\\', '<', '>']:
if ch in hostname:
hostname = hostname.replace(ch, '')
index = len(hostname)
if index > 15:
index = 15
return hostname[:index]
def get_active_iscsi_target_portals(self):
# This function returns {'ip': status,}
portals = {}
prop = 'ip-address'
tree = self._request("/show/ports")
for el in tree.xpath("//PROPERTY[@name='primary-ip-address']"):
prop = 'primary-ip-address'
break
iscsi_ips = [ip.text for ip in tree.xpath(
"//PROPERTY[@name='%s']" % prop)]
if not iscsi_ips:
return portals
for index, port_type in enumerate(tree.xpath(
"//PROPERTY[@name='port-type' and text()='iSCSI']")):
status = port_type.getparent().findtext("PROPERTY[@name='status']")
if status == 'Up':
portals[iscsi_ips[index]] = status
return portals
def get_chap_record(self, initiator_name):
tree = self._request("/show/chap-records")
for prop in tree.xpath("//PROPERTY[@name='initiator-name' and "
"text()='%s']" % initiator_name):
chap_secret = prop.getparent().findtext("PROPERTY[@name='initiator"
"-secret']")
return chap_secret
def create_chap_record(self, initiator_name, chap_secret):
self._request("/create/chap-record",
name=initiator_name,
secret=chap_secret)
def get_serial_number(self):
tree = self._request("/show/system")
return tree.findtext(".//PROPERTY[@name='midplane-serial-number']")
def get_owner_info(self, backend_name):
tree = self._request("/show/vdisks", backend_name)
return tree.findtext(".//PROPERTY[@name='owner']")
def modify_volume_name(self, old_name, new_name):
self._request("/set/volume", old_name, name=new_name)
def get_volume_size(self, volume_name):
tree = self._request("/show/volumes", volume_name)
size = tree.findtext(".//PROPERTY[@name='size-numeric']")
return self._get_size(size)
| apache-2.0 | 7,362,203,164,092,854,000 | 90,200,187,726,377,140 | 35.443787 | 79 | 0.551469 | false |
ttsirkia/a-plus | exercise/tests_cache.py | 2 | 9577 | from lib.testdata import CourseTestCase
from course.models import CourseModule, LearningObjectCategory
from .cache.content import CachedContent
from .cache.hierarchy import PreviousIterator
from .cache.points import CachedPoints
from .models import BaseExercise, StaticExercise, Submission
class CachedContentTest(CourseTestCase):
def test_invalidation(self):
c = CachedContent(self.instance)
created = c.created()
c = CachedContent(self.instance)
self.assertEqual(c.created(), created)
self.exercise0.save()
c = CachedContent(self.instance)
self.assertNotEqual(c.created(), created)
def test_content(self):
self.module0.status = CourseModule.STATUS.UNLISTED
self.module0.save()
c = CachedContent(self.instance)
self.assertFalse(c.dirty)
total = c.total()
self.assertEqual(total['min_group_size'], 1)
self.assertEqual(total['max_group_size'], 2)
modules = c.modules()
self.assertEqual(len(c.modules()), 3)
self.assertEqual(len(c.categories()), 1)
exercises0 = list(c.flat_module(modules[0], enclosed=False))
exercises1 = list(c.flat_module(modules[1], enclosed=False))
self.assertEqual(len(exercises0), 1)
self.assertEqual(len(exercises1), 2)
exercise = exercises0[0]
self.assertEqual(exercise['module_id'], modules[0]['id'])
self.assertTrue(CachedContent.is_visible(exercise))
self.assertFalse(CachedContent.is_listed(exercise))
exercise = exercises1[0]
self.assertEqual(exercise['module_id'], modules[1]['id'])
self.assertTrue(CachedContent.is_visible(exercise))
self.assertTrue(CachedContent.is_listed(exercise))
self.assertFalse(CachedContent.is_in_maintenance(exercise))
self.assertEqual(exercise['opening_time'], self.module.opening_time)
self.assertEqual(exercise['closing_time'], self.module.closing_time)
self.assertEqual(exercise['points_to_pass'], 0)
self.assertEqual(exercise['max_points'], 100)
def test_hierarchy(self):
c = CachedContent(self.instance)
full = list(c.flat_full())
hierarchy = [
'module','level','exercise','level',
'module','level','exercise','exercise','level',
'module','level','exercise','level',
]
for i,typ in enumerate(hierarchy):
self.assertEqual(full[i]['type'], typ)
begin = c.begin()
self.assertEqual(begin, full[2])
def test_find(self):
c = CachedContent(self.instance)
module,tree,prev,nex = c.find(self.module)
self.assertEqual(module['type'], 'module')
self.assertEqual(module['id'], self.module.id)
self.assertEqual(len(tree), 1)
self.assertEqual(prev['type'], 'exercise')
self.assertEqual(prev['id'], self.exercise0.id)
self.assertEqual(nex['type'], 'exercise')
self.assertEqual(nex['id'], self.exercise.id)
eid = c.find_path(self.module.id, self.exercise2.get_path())
self.assertEqual(eid, self.exercise2.id)
exercise,tree,prev,nex = c.find(self.exercise2)
self.assertEqual(exercise['type'], 'exercise')
self.assertEqual(exercise['id'], self.exercise2.id)
self.assertEqual(len(tree), 2)
self.assertEqual(tree[0], module)
self.assertEqual(prev['type'], 'exercise')
self.assertEqual(prev['id'], self.exercise.id)
self.assertEqual(nex['type'], 'module')
self.assertEqual(nex['id'], self.module2.id)
def test_backwards(self):
c = CachedContent(self.instance)
backwards = list(PreviousIterator(c.modules()))
        hierarchy = [
'exercise','module',
'exercise','exercise','module',
'exercise','module',
]
        for i,typ in enumerate(hierarchy):
self.assertEqual(backwards[i]['type'], typ)
def test_flat_modules(self):
c = CachedContent(self.instance)
sizes = [3,4,3]
for i,m in enumerate(c.modules_flatted()):
self.assertEqual(len(list(m['flatted'])), sizes[i])
def test_deep(self):
self.subexercise = StaticExercise.objects.create(
course_module=self.module,
category=self.category,
parent=self.exercise2,
status=BaseExercise.STATUS.UNLISTED,
url='s1',
name="Deep Exercise",
exercise_page_content='$$subexercise$$content',
submission_page_content='$$subexercise$$received',
points_to_pass=0,
max_points=100,
order=1,
)
c = CachedContent(self.instance)
exercise,tree,prev,nex = c.find(self.subexercise)
self.assertEqual(nex['type'], 'module')
self.assertEqual(nex['id'], self.module2.id)
class CachedPointsTest(CourseTestCase):
def test_invalidation(self):
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
self.assertFalse(p.dirty)
created = p.created()
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
self.assertEqual(p.created(), created)
self.exercise0.save()
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
self.assertNotEqual(p.created(), created)
created = p.created()
self.submission2.save()
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
self.assertEqual(c.created(), created[1])
self.assertNotEqual(p.created(), created)
def test_accumulation(self):
self.submission2.set_points(2,2)
self.submission2.save()
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
entry,tree,_,_ = p.find(self.exercise)
self.assertTrue(entry['graded'])
self.assertTrue(entry['passed'])
self.assertEqual(entry['points'], 50)
total = p.total()
self.assertEqual(total['submission_count'], 2)
self.assertEqual(total['points'], 50)
self.assertEqual(total['points_by_difficulty'].get('',0), 50)
module = p.modules()[1]
self.assertEqual(module['submission_count'], 2)
self.assertEqual(module['points'], 50)
self.assertEqual(module['points_by_difficulty'].get('',0), 50)
self.assertFalse(module['passed'])
category = p.categories()[0]
self.assertTrue(category['passed'])
self.submission2.set_ready()
self.submission2.save()
p = CachedPoints(self.instance, self.student, c)
total = p.total()
self.assertEqual(total['points'], 100)
self.submission3.set_points(10,100)
self.submission3.set_ready()
self.submission3.save()
p = CachedPoints(self.instance, self.student, c)
total = p.total()
self.assertEqual(total['points'], 110)
module = p.modules()[1]
self.assertTrue(module['passed'])
def test_unconfirmed(self):
self.category2 = LearningObjectCategory.objects.create(
course_instance=self.instance,
name="Test Category 2",
points_to_pass=5,
confirm_the_level=True,
)
self.exercise2.category = self.category2
self.exercise2.save()
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
total = p.total()
self.assertEqual(total['points'], 0)
self.assertEqual(total['points_by_difficulty'].get('',0), 0)
self.assertEqual(total['unconfirmed_points_by_difficulty'].get('',0), 50)
module = p.modules()[1]
self.assertEqual(module['points'], 0)
category = p.categories()[0]
self.assertEqual(category['points'], 0)
self.submission3.set_points(1,2)
self.submission3.set_ready()
self.submission3.save()
p = CachedPoints(self.instance, self.student, c)
total = p.total()
self.assertEqual(total['points'], 50)
self.assertEqual(total['points_by_difficulty'].get('',0), 50)
self.assertEqual(total['unconfirmed_points_by_difficulty'].get('',0), 0)
module = p.modules()[1]
self.assertEqual(module['points'], 50)
category = p.categories()[0]
self.assertEqual(category['points'], 50)
def test_unofficial(self):
self.module.late_submissions_allowed = False
self.module.save()
self.category.accept_unofficial_submits = True
self.category.save()
sub = Submission.objects.create(exercise=self.exercise3)
sub.submitters.add(self.student.userprofile)
sub.submission_time = self.three_days_after
sub.set_points(1,2)
sub.set_ready()
sub.save()
self.submission2.submission_time = self.three_days_after
self.submission2.set_points(2,2)
self.submission2.set_ready()
self.submission2.save()
c = CachedContent(self.instance)
p = CachedPoints(self.instance, self.student, c)
entry,_,_,_ = p.find(self.exercise3)
self.assertFalse(entry['graded'])
self.assertTrue(entry['unofficial'])
self.assertEqual(entry['points'], 50)
entry,_,_,_ = p.find(self.exercise)
self.assertTrue(entry['graded'])
self.assertFalse(entry['unofficial'])
self.assertEqual(entry['points'], 50)
| gpl-3.0 | 8,656,726,916,922,393,000 | -3,861,094,334,948,499,000 | 39.239496 | 81 | 0.620132 | false |
BeATz-UnKNoWN/python-for-android | python3-alpha/python3-src/Lib/ctypes/test/test_array_in_pointer.py | 170 | 1738 | import unittest
from ctypes import *
from binascii import hexlify
import re
def dump(obj):
# helper function to dump memory contents in hex, with a hyphen
# between the bytes.
h = hexlify(memoryview(obj)).decode()
return re.sub(r"(..)", r"\1-", h)[:-1]
class Value(Structure):
_fields_ = [("val", c_byte)]
class Container(Structure):
_fields_ = [("pvalues", POINTER(Value))]
class Test(unittest.TestCase):
def test(self):
# create an array of 4 values
val_array = (Value * 4)()
# create a container, which holds a pointer to the pvalues array.
c = Container()
c.pvalues = val_array
# memory contains 4 NUL bytes now, that's correct
self.assertEqual("00-00-00-00", dump(val_array))
# set the values of the array through the pointer:
for i in range(4):
c.pvalues[i].val = i + 1
values = [c.pvalues[i].val for i in range(4)]
        # These are the expected results: here's the bug!
self.assertEqual(
(values, dump(val_array)),
([1, 2, 3, 4], "01-02-03-04")
)
def test_2(self):
val_array = (Value * 4)()
# memory contains 4 NUL bytes now, that's correct
self.assertEqual("00-00-00-00", dump(val_array))
ptr = cast(val_array, POINTER(Value))
# set the values of the array through the pointer:
for i in range(4):
ptr[i].val = i + 1
values = [ptr[i].val for i in range(4)]
        # These are the expected results: here's the bug!
self.assertEqual(
(values, dump(val_array)),
([1, 2, 3, 4], "01-02-03-04")
)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 5,346,498,265,803,036,000 | -5,467,684,335,085,902,000 | 26.15625 | 73 | 0.561565 | false |
cbitstech/Purple-Robot-Django | management/commands/extractors/builtin_rawlocationprobeeventlog.py | 1 | 2943 | # pylint: disable=line-too-long
import datetime
import psycopg2
import pytz
CREATE_PROBE_TABLE_SQL = 'CREATE TABLE builtin_rawlocationprobeeventlog(id SERIAL PRIMARY KEY, user_id TEXT, guid TEXT, timestamp BIGINT, utc_logged TIMESTAMP, provider_status TEXT, log_event TEXT, satellites BIGINT);'
CREATE_PROBE_USER_ID_INDEX = 'CREATE INDEX ON builtin_rawlocationprobeeventlog(user_id);'
CREATE_PROBE_GUID_INDEX = 'CREATE INDEX ON builtin_rawlocationprobeeventlog(guid);'
CREATE_PROBE_UTC_LOGGED_INDEX = 'CREATE INDEX ON builtin_rawlocationprobeeventlog(utc_logged);'
def exists(connection_str, user_id, reading):
conn = psycopg2.connect(connection_str)
if probe_table_exists(conn) is False:
conn.close()
return False
cursor = conn.cursor()
cursor.execute('SELECT id FROM builtin_rawlocationprobeeventlog WHERE (user_id = %s AND guid = %s);', (user_id, reading['GUID']))
row_exists = (cursor.rowcount > 0)
cursor.close()
conn.close()
return row_exists
def probe_table_exists(conn):
cursor = conn.cursor()
cursor.execute('SELECT table_name FROM information_schema.tables WHERE (table_schema = \'public\' AND table_name = \'builtin_rawlocationprobeeventlog\')')
table_exists = (cursor.rowcount > 0)
cursor.close()
return table_exists
def insert(connection_str, user_id, reading, check_exists=True):
conn = psycopg2.connect(connection_str)
cursor = conn.cursor()
if check_exists and probe_table_exists(conn) is False:
cursor.execute(CREATE_PROBE_TABLE_SQL)
cursor.execute(CREATE_PROBE_USER_ID_INDEX)
cursor.execute(CREATE_PROBE_GUID_INDEX)
cursor.execute(CREATE_PROBE_UTC_LOGGED_INDEX)
conn.commit()
reading_cmd = 'INSERT INTO builtin_rawlocationprobeeventlog(user_id, ' + \
'guid, ' + \
'timestamp, ' + \
'utc_logged, ' + \
'provider_status, ' + \
'log_event, ' + \
'satellites) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING id;'
provider_status = None
satellites = None
if 'PROVIDER_STATUS' in reading:
provider_status = reading['PROVIDER_STATUS']
if 'satellites' in reading:
satellites = reading['satellites']
cursor.execute(reading_cmd, (user_id,
reading['GUID'],
reading['TIMESTAMP'],
datetime.datetime.fromtimestamp(reading['TIMESTAMP'], tz=pytz.utc),
provider_status,
reading['LOG_EVENT'],
satellites))
conn.commit()
cursor.close()
conn.close()
| gpl-3.0 | -1,281,744,708,257,984,800 | -2,481,647,530,005,427,000 | 34.890244 | 218 | 0.576283 | false |
shubhamgupta123/erpnext | erpnext/config/non_profit.py | 8 | 1775 | from __future__ import unicode_literals
from frappe import _
def get_data():
return [
{
"label": _("Chapter"),
"icon": "fa fa-star",
"items": [
{
"type": "doctype",
"name": "Chapter",
"description": _("Chapter information."),
}
]
},
{
"label": _("Membership"),
"items": [
{
"type": "doctype",
"name": "Member",
"description": _("Member information."),
},
{
"type": "doctype",
"name": "Membership",
"description": _("Memebership Details"),
},
{
"type": "doctype",
"name": "Membership Type",
"description": _("Memebership Type Details"),
},
]
},
{
"label": _("Volunteer"),
"items": [
{
"type": "doctype",
"name": "Volunteer",
"description": _("Volunteer information."),
},
{
"type": "doctype",
"name": "Volunteer Type",
"description": _("Volunteer Type information."),
}
]
},
{
"label": _("Donor"),
"items": [
{
"type": "doctype",
"name": "Donor",
"description": _("Donor information."),
},
{
"type": "doctype",
"name": "Donor Type",
"description": _("Donor Type information."),
}
]
},
{
"label": _("Loan Management"),
"icon": "icon-list",
"items": [
{
"type": "doctype",
"name": "Loan Type",
"description": _("Define various loan types")
},
{
"type": "doctype",
"name": "Loan Application",
"description": _("Loan Application")
},
{
"type": "doctype",
"name": "Loan"
},
]
},
{
"label": _("Grant Application"),
"items": [
{
"type": "doctype",
"name": "Grant Application",
"description": _("Grant information."),
}
]
}
]
| gpl-3.0 | -7,606,904,284,953,485,000 | 5,663,449,196,577,294,000 | 17.298969 | 53 | 0.469296 | false |
cloudnull/eventlet_wsgi | example_app/app.py | 1 | 3150 | # =============================================================================
# Copyright [2014] [Kevin Carter]
# License Information :
# This software has no warranty, it is provided 'as is'. It is your
# responsibility to validate the behavior of the routines and its accuracy
# using the code provided. Consult the GNU General Public license for further
# details (see GNU General Public License).
# http://www.gnu.org/licenses/gpl.html
# =============================================================================
# This is an example application
# =============================================================================
import datetime
import os
import flask
import ewsgi
from cloudlib import parse_ini
from cloudlib import logger
CONFIG = parse_ini.ConfigurationSetup()
try:
CONFIG.load_config(name='example', path=os.getcwd())
# Load Default Configuration
default_config = CONFIG.config_args(section='default')
# Set the application name
APPNAME = default_config.get('appname', 'example')
# Store network Configuration
network_config = CONFIG.config_args(section='network')
# Store SSL configuration
ssl_config = CONFIG.config_args(section='ssl')
# Enable or disable DEBUG mode
DEBUG = default_config.get('debug', False)
except IOError:
# If the configuration file is not present, set the two bits we need
DEBUG = True
APPNAME = 'example'
# Load Logging
LOG = logger.getLogger(APPNAME)
# Load the flask APP
APP = flask.Flask(APPNAME)
# Enable general debugging
if DEBUG is True:
APP.debug = True
LOG.debug(APP.logger)
# Enable Application Threading
APP.threaded = True
# Enforce strict slashes in URI's
APP.url_map.strict_slashes = False
# Add Default Handling for File not found.
APP.errorhandler(ewsgi.not_found)
# Load the BLUEPRINT handler
BLUEPRINT = flask.Blueprint
blueprints = []
# Each Blueprint is essentially a route. It has a name and needs to be
# stored as an object which will be used as a decorator.
hello_world = BLUEPRINT('hello', APPNAME)
test_path = BLUEPRINT('test_path', __name__)
# The decorator object is appended to the "blueprints" list and will be
# used later to register ALL blueprints.
blueprints.append(hello_world)
blueprints.append(test_path)
# This decorator loads the route and provides the allowed methods
# available from within the decorator
@hello_world.route('/hello', methods=['GET'])
def _hello_world():
"""Return 200 response on GET '/hello'."""
LOG.debug('hello world')
return 'hello world. The time is [ %s ]' % datetime.datetime.utcnow(), 200
@test_path.route('/test', methods=['GET'])
def _test_path():
"""Return 200 response on GET '/test'."""
state = {
'Application': APPNAME,
'time': datetime.datetime.utcnow(),
'request': {
'method': flask.request.method,
'path': flask.request.path
}
}
LOG.debug(state)
return flask.jsonify({'response': state}, indent=2), 200
# Register all blueprints as found in our `list` of blueprints
for blueprint in blueprints:
APP.register_blueprint(blueprint=blueprint)
| gpl-3.0 | -8,369,119,338,207,739,000 | -8,519,649,967,413,267,000 | 27.378378 | 79 | 0.653651 | false |
Codepoints/unidump | unidump/__init__.py | 1 | 1861 | #!/usr/bin/env python3
"""
hexdump(1) for Unicode data
"""
from typing import IO
from unidump.output import sanitize_char, print_line, fill_and_print
from unidump.env import Env
VERSION = '1.1.3'
def unidump(inbytes: IO[bytes], env: Env) -> None:
"""take a list of bytes and print their Unicode codepoints
>>> import io
>>> import sys
>>> from unidump.env import Env
>>> _env = Env(linelength=4, output=sys.stdout)
>>> unidump(io.BytesIO(b'\\x01\\xF0\\x9F\\x99\\xB8ABC'), _env)
0 0001 1F678 0041 0042 .\U0001F678AB
7 0043 C
>>> unidump(io.BytesIO(b'\\xD7'), _env)
0 ?D7? X
>>> _env.encoding = 'latin1'
>>> unidump(io.BytesIO(b'\\xD7'), _env)
0 00D7 \u00D7
"""
byteoffset = 0
bytebuffer = b''
current_line = [0, [], '']
byte = inbytes.read(1)
while byte:
byteoffset += 1
bytebuffer += byte
try:
char = bytebuffer.decode(env.encoding)
except UnicodeDecodeError:
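            # Undecodable bytes are buffered; once 4 bytes fail to decode (or
            # the input ends) each one is flushed as a ?XX? placeholder below.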
next_byte = inbytes.read(1)
if not next_byte or len(bytebuffer) >= 4:
for i, data in enumerate(bytebuffer):
current_line = (
fill_and_print(current_line, byteoffset - 4 + i,
'?{:02X}?'.format(data), 'X', env)
)
bytebuffer = b''
byte = next_byte
continue
else:
current_line = (
fill_and_print(current_line, byteoffset - len(bytebuffer),
'{:04X}'.format(ord(char)), sanitize_char(char),
env)
)
bytebuffer = b''
byte = inbytes.read(1)
print_line(current_line, env)
| mit | -38,129,353,052,989,520 | 7,066,070,150,891,301,000 | 27.630769 | 79 | 0.487372 | false |
tianon/hy | tests/compilers/test_ast.py | 1 | 14265 | # Copyright (c) 2013 Paul Tagliamonte <[email protected]>
# Copyright (c) 2013 Julien Danjou <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
from hy import HyString
from hy.models import HyObject
from hy.compiler import hy_compile
from hy.errors import HyCompileError, HyTypeError
from hy.lex.exceptions import LexException
from hy.lex import tokenize
from hy._compat import PY3
import ast
def _ast_spotcheck(arg, root, secondary):
if "." in arg:
local, full = arg.split(".", 1)
return _ast_spotcheck(full,
getattr(root, local),
getattr(secondary, local))
assert getattr(root, arg) == getattr(secondary, arg)
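# For example, _ast_spotcheck("value.func.id", code, tree) recursively checks
# that code.value.func.id equals tree.value.func.id (as used in the tests below).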
def can_compile(expr):
return hy_compile(tokenize(expr), "__main__")
def cant_compile(expr):
try:
hy_compile(tokenize(expr), "__main__")
assert False
except HyTypeError as e:
# Anything that can't be compiled should raise a user friendly
# error, otherwise it's a compiler bug.
assert isinstance(e.expression, HyObject)
assert e.message
except HyCompileError as e:
# Anything that can't be compiled should raise a user friendly
# error, otherwise it's a compiler bug.
assert isinstance(e.exception, HyTypeError)
assert e.traceback
def test_ast_bad_type():
"Make sure AST breakage can happen"
try:
hy_compile("foo", "__main__")
assert True is False
except HyCompileError:
pass
def test_ast_bad_if():
"Make sure AST can't compile invalid if"
cant_compile("(if)")
cant_compile("(if foobar)")
cant_compile("(if 1 2 3 4 5)")
def test_ast_valid_if():
"Make sure AST can't compile invalid if"
can_compile("(if foo bar)")
def test_ast_valid_unary_op():
"Make sure AST can compile valid unary operator"
can_compile("(not 2)")
can_compile("(~ 1)")
def test_ast_invalid_unary_op():
"Make sure AST can't compile invalid unary operator"
cant_compile("(not 2 3 4)")
cant_compile("(not)")
cant_compile("(not 2 3 4)")
cant_compile("(~ 2 2 3 4)")
cant_compile("(~)")
def test_ast_bad_while():
"Make sure AST can't compile invalid while"
cant_compile("(while)")
cant_compile("(while (true))")
def test_ast_good_do():
"Make sure AST can compile valid do"
can_compile("(do)")
can_compile("(do 1)")
def test_ast_good_throw():
"Make sure AST can compile valid throw"
can_compile("(throw)")
can_compile("(throw Exception)")
def test_ast_bad_throw():
"Make sure AST can't compile invalid throw"
cant_compile("(throw Exception Exception)")
def test_ast_good_raise():
"Make sure AST can compile valid raise"
can_compile("(raise)")
can_compile("(raise Exception)")
can_compile("(raise e)")
if PY3:
def test_ast_raise_from():
can_compile("(raise Exception :from NameError)")
def test_ast_bad_raise():
"Make sure AST can't compile invalid raise"
cant_compile("(raise Exception Exception)")
def test_ast_good_try():
"Make sure AST can compile valid try"
can_compile("(try)")
can_compile("(try 1)")
can_compile("(try 1 (except) (else 1))")
can_compile("(try 1 (else 1) (except))")
can_compile("(try 1 (finally 1) (except))")
can_compile("(try 1 (finally 1))")
can_compile("(try 1 (except) (finally 1))")
can_compile("(try 1 (except) (finally 1) (else 1))")
can_compile("(try 1 (except) (else 1) (finally 1))")
def test_ast_bad_try():
"Make sure AST can't compile invalid try"
cant_compile("(try 1 bla)")
cant_compile("(try 1 bla bla)")
cant_compile("(try (do) (else 1) (else 2))")
cant_compile("(try 1 (else 1))")
def test_ast_good_catch():
"Make sure AST can compile valid catch"
can_compile("(try 1 (catch))")
can_compile("(try 1 (catch []))")
can_compile("(try 1 (catch [Foobar]))")
can_compile("(try 1 (catch [[]]))")
can_compile("(try 1 (catch [x FooBar]))")
can_compile("(try 1 (catch [x [FooBar BarFoo]]))")
can_compile("(try 1 (catch [x [FooBar BarFoo]]))")
def test_ast_bad_catch():
"Make sure AST can't compile invalid catch"
cant_compile("(catch 22)") # heh
cant_compile("(try (catch 1))")
cant_compile("(try (catch \"A\"))")
cant_compile("(try (catch [1 3]))")
cant_compile("(try (catch [x [FooBar] BarBar]))")
def test_ast_good_except():
"Make sure AST can compile valid except"
can_compile("(try 1 (except))")
can_compile("(try 1 (except []))")
can_compile("(try 1 (except [Foobar]))")
can_compile("(try 1 (except [[]]))")
can_compile("(try 1 (except [x FooBar]))")
can_compile("(try 1 (except [x [FooBar BarFoo]]))")
can_compile("(try 1 (except [x [FooBar BarFoo]]))")
def test_ast_bad_except():
"Make sure AST can't compile invalid except"
cant_compile("(except 1)")
cant_compile("(try 1 (except 1))")
cant_compile("(try 1 (except [1 3]))")
cant_compile("(try 1 (except [x [FooBar] BarBar]))")
def test_ast_good_assert():
"""Make sure AST can compile valid asserts. Asserts may or may not
include a label."""
can_compile("(assert 1)")
can_compile("(assert 1 \"Assert label\")")
can_compile("(assert 1 (+ \"spam \" \"eggs\"))")
can_compile("(assert 1 12345)")
can_compile("(assert 1 nil)")
can_compile("(assert 1 (+ 2 \"incoming eggsception\"))")
def test_ast_bad_assert():
"Make sure AST can't compile invalid assert"
cant_compile("(assert)")
cant_compile("(assert 1 2 3)")
cant_compile("(assert 1 [1 2] 3)")
def test_ast_good_global():
"Make sure AST can compile valid global"
can_compile("(global a)")
def test_ast_bad_global():
"Make sure AST can't compile invalid global"
cant_compile("(global)")
cant_compile("(global foo bar)")
def test_ast_good_defclass():
"Make sure AST can compile valid defclass"
can_compile("(defclass a)")
can_compile("(defclass a [])")
def test_ast_bad_defclass():
"Make sure AST can't compile invalid defclass"
cant_compile("(defclass)")
cant_compile("(defclass a null)")
cant_compile("(defclass a null null)")
def test_ast_good_lambda():
"Make sure AST can compile valid lambda"
can_compile("(lambda [])")
can_compile("(lambda [] 1)")
def test_ast_bad_lambda():
"Make sure AST can't compile invalid lambda"
cant_compile("(lambda)")
def test_ast_good_yield():
"Make sure AST can compile valid yield"
can_compile("(yield 1)")
def test_ast_bad_yield():
"Make sure AST can't compile invalid yield"
cant_compile("(yield 1 2)")
def test_ast_good_import_from():
"Make sure AST can compile valid selective import"
can_compile("(import [x [y]])")
def test_ast_good_get():
"Make sure AST can compile valid get"
can_compile("(get x y)")
def test_ast_bad_get():
"Make sure AST can't compile invalid get"
cant_compile("(get)")
cant_compile("(get 1)")
def test_ast_good_slice():
"Make sure AST can compile valid slice"
can_compile("(slice x)")
can_compile("(slice x y)")
can_compile("(slice x y z)")
can_compile("(slice x y z t)")
def test_ast_bad_slice():
"Make sure AST can't compile invalid slice"
cant_compile("(slice)")
cant_compile("(slice 1 2 3 4 5)")
def test_ast_good_take():
"Make sure AST can compile valid 'take'"
can_compile("(take 1 [2 3])")
def test_ast_good_drop():
"Make sure AST can compile valid 'drop'"
can_compile("(drop 1 [2 3])")
def test_ast_good_assoc():
"Make sure AST can compile valid assoc"
can_compile("(assoc x y z)")
def test_ast_bad_assoc():
"Make sure AST can't compile invalid assoc"
cant_compile("(assoc)")
cant_compile("(assoc 1)")
cant_compile("(assoc 1 2)")
cant_compile("(assoc 1 2 3 4)")
def test_ast_bad_with():
"Make sure AST can't compile invalid with"
cant_compile("(with*)")
cant_compile("(with* [])")
cant_compile("(with* [] (pass))")
def test_ast_valid_while():
"Make sure AST can't compile invalid while"
can_compile("(while foo bar)")
def test_ast_valid_for():
"Make sure AST can compile valid for"
can_compile("(for [a 2] (print a))")
def test_ast_invalid_for():
"Make sure AST can't compile invalid for"
cant_compile("(for* [a 1] (else 1 2))")
def test_ast_valid_let():
"Make sure AST can compile valid let"
can_compile("(let [])")
can_compile("(let [a b])")
can_compile("(let [[a 1]])")
can_compile("(let [[a 1] b])")
def test_ast_invalid_let():
"Make sure AST can't compile invalid let"
cant_compile("(let 1)")
cant_compile("(let [1])")
cant_compile("(let [[a 1 2]])")
cant_compile("(let [[]])")
cant_compile("(let [[a]])")
cant_compile("(let [[1]])")
def test_ast_expression_basics():
""" Ensure basic AST expression conversion works. """
code = can_compile("(foo bar)").body[0]
tree = ast.Expr(value=ast.Call(
func=ast.Name(
id="foo",
ctx=ast.Load(),
),
args=[
ast.Name(id="bar", ctx=ast.Load())
],
keywords=[],
starargs=None,
kwargs=None,
))
_ast_spotcheck("value.func.id", code, tree)
def test_ast_anon_fns_basics():
""" Ensure anon fns work. """
code = can_compile("(fn (x) (* x x))").body[0]
assert type(code) == ast.FunctionDef
code = can_compile("(fn (x))").body[0]
cant_compile("(fn)")
def test_ast_non_decoratable():
""" Ensure decorating garbage breaks """
cant_compile("(with-decorator (foo) (* x x))")
def test_ast_lambda_lists():
"""Ensure the compiler chokes on invalid lambda-lists"""
cant_compile('(fn [&key {"a" b} &key {"foo" bar}] [a foo])')
cant_compile('(fn [&optional a &key {"foo" bar}] [a foo])')
cant_compile('(fn [&optional [a b c]] a)')
def test_ast_print():
code = can_compile("(print \"foo\")").body[0]
assert type(code.value) == ast.Call
def test_ast_tuple():
""" Ensure tuples work. """
code = can_compile("(, 1 2 3)").body[0].value
assert type(code) == ast.Tuple
def test_lambda_list_keywords_rest():
""" Ensure we can compile functions with lambda list keywords."""
can_compile("(fn (x &rest xs) (print xs))")
cant_compile("(fn (x &rest xs &rest ys) (print xs))")
def test_lambda_list_keywords_key():
""" Ensure we can compile functions with &key."""
can_compile("(fn (x &key {foo True}) (list x foo))")
cant_compile("(fn (x &key {bar \"baz\"} &key {foo 42}) (list x bar foo))")
def test_lambda_list_keywords_kwargs():
""" Ensure we can compile functions with &kwargs."""
can_compile("(fn (x &kwargs kw) (list x kw))")
cant_compile("(fn (x &kwargs xs &kwargs ys) (list x xs ys))")
def test_lambda_list_keywords_mixed():
""" Ensure we can mix them up."""
can_compile("(fn (x &rest xs &kwargs kw) (list x xs kw))")
cant_compile("(fn (x &rest xs &fasfkey {bar \"baz\"}))")
def test_ast_unicode_strings():
"""Ensure we handle unicode strings correctly"""
def _compile_string(s):
hy_s = HyString(s)
hy_s.start_line = hy_s.end_line = 0
hy_s.start_column = hy_s.end_column = 0
code = hy_compile([hy_s], "__main__")
# code == ast.Module(body=[ast.Expr(value=ast.Str(s=xxx))])
return code.body[0].value.s
assert _compile_string("test") == "test"
assert _compile_string("\u03b1\u03b2") == "\u03b1\u03b2"
assert _compile_string("\xc3\xa9") == "\xc3\xa9"
def test_compile_error():
"""Ensure we get compile error in tricky cases"""
try:
can_compile("(fn [] (= 1))")
except HyTypeError as e:
assert(e.message == "`=' needs at least 2 arguments, got 1.")
else:
assert(False)
def test_for_compile_error():
"""Ensure we get compile error in tricky 'for' cases"""
try:
can_compile("(fn [] (for)")
except LexException as e:
assert(e.message == "Premature end of input")
else:
assert(False)
try:
can_compile("(fn [] (for)))")
except LexException as e:
assert(e.message == "Ran into a RPAREN where it wasn't expected.")
else:
assert(False)
try:
can_compile("(fn [] (for [x]))")
except HyTypeError as e:
assert(e.message == "`for' requires an even number of args.")
else:
assert(False)
try:
can_compile("(fn [] (for [x xx]))")
except HyTypeError as e:
assert(e.message == "`for' requires a body to evaluate")
else:
assert(False)
def test_attribute_access():
"""Ensure attribute access compiles correctly"""
can_compile("(. foo bar baz)")
can_compile("(. foo [bar] baz)")
can_compile("(. foo bar [baz] [0] quux [frob])")
can_compile("(. foo bar [(+ 1 2 3 4)] quux [frob])")
cant_compile("(. foo bar :baz [0] quux [frob])")
cant_compile("(. foo bar baz (0) quux [frob])")
cant_compile("(. foo bar baz [0] quux {frob})")
def test_cons_correct():
"""Ensure cons gets compiled correctly"""
can_compile("(cons a b)")
| mit | -5,161,326,467,001,764,000 | -5,378,700,368,596,162,000 | 27.359841 | 78 | 0.615212 | false |
burjorjee/evolve-parities | evolveparities.py | 1 | 5098 | from contextlib import closing
from matplotlib.pyplot import plot, figure, hold, axis, ylabel, xlabel, savefig, title
from numpy import sort, logical_xor, transpose, logical_not
from numpy.numarray.functions import cumsum, zeros
from numpy.random import rand, shuffle
from numpy import mod, floor
import time
import cloud
from durus.file_storage import FileStorage
from durus.connection import Connection
def bitFreqVisualizer(effectiveAttrIndices, bitFreqs, gen):
f = figure(1)
n = len(bitFreqs)
hold(False)
plot(range(n), bitFreqs,'b.', markersize=10)
hold(True)
plot(effectiveAttrIndices, bitFreqs[effectiveAttrIndices],'r.', markersize=10)
axis([0, n-1, 0, 1])
title("Generation = %s" % (gen,))
ylabel('Frequency of the Bit 1')
xlabel('Locus')
f.canvas.draw()
f.show()
def showExperimentTimeStamps():
with closing(FileStorage("results.durus")) as durus:
conn = Connection(durus)
return conn.get_root().keys()
def neap_uga(m, n, gens, probMutation, effectiveAttrIndices, probMisclassification, bitFreqVisualizer=None):
""" neap = "noisy effective attribute parity"
"""
pop = rand(m,n)<0.5
bitFreqHist= zeros((n,gens+1))
for t in range(gens+1):
print "Generation %s" % t
bitFreqs = pop.astype('float').sum(axis=0)/m
bitFreqHist[:,t] = transpose(bitFreqs)
if bitFreqVisualizer:
bitFreqVisualizer(bitFreqs,t)
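        # Fitness of each individual: parity of its effective attribute bits,
        # XOR'd with a noise bit set with probability probMisclassification.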
fitnessVals = mod(pop[:, effectiveAttrIndices].astype('byte').sum(axis=1) +
(rand(m) < probMisclassification).astype('byte'),2)
totalFitness = sum (fitnessVals)
cumNormFitnessVals = cumsum(fitnessVals).astype('float')/totalFitness
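        # Fitness-proportionate (roulette-wheel) selection: map 2*m sorted
        # uniform markers onto the cumulative normalised fitness values.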
parentIndices = zeros(2*m, dtype='int16')
markers = sort(rand(2*m))
ctr = 0
for idx in xrange(2*m):
while markers[idx]>cumNormFitnessVals[ctr]:
ctr += 1
parentIndices[idx] = ctr
shuffle(parentIndices)
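        # Uniform crossover between the two halves of the parent list,
        # followed below by bit-flip mutation with probability probMutation.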
crossoverMasks = rand(m, n) < 0.5
newPop = zeros((m, n), dtype='bool')
newPop[crossoverMasks] = pop[parentIndices[:m], :][crossoverMasks]
newPop[logical_not(crossoverMasks)] = pop[parentIndices[m:], :][logical_not(crossoverMasks)]
mutationMasks = rand(m, n)<probMutation
pop = logical_xor(newPop,mutationMasks)
return bitFreqHist[0, :], bitFreqHist[-1, :]
def f(gens):
k = 7
n= k + 1
effectiveAttrIndices = range(k)
probMutation = 0.004
probMisclassification = 0.20
popSize = 1500
jid = cloud.call(neap_uga, **dict(m=popSize,
n=n,
gens=gens,
probMutation=probMutation,
effectiveAttrIndices=effectiveAttrIndices,
probMisclassification=probMisclassification))
print "Kicked off trial %s" % jid
return jid
def cloud_result(jid):
result = cloud.result(jid)
print "Retrieved results for trial %s" % jid
return result
def run_trials():
numTrials = 3000
gens = 1000
from multiprocessing.pool import ThreadPool as Pool
pool = Pool(50)
jids = pool.map(f,[gens]*numTrials)
print "Done spawning trials. Retrieving results..."
results = pool.map(cloud_result, jids)
firstLocusFreqsHists = zeros((numTrials,gens+1), dtype='float')
lastLocusFreqsHists = zeros((numTrials,gens+1), dtype='float')
print "Done retrieving results. Press Enter to serialize..."
raw_input()
for i, result in enumerate(results):
firstLocusFreqsHists[i, :], lastLocusFreqsHists[i, :] = result
with closing(FileStorage("results.durus")) as durus:
conn = Connection(durus)
conn.get_root()[str(int(floor(time.time())))] = (firstLocusFreqsHists, lastLocusFreqsHists)
conn.commit()
pool.close()
pool.join()
def render_results(timestamp=None):
with closing(FileStorage("results.durus")) as durus:
conn = Connection(durus)
db = conn.get_root()
if not timestamp:
timestamp = sorted(db.keys())[-1]
firstLocusFreqsHists, lastLocusFreqsHists = db[timestamp]
print "Done deserializing results. Plotting..."
x = [(2, 'First', firstLocusFreqsHists, "effective"),
(3, 'Last', lastLocusFreqsHists, "non-effective")]
for i, pos, freqsHists, filename in x :
freqsHists = freqsHists[:,:801]
f = figure(i)
hold(False)
plot(transpose(freqsHists), color='grey')
hold(True)
maxGens = freqsHists.shape[1]-1
plot([0, maxGens], [.05,.05], 'k--')
plot([0, maxGens], [.95,.95], 'k--')
axis([0, maxGens, 0, 1])
xlabel('Generation')
ylabel('1-Frequency of the '+pos+' Locus')
f.canvas.draw()
f.show()
savefig(filename+'.png', format='png', dpi=200)
if __name__ == "__main__":
cloud.start_simulator()
run_trials()
render_results()
print "Done plotting results. Press Enter to end..."
raw_input()
| gpl-3.0 | 4,005,576,279,145,840,600 | -8,147,286,316,681,068,000 | 32.539474 | 108 | 0.620832 | false |
mtp1376/youtube-dl | youtube_dl/extractor/imgur.py | 9 | 3559 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
js_to_json,
mimetype2ext,
ExtractorError,
)
class ImgurIE(InfoExtractor):
_VALID_URL = r'https?://(?:i\.)?imgur\.com/(?P<id>[a-zA-Z0-9]+)(?:\.mp4|\.gifv)?'
_TESTS = [{
'url': 'https://i.imgur.com/A61SaA1.gifv',
'info_dict': {
'id': 'A61SaA1',
'ext': 'mp4',
'title': 're:Imgur GIF$|MRW gifv is up and running without any bugs$',
'description': 're:The origin of the Internet\'s most viral images$|The Internet\'s visual storytelling community\. Explore, share, and discuss the best visual stories the Internet has to offer\.$',
},
}, {
'url': 'https://imgur.com/A61SaA1',
'info_dict': {
'id': 'A61SaA1',
'ext': 'mp4',
'title': 're:Imgur GIF$|MRW gifv is up and running without any bugs$',
'description': 're:The origin of the Internet\'s most viral images$|The Internet\'s visual storytelling community\. Explore, share, and discuss the best visual stories the Internet has to offer\.$',
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
width = int_or_none(self._search_regex(
r'<param name="width" value="([0-9]+)"',
webpage, 'width', fatal=False))
height = int_or_none(self._search_regex(
r'<param name="height" value="([0-9]+)"',
webpage, 'height', fatal=False))
video_elements = self._search_regex(
r'(?s)<div class="video-elements">(.*?)</div>',
webpage, 'video elements', default=None)
if not video_elements:
raise ExtractorError(
'No sources found for video %s. Maybe an image?' % video_id,
expected=True)
formats = []
for m in re.finditer(r'<source\s+src="(?P<src>[^"]+)"\s+type="(?P<type>[^"]+)"', video_elements):
formats.append({
'format_id': m.group('type').partition('/')[2],
'url': self._proto_relative_url(m.group('src')),
'ext': mimetype2ext(m.group('type')),
'acodec': 'none',
'width': width,
'height': height,
'http_headers': {
'User-Agent': 'youtube-dl (like wget)',
},
})
gif_json = self._search_regex(
r'(?s)var\s+videoItem\s*=\s*(\{.*?\})',
webpage, 'GIF code', fatal=False)
if gif_json:
gifd = self._parse_json(
gif_json, video_id, transform_source=js_to_json)
formats.append({
'format_id': 'gif',
'preference': -10,
'width': width,
'height': height,
'ext': 'gif',
'acodec': 'none',
'vcodec': 'gif',
'container': 'gif',
'url': self._proto_relative_url(gifd['gifUrl']),
'filesize': gifd.get('size'),
'http_headers': {
'User-Agent': 'youtube-dl (like wget)',
},
})
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'description': self._og_search_description(webpage),
'title': self._og_search_title(webpage),
}
| unlicense | 164,662,886,503,146,700 | 5,667,400,699,104,885,000 | 35.690722 | 210 | 0.494521 | false |
davipeterlini/routeflow_tcc | pox/tests/unit/openflow/switch_impl_test.py | 23 | 6728 | #!/usr/bin/env python
import unittest
import sys
import os.path
from copy import copy
sys.path.append(os.path.dirname(__file__) + "/../../..")
from pox.openflow.libopenflow_01 import *
from pox.datapaths.switch import *
class MockConnection(object):
def __init__(self):
self.received = []
@property
def last(self):
return self.received[-1]
def set_message_handler(self, handler):
self.on_message_received = handler
def to_switch(self, msg):
self.on_message_received(self, msg)
# from switch
def send(self, msg):
self.received.append(msg)
class SwitchImplTest(unittest.TestCase):
def setUp(self):
self.conn = MockConnection()
self.switch = SoftwareSwitch(1, name="sw1")
self.switch.set_connection(self.conn)
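    # A minimal Ethernet/IPv4/UDP packet used as test traffic in these tests.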
self.packet = ethernet(src=EthAddr("00:00:00:00:00:01"), dst=EthAddr("00:00:00:00:00:02"),
payload=ipv4(srcip=IPAddr("1.2.3.4"), dstip=IPAddr("1.2.3.5"),
payload=udp(srcport=1234, dstport=53, payload="haha")))
def test_hello(self):
c = self.conn
c.to_switch(ofp_hello(xid=123))
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_hello),
"should have received hello but got %s" % c.last)
def test_echo_request(self):
c = self.conn
c.to_switch(ofp_echo_request(xid=123))
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_echo_reply) and c.last.xid == 123,
"should have received echo reply but got %s" % c.last)
def test_barrier(self):
c = self.conn
c.to_switch(ofp_barrier_request(xid=123))
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_barrier_reply) and c.last.xid == 123,
"should have received echo reply but got %s" % c.last)
def test_flow_mod(self):
c = self.conn
s = self.switch
c.to_switch(ofp_flow_mod(xid=124, priority=1, match=ofp_match(in_port=1, nw_src="1.2.3.4")))
self.assertEqual(len(c.received), 0)
self.assertEqual(len(s.table), 1)
e = s.table.entries[0]
self.assertEqual(e.priority,1)
self.assertEqual(e.match, ofp_match(in_port=1, nw_src="1.2.3.4"))
def test_packet_out(self):
c = self.conn
s = self.switch
received = []
s.addListener(DpPacketOut, lambda(event): received.append(event))
packet = self.packet
c.to_switch(ofp_packet_out(data=packet, actions=[ofp_action_output(port=2)]))
self.assertEqual(len(c.received), 0)
self.assertEqual(len(received), 1)
event = received[0]
self.assertEqual(event.port.port_no,2)
self.assertEqual(event.packet.pack(), packet.pack())
def test_send_packet_in(self):
c = self.conn
s = self.switch
s.send_packet_in(in_port=1, buffer_id=123, packet=self.packet, reason=OFPR_NO_MATCH)
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_packet_in) and c.last.xid == 0,
"should have received packet_in but got %s" % c.last)
self.assertEqual(c.last.in_port,1)
self.assertEqual(c.last.buffer_id,123)
self.assertEqual(c.last.data, self.packet.pack())
def test_rx_packet(self):
c = self.conn
s = self.switch
received = []
s.addListener(DpPacketOut, lambda(event): received.append(event))
# no flow entries -> should result in a packet_in
s.rx_packet(self.packet, in_port=1)
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_packet_in),
"should have received packet_in but got %s" % c.last)
self.assertTrue(c.last.buffer_id > 0)
# let's send a flow_mod with a buffer id
c.to_switch(ofp_flow_mod(xid=124, buffer_id=c.last.buffer_id, priority=1,
match=ofp_match(in_port=1, nw_src="1.2.3.4"),
actions = [ ofp_action_output(port=3) ]
))
# that should have send the packet out port 3
self.assertEqual(len(received), 1)
event = received[0]
self.assertEqual(event.port.port_no,3)
self.assertEqual(event.packet, self.packet)
# now the next packet should go through on the fast path
c.received = []
received = []
s.rx_packet(self.packet, in_port=1)
self.assertEqual(len(c.received), 0)
self.assertEqual(len(received), 1)
event = received[0]
self.assertEqual(event.port.port_no,3)
self.assertEqual(event.packet, self.packet)
def test_delete_port(self):
c = self.conn
s = self.switch
original_num_ports = len(self.switch.ports)
p = self.switch.ports.values()[0]
s.delete_port(p)
new_num_ports = len(self.switch.ports)
self.assertTrue(new_num_ports == original_num_ports - 1,
"Should have removed the port")
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_port_status),
"should have received port_status but got %s" % c.last)
self.assertTrue(c.last.reason == OFPPR_DELETE)
def test_add_port(self):
c = self.conn
s = self.switch
port_count = len(self.switch.ports)
old_port = s.delete_port(1)
self.assertTrue(port_count - 1 == len(self.switch.ports),
"Should have removed port")
self.assertFalse(old_port.port_no in self.switch.ports,
"Should have removedport")
s.add_port(old_port)
self.assertTrue(old_port.port_no in self.switch.ports,
"Should have added port")
self.assertEqual(len(c.received), 2)
self.assertTrue(isinstance(c.last, ofp_port_status),
"should have received port_status but got %s" % c.last)
self.assertTrue(c.last.reason == OFPPR_ADD)
def test_port_mod_failed(self):
c = self.conn
# test wrong port
msg = ofp_port_mod()
msg.port_no = 1234
c.to_switch(msg)
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_error))
self.assertTrue(c.last.type == OFPET_PORT_MOD_FAILED)
self.assertTrue(c.last.code == OFPPMFC_BAD_PORT)
# test wrong hw_addr
msg.port_no = 1
msg.hw_addr = EthAddr("11:22:33:44:55:66")
c.to_switch(msg)
self.assertEqual(len(c.received), 2)
self.assertTrue(isinstance(c.last, ofp_error))
self.assertTrue(c.last.type == OFPET_PORT_MOD_FAILED)
self.assertTrue(c.last.code == OFPPMFC_BAD_HW_ADDR)
def test_port_mod_link_down(self):
c = self.conn
s = self.switch
# test wrong port
msg = ofp_port_mod()
msg.port_no = 1
msg.hw_addr = s.ports[1].hw_addr
msg.mask = OFPPC_PORT_DOWN
msg.config = OFPPC_PORT_DOWN
c.to_switch(msg)
self.assertEqual(len(c.received), 1)
self.assertTrue(isinstance(c.last, ofp_port_status))
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -2,767,648,308,531,934,700 | -2,270,194,165,581,394,000 | 32.64 | 96 | 0.644322 | false |
whatsthehubbub/rippleeffect | nousernameregistration/models.py | 1 | 10449 | from django.conf import settings
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except:
pass
from django.db import models
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
import datetime
import hashlib
import random
import re
try:
from django.utils.timezone import now as datetime_now
except ImportError:
datetime_now = datetime.datetime.now
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
"""
Custom manager for the ``RegistrationProfile`` model.
The methods defined here provide shortcuts for account creation
and activation (including generation and emailing of activation
keys), and for cleaning out expired inactive accounts.
"""
def activate_user(self, activation_key):
"""
Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
To prevent reactivation of an account which has been
deactivated by site administrators, the activation key is
reset to the string constant ``RegistrationProfile.ACTIVATED``
after successful activation.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = self.get(activation_key=activation_key)
except self.model.DoesNotExist:
return False
if not profile.activation_key_expired():
user = profile.user
user.is_active = True
user.save()
profile.activation_key = self.model.ACTIVATED
profile.save()
return user
return False
def create_inactive_user(self, email, password,
site, send_email=True):
"""
Create a new, inactive ``User``, generate a
``RegistrationProfile`` and email its activation key to the
``User``, returning the new ``User``.
By default, an activation email will be sent to the new
user. To disable this, pass ``send_email=False``.
"""
new_user = User.objects.create_user(email, password)
new_user.is_active = False
new_user.save()
registration_profile = self.create_profile(new_user)
if send_email:
registration_profile.send_activation_email(site)
return new_user
create_inactive_user = transaction.commit_on_success(create_inactive_user)
def create_profile(self, user):
"""
Create a ``RegistrationProfile`` for a given
``User``, and return the ``RegistrationProfile``.
The activation key for the ``RegistrationProfile`` will be a
SHA1 hash, generated from a combination of the ``User``'s
        email address and a random salt.
"""
salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
email = user.email
if isinstance(email, unicode):
email = email.encode('utf-8')
activation_key = hashlib.sha1(salt+email).hexdigest()
return self.create(user=user,
activation_key=activation_key)
def delete_expired_users(self):
"""
Remove expired instances of ``RegistrationProfile`` and their
associated ``User``s.
Accounts to be deleted are identified by searching for
instances of ``RegistrationProfile`` with expired activation
keys, and then checking to see if their associated ``User``
instances have the field ``is_active`` set to ``False``; any
``User`` who is both inactive and has an expired activation
key will be deleted.
It is recommended that this method be executed regularly as
part of your routine site maintenance; this application
provides a custom management command which will call this
method, accessible as ``manage.py cleanupregistration``.
Regularly clearing out accounts which have never been
activated serves two useful purposes:
        1. It alleviates the occasional need to reset a
``RegistrationProfile`` and/or re-send an activation email
when a user does not receive or does not act upon the
initial activation email; since the account will be
deleted, the user will be able to simply re-register and
receive a new activation key.
2. It prevents the possibility of a malicious user registering
one or more accounts and never activating them (thus
denying the use of those usernames to anyone else); since
those accounts will be deleted, the usernames will become
available for use again.
If you have a troublesome ``User`` and wish to disable their
account while keeping it in the database, simply delete the
associated ``RegistrationProfile``; an inactive ``User`` which
does not have an associated ``RegistrationProfile`` will not
be deleted.
"""
for profile in self.all():
try:
if profile.activation_key_expired():
user = profile.user
if not user.is_active:
user.delete()
profile.delete()
except User.DoesNotExist:
profile.delete()
class RegistrationProfile(models.Model):
"""
A simple profile which stores an activation key for use during
user account registration.
Generally, you will not want to interact directly with instances
of this model; the provided manager includes methods
for creating and activating new accounts, as well as for cleaning
out accounts which have never been activated.
While it is possible to use this model as the value of the
``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
so. This model's sole purpose is to store data temporarily during
account registration and activation.
"""
ACTIVATED = u"ALREADY_ACTIVATED"
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True, verbose_name=_('user'))
activation_key = models.CharField(_('activation key'), max_length=40)
objects = RegistrationManager()
class Meta:
verbose_name = _('registration profile')
verbose_name_plural = _('registration profiles')
def __unicode__(self):
return u"Registration information for %s" % self.user
def activation_key_expired(self):
"""
Determine whether this ``RegistrationProfile``'s activation
key has expired, returning a boolean -- ``True`` if the key
has expired.
Key expiration is determined by a two-step process:
1. If the user has already activated, the key will have been
reset to the string constant ``ACTIVATED``. Re-activating
is not permitted, and so this method returns ``True`` in
this case.
2. Otherwise, the date the user signed up is incremented by
the number of days specified in the setting
``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
days after signup during which a user is allowed to
activate their account); if the result is less than or
equal to the current date, the key has expired and this
method returns ``True``.
"""
expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
return self.activation_key == self.ACTIVATED or \
(self.user.date_joined + expiration_date <= datetime_now())
activation_key_expired.boolean = True
def send_activation_email(self, site):
"""
Send an activation email to the user associated with this
``RegistrationProfile``.
The activation email will make use of two templates:
``registration/activation_email_subject.txt``
This template will be used for the subject line of the
email. Because it is used as the subject line of an email,
this template's output **must** be only a single line of
text; output longer than one line will be forcibly joined
into only a single line.
``registration/activation_email.txt``
This template will be used for the body of the email.
These templates will each receive the following context
variables:
``activation_key``
The activation key for the new account.
``expiration_days``
The number of days remaining during which the account may
be activated.
``site``
An object representing the site on which the user
registered; depending on whether ``django.contrib.sites``
is installed, this may be an instance of either
``django.contrib.sites.models.Site`` (if the sites
application is installed) or
``django.contrib.sites.models.RequestSite`` (if
not). Consult the documentation for the Django sites
framework for details regarding these objects' interfaces.
"""
ctx_dict = {'activation_key': self.activation_key,
'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
'site': site}
subject = render_to_string('registration/activation_email_subject.txt',
ctx_dict)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('registration/activation_email.txt',
ctx_dict)
self.user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
| mit | -4,985,723,383,216,108,000 | 7,298,162,585,476,859,000 | 37.557196 | 91 | 0.620825 | false |
patilsangram/erpnext | erpnext/templates/pages/help.py | 17 | 1260 | from __future__ import unicode_literals
import frappe, json
import requests
def get_context(context):
context.no_cache = 1
settings = frappe.get_doc("Support Settings", "Support Settings")
s = settings
# Get Started sections
sections = json.loads(s.get_started_sections)
context.get_started_sections = sections
# Forum posts
topics_data, post_params = get_forum_posts(s)
context.post_params = post_params
context.forum_url = s.forum_url
context.topics = topics_data[:3]
# Issues
if frappe.session.user != "Guest":
context.issues = frappe.get_list("Issue", fields=["name", "status", "subject", "modified"])[:3]
else:
context.issues = []
def get_forum_posts(s):
response = requests.get(s.forum_url + '/' + s.get_latest_query)
response.raise_for_status()
response_json = response.json()
topics_data = {} # it will actually be an array
key_list = s.response_key_list.split(',')
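	# Walk the configured chain of keys into the response JSON, e.g. a
	# Discourse-style "topic_list,topics" path (illustrative example only).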
for key in key_list:
topics_data = response_json.get(key) if not topics_data else topics_data.get(key)
for topic in topics_data:
topic["link"] = s.forum_url + '/' + s.post_route_string + '/' + str(topic.get(s.post_route_key))
post_params = {
"title": s.post_title_key,
"description": s.post_description_key
}
return topics_data, post_params
| gpl-3.0 | 4,364,130,366,187,653,000 | -5,512,599,495,608,460,000 | 27.636364 | 98 | 0.696825 | false |
xaviercobain88/framework-python | openerp/addons/base/ir/workflow/__init__.py | 79 | 1093 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import workflow
import print_instance
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 6,262,538,103,228,595,000 | -6,304,236,611,046,444,000 | 42.72 | 79 | 0.614822 | false |
terkaa/linuxcnc | src/hal/user_comps/pyvcp.py | 32 | 3152 | #!/usr/bin/env python
# This is a component of emc
# Copyright 2007 Anders Wallin <[email protected]>
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" Python Virtual Control Panel for EMC
A virtual control panel (VCP) is used to display and control
HAL pins, which are either BIT or FLOAT valued.
Usage: pyvcp -g WxH+X+Y -c compname myfile.xml
compname is the name of the HAL component to be created.
The name of the HAL pins associated with the VCP will begin with 'compname.'
myfile.xml is an XML file which specifies the layout of the VCP.
Valid XML tags are described in the documentation for pyvcp_widgets.py
    -g option allows setting of the initial size and/or position of the panel
"""
import sys, os
BASE = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), ".."))
sys.path.insert(0, os.path.join(BASE, "lib", "python"))
import vcpparse
import hal
from Tkinter import Tk
import getopt
def usage():
""" prints the usage message """
print "Usage: pyvcp [-g WIDTHxHEIGHT+XOFFSET+YOFFSET][-c hal_component_name] myfile.xml"
print "If the component name is not specified, the basename of the xml file is used."
print "-g options are in pixel units, XOFFSET/YOFFSET is referenced from top left of screen"
print "use -g WIDTHxHEIGHT for just setting size or -g +XOFFSET+YOFFSET for just position"
def main():
""" creates a HAL component.
calls vcpparse with the specified XML file.
"""
try:
opts, args = getopt.getopt(sys.argv[1:], "c:g:")
except getopt.GetoptError, detail:
print detail
usage()
sys.exit(1)
window_geometry = None
component_name = None
for o, a in opts:
if o == "-c":
component_name = a
if o == "-g":
window_geometry = a
try:
filename=args[0]
except:
usage()
sys.exit(1)
if component_name is None:
component_name = os.path.splitext(os.path.basename(filename))[0]
pyvcp0 = Tk()
pyvcp0.title(component_name)
if window_geometry:
pyvcp0.geometry(window_geometry)
vcpparse.filename=filename
pycomp=vcpparse.create_vcp(compname=component_name, master=pyvcp0)
pycomp.ready()
try:
try:
pyvcp0.mainloop()
except KeyboardInterrupt:
sys.exit(0)
finally:
pycomp.exit()
if __name__ == '__main__':
main()
| gpl-2.0 | 1,147,952,708,821,585,900 | -2,651,486,285,362,646,000 | 30.838384 | 96 | 0.662437 | false |
who-emro/meerkat_frontend | meerkat_frontend/views/messaging.py | 1 | 15049 | """
messaging.py
A Flask Blueprint module for Meerkat messaging services.
"""
from flask.ext.babel import gettext
from flask import Blueprint, render_template
from flask import redirect, flash, request, current_app, g, jsonify
import random
from meerkat_frontend import app, auth
import meerkat_libs as libs
from .. import common as c
messaging = Blueprint('messaging', __name__)
@messaging.route('/')
@messaging.route('/loc_<int:locID>')
@auth.authorise(*app.config['AUTH'].get('messaging', [['BROKEN'], ['']]))
def subscribe(locID=None):
"""
Subscription Process Stage 1: Render the page with the subscription form.
Args:
locID (int): The location ID of a location to be automatically
loaded into the location selector.
"""
# Initialise locID to allowed location
# Can't be done during function declaration because outside app context
locID = g.allowed_location if not locID else locID
return render_template('messaging/subscribe.html',
content=g.config['MESSAGING_CONFIG'],
loc=locID,
week=c.api('/epi_week'))
@messaging.route('/subscribe/subscribed', methods=['POST'])
@auth.authorise(*app.config['AUTH'].get('messaging', [['BROKEN'], ['']]))
def subscribed():
"""
Subscription Process Stage 2: Confirms successful subscription request
and informs the user of the verification process. This method assembles
the HTML form data into a structure Meerkat Hermes understands and then
uses the Meerkat Hermes "subscribe" resource to create the subscriber. It
further assembles the email and SMS verification messages and uses the
Meerkat Hermes to send it out.
"""
# Convert form immutabledict to dict.
data = {}
for key in request.form.keys():
key_list = request.form.getlist(key)
if(len(key_list) > 1):
data[key] = key_list
else:
data[key] = key_list[0]
# Call hermes subscribe method.
subscribe_response = libs.hermes('/subscribe', 'PUT', data)
# Assemble and send verification email.
url = request.url_root + \
g.get("language") + "/messaging/subscribe/verify/" + \
subscribe_response['subscriber_id']
verify_text = gettext(g.config['MESSAGING_CONFIG']['messages'].get(
'verify_text',
"Dear {first_name} {last_name} ,\n\n" +
"Your subscription to receive public health surveillance "
"notifications from {country} has been created or updated. An "
"administrator of the system may have done this on your behalf. "
"\n\nIn order to receive future notifications, please "
"verify your contact details by copying and pasting the following url "
"into your address bar: {url}\n"
)).format(
first_name=data["first_name"],
last_name=data["last_name"],
country=current_app.config['MESSAGING_CONFIG']['messages']['country'],
url=url
)
verify_html = gettext(g.config['MESSAGING_CONFIG']['messages'].get(
'verify_html',
"<p>Dear {first_name} {last_name},</p>"
"<p>Your subscription to receive public health surveillance "
"notifications from {country} has been created or updated. "
"An administrator of the system may have done this on your "
"behalf.</p><p> To receive future notifications, please verify "
"your contact details by <a href='{url}' target='_blank'>"
"clicking here</a>.</p>"
)).format(
first_name=data["first_name"],
last_name=data["last_name"],
country=current_app.config['MESSAGING_CONFIG']['messages']['country'],
url=url
)
libs.hermes('/email', 'PUT', {
'email': data['email'],
'subject': gettext('Please verify your contact details'),
'message': verify_text,
'html': verify_html,
'from': current_app.config['MESSAGING_CONFIG']['messages']['from']
})
# Set and send sms verification code.
if 'sms' in data:
__set_code(subscribe_response['subscriber_id'], data['sms'])
# Delete the old account if it exists. Inform the user of success.
if data.get('id', None):
response = libs.hermes('/subscribe/' + data['id'], 'DELETE')
if hasattr(response, 'status_code') and response.status_code != 200:
flash(gettext(
'Account update failed: invalid ID. '
'Creating new subscription instead.'
))
else:
flash(
gettext('Subscription updated for ') + data['first_name'] +
" " + data['last_name'] + "."
)
return render_template('messaging/subscribed.html',
content=g.config['MESSAGING_CONFIG'],
week=c.api('/epi_week'),
data=data)
@messaging.route('/subscribe/verify/<string:subscriber_id>')
def verify(subscriber_id):
"""
    Subscription Process Stage 3: Verifies contact details for the subscriber ID
specified in the URL. If no SMS number is provided, then just landing on
    this page is enough to verify the user's email address (assuming the ID is
not guessable). In this case we do a redirect to Stage 4. If the user has
already been verified, then we also redirect to stage four with a flash
message to remind them that they have already verified. In all other cases
we show the SMS verification form.
Args:
subscriber_id (str): The UUID that is assigned to the subscriber upon
creation by Meerkat Hermes.
"""
# Get the subscriber
subscriber = libs.hermes('/subscribe/' + subscriber_id, 'GET')
if subscriber['Item']['verified'] is True:
flash(gettext('You have already verified your account.'))
return redirect(
"/" + g.get("language") +
'/messaging/subscribe/verified/' + subscriber_id,
code=302
)
elif 'sms' not in subscriber['Item']:
current_app.logger.warning(str(subscriber['Item']))
libs.hermes('/verify/' + subscriber_id, 'GET')
return redirect(
"/" + g.get("language") +
'/messaging/subscribe/verified/' + subscriber_id
)
else:
return render_template('messaging/verify.html',
content=g.config['MESSAGING_CONFIG'],
week=c.api('/epi_week'),
data=subscriber['Item'])
@messaging.route('/subscribe/verified/<string:subscriber_id>')
def verified(subscriber_id):
"""
Subscription Process Stage 4: Confirms that the users details has been
verified, and sends out a confirmation email as well.
Args:
subscriber_id (str): The UUID that is assigned to the subscriber
upon creation by Meerkat Hermes.
"""
# Get the subscriber
subscriber = libs.hermes('/subscribe/' + subscriber_id, 'GET')['Item']
# If the subscriber isn't verified redirect to the verify stage.
if not subscriber['verified']:
return redirect(
'/' + g.get("language") +
'/messaging/subscribe/verify/' + subscriber_id,
code=302
)
country = current_app.config['MESSAGING_CONFIG']['messages']['country']
# Send a confirmation e-mail with the unsubscribe link.
confirmation_text = gettext(g.config['MESSAGING_CONFIG']['messages'].get(
'confirmation_text',
"Dear {first_name} {last_name},\n\n"
"Thank you for subscribing to receive public health surveillance "
"notifications from {country}. We can confirm that your contact "
"details have been successfully verified.\n\nYou can unsubscribe at "
"any time by clicking on the relevant link in your e-mails.\n\n If "
"you wish to unsubscribe now copy and paste the following url into "
"your address bar:\n{url}/unsubscribe/{subscriber_id}"
)).format(
first_name=subscriber["first_name"],
last_name=subscriber["last_name"],
country=country,
url=current_app.config["HERMES_ROOT"],
subscriber_id=subscriber_id
)
confirmation_html = gettext(g.config['MESSAGING_CONFIG']['messages'].get(
'confirmation_html',
"<p>Dear {first_name} {last_name},</p>"
"<p>Thank you for subscribing to receive public health surveillance "
"notifications from {country}. We can confirm that your contact "
"details have been successfully verified.</p><p>You can unsubscribe "
"at any time by clicking on the relevant link in your e-mails.</p><p> "
"If you wish to unsubscribe now "
"<a href='{url}/unsubscribe/{subscriber_id}'>click here.</a></p>"
)).format(
first_name=subscriber["first_name"],
last_name=subscriber["last_name"],
country=country,
url=current_app.config["HERMES_ROOT"],
subscriber_id=subscriber_id
)
email = {
'email': subscriber['email'],
'subject': gettext("Your subscription has been successful"),
'message': confirmation_text,
'html': confirmation_html,
'from': current_app.config['MESSAGING_CONFIG']['messages']['from']
}
email_response = libs.hermes('/email', 'PUT', email)
current_app.logger.warning('Response is: ' + str(email_response))
return render_template('messaging/verified.html',
content=g.config['MESSAGING_CONFIG'],
week=c.api('/epi_week'))
@messaging.route('/subscribe/sms_code/<string:subscriber_id>',
methods=['get', 'post'])
def sms_code(subscriber_id):
"""
Chooses, sets and checks SMS verification codes for the subscriber
corresponding to the ID given in the URL. If a POST request is made to this
URL it checks whether the code supplied in the POST request form data
    matches the code sent to the phone. If it does, it redirects to Stage 4; if
    it doesn't, it redirects to stage 3 again with a flash informing the user
they got the wrong code. If a GET request is made to this URL, the function
selects a new code and sends the code out to the phone. It then redirects
to Stage 3 with a flash message informing the user whether the new code has
    been successfully sent.
Args:
subscriber_id (str): The UUID that is assigned to the subscriber upon
creation by Meerkat Hermes.
"""
# If a POST request is made we check the given verification code.
if request.method == 'POST':
if __check_code(subscriber_id, request.form['code']):
libs.hermes('/verify/' + subscriber_id, 'GET')
return redirect(
"/" + g.get("language") +
"/messaging/subscribe/verified/" + subscriber_id,
code=302
)
else:
flash('You submitted the wrong code.', 'error')
return redirect(
"/" + g.get("language") +
"/messaging/subscribe/verify/" + subscriber_id,
code=302
)
# If a GET request is made we send a new code.
else:
subscriber = libs.hermes('/subscribe/' + subscriber_id, 'GET')
response = __set_code(subscriber_id, subscriber['Item']['sms'])
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
flash(gettext('A new code has been sent to your phone.'))
return redirect(
"/" + g.get("language") +
"/messaging/subscribe/verify/" + subscriber_id,
code=302
)
else:
current_app.logger.error(
"Request to send SMS failed. Response:\n{}".format(response)
)
flash(
gettext('Error: Try again later, or contact administrator.'),
'error'
)
return redirect(
"/" + g.get("language") +
"/messaging/subscribe/verify/" + subscriber_id,
code=302
)
@messaging.route('/get_subscribers')
@auth.authorise(*app.config['AUTH'].get('admin', [['BROKEN'], ['']]))
def get_subscribers():
"""
    Securely uses the server's access to the Hermes API to extract subscriber
    data. If the request went straight from the browser's console to Hermes,
    we would have to give the user direct access to Hermes. This is not safe.
"""
country = current_app.config['MESSAGING_CONFIG']['messages']['country']
subscribers = libs.hermes('/subscribers/'+country, 'GET')
return jsonify({'rows': subscribers})
@messaging.route('/delete_subscribers', methods=['POST'])
@auth.authorise(*app.config['AUTH'].get('admin', [['BROKEN'], ['']]))
def delete_subscribers():
"""
    Delete the subscribers specified in the posted JSON body.
"""
# Load the list of subscribers to be deleted.
subscribers = request.get_json()
# Try to delete each subscriber, flag up if there is an error
error = False
for subscriber_id in subscribers:
response = libs.hermes('/subscribe/' + subscriber_id, 'DELETE')
if response['status'] != 'successful':
error = True
if error:
return "ERROR: There was an error deleting some users."
else:
return "Users successfully deleted."
def __check_code(subscriber_id, code):
"""
Checks if the given code for the given subscriber ID is the correct SMS
verification code.
Args:
subscriber_id (str): The UUID that is assigned to the subscriber upon
creation by Meerkat Hermes.
code (str): The code to be checked.
Returns:
bool: True if there is a match, False otherwise.
"""
response = libs.hermes('/verify', 'POST',
{'subscriber_id': subscriber_id, 'code': code})
current_app.logger.warning(str(response))
return bool(response['matched'])
def __set_code(subscriber_id, sms):
"""
Sets a new sms verification code for the given subscriber ID.
Args:
subscriber_id (str): The UUID that is assigned to the subscriber
upon creation by Meerkat Hermes.
sms (int): The SMS number to which the new code should be sent.
Returns:
The Meerkat Hermes response object.
"""
    code = random.randint(0, 9999)
message = gettext(
'Your verification code for {country} public health '
'surveillance notifications is: {code}. For further information '
'please see your email.'
).format(
country=current_app.config['MESSAGING_CONFIG']['messages']['country'],
code=code
)
data = {'sms': sms, 'message': message}
response = libs.hermes('/verify', 'PUT',
{'subscriber_id': subscriber_id, 'code': code})
response = libs.hermes('/sms', 'PUT', data)
return response
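# The two private helpers above are only called from the routes in this
# module. The sketch below shows how they compose into a single round trip
# (store and send a fresh code, then check the code the user submits); it is
# illustrative only and not wired to any route.
def _verification_round_trip(subscriber_id, sms, submitted_code):
    """Send a new code to `sms`, then report whether `submitted_code` matches."""
    response = __set_code(subscriber_id, sms)
    sent_ok = response['ResponseMetadata']['HTTPStatusCode'] == 200
    return sent_ok and __check_code(subscriber_id, submitted_code)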
| mit | 2,890,613,995,338,011,600 | -6,752,055,152,130,345,000 | 37.002525 | 79 | 0.612001 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/funding_v30.py | 1 | 16706 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.amount_v30 import AmountV30 # noqa: F401,E501
from orcid_api_v3.models.created_date_v30 import CreatedDateV30 # noqa: F401,E501
from orcid_api_v3.models.external_i_ds_v30 import ExternalIDsV30 # noqa: F401,E501
from orcid_api_v3.models.funding_contributors_v30 import FundingContributorsV30 # noqa: F401,E501
from orcid_api_v3.models.funding_title_v30 import FundingTitleV30 # noqa: F401,E501
from orcid_api_v3.models.fuzzy_date_v30 import FuzzyDateV30 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30 import LastModifiedDateV30 # noqa: F401,E501
from orcid_api_v3.models.organization_defined_funding_sub_type_v30 import OrganizationDefinedFundingSubTypeV30 # noqa: F401,E501
from orcid_api_v3.models.organization_v30 import OrganizationV30 # noqa: F401,E501
from orcid_api_v3.models.source_v30 import SourceV30 # noqa: F401,E501
from orcid_api_v3.models.url_v30 import UrlV30 # noqa: F401,E501
class FundingV30(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'created_date': 'CreatedDateV30',
'last_modified_date': 'LastModifiedDateV30',
'source': 'SourceV30',
'put_code': 'int',
'path': 'str',
'type': 'str',
'organization_defined_type': 'OrganizationDefinedFundingSubTypeV30',
'title': 'FundingTitleV30',
'short_description': 'str',
'amount': 'AmountV30',
'url': 'UrlV30',
'start_date': 'FuzzyDateV30',
'end_date': 'FuzzyDateV30',
'external_ids': 'ExternalIDsV30',
'contributors': 'FundingContributorsV30',
'organization': 'OrganizationV30',
'visibility': 'str'
}
attribute_map = {
'created_date': 'created-date',
'last_modified_date': 'last-modified-date',
'source': 'source',
'put_code': 'put-code',
'path': 'path',
'type': 'type',
'organization_defined_type': 'organization-defined-type',
'title': 'title',
'short_description': 'short-description',
'amount': 'amount',
'url': 'url',
'start_date': 'start-date',
'end_date': 'end-date',
'external_ids': 'external-ids',
'contributors': 'contributors',
'organization': 'organization',
'visibility': 'visibility'
}
def __init__(self, created_date=None, last_modified_date=None, source=None, put_code=None, path=None, type=None, organization_defined_type=None, title=None, short_description=None, amount=None, url=None, start_date=None, end_date=None, external_ids=None, contributors=None, organization=None, visibility=None): # noqa: E501
"""FundingV30 - a model defined in Swagger""" # noqa: E501
self._created_date = None
self._last_modified_date = None
self._source = None
self._put_code = None
self._path = None
self._type = None
self._organization_defined_type = None
self._title = None
self._short_description = None
self._amount = None
self._url = None
self._start_date = None
self._end_date = None
self._external_ids = None
self._contributors = None
self._organization = None
self._visibility = None
self.discriminator = None
if created_date is not None:
self.created_date = created_date
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if source is not None:
self.source = source
if put_code is not None:
self.put_code = put_code
if path is not None:
self.path = path
if type is not None:
self.type = type
if organization_defined_type is not None:
self.organization_defined_type = organization_defined_type
if title is not None:
self.title = title
if short_description is not None:
self.short_description = short_description
if amount is not None:
self.amount = amount
if url is not None:
self.url = url
if start_date is not None:
self.start_date = start_date
if end_date is not None:
self.end_date = end_date
if external_ids is not None:
self.external_ids = external_ids
if contributors is not None:
self.contributors = contributors
if organization is not None:
self.organization = organization
if visibility is not None:
self.visibility = visibility
@property
def created_date(self):
"""Gets the created_date of this FundingV30. # noqa: E501
:return: The created_date of this FundingV30. # noqa: E501
:rtype: CreatedDateV30
"""
return self._created_date
@created_date.setter
def created_date(self, created_date):
"""Sets the created_date of this FundingV30.
:param created_date: The created_date of this FundingV30. # noqa: E501
:type: CreatedDateV30
"""
self._created_date = created_date
@property
def last_modified_date(self):
"""Gets the last_modified_date of this FundingV30. # noqa: E501
:return: The last_modified_date of this FundingV30. # noqa: E501
:rtype: LastModifiedDateV30
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this FundingV30.
:param last_modified_date: The last_modified_date of this FundingV30. # noqa: E501
:type: LastModifiedDateV30
"""
self._last_modified_date = last_modified_date
@property
def source(self):
"""Gets the source of this FundingV30. # noqa: E501
:return: The source of this FundingV30. # noqa: E501
:rtype: SourceV30
"""
return self._source
@source.setter
def source(self, source):
"""Sets the source of this FundingV30.
:param source: The source of this FundingV30. # noqa: E501
:type: SourceV30
"""
self._source = source
@property
def put_code(self):
"""Gets the put_code of this FundingV30. # noqa: E501
:return: The put_code of this FundingV30. # noqa: E501
:rtype: int
"""
return self._put_code
@put_code.setter
def put_code(self, put_code):
"""Sets the put_code of this FundingV30.
:param put_code: The put_code of this FundingV30. # noqa: E501
:type: int
"""
self._put_code = put_code
@property
def path(self):
"""Gets the path of this FundingV30. # noqa: E501
:return: The path of this FundingV30. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this FundingV30.
:param path: The path of this FundingV30. # noqa: E501
:type: str
"""
self._path = path
@property
def type(self):
"""Gets the type of this FundingV30. # noqa: E501
:return: The type of this FundingV30. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this FundingV30.
:param type: The type of this FundingV30. # noqa: E501
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
allowed_values = ["GRANT", "CONTRACT", "AWARD", "SALARY_AWARD", "grant", "contract", "award",
"salary-award"] # noqa: E501
if type not in allowed_values:
raise ValueError(
"Invalid value for `type` ({0}), must be one of {1}" # noqa: E501
.format(type, allowed_values)
)
self._type = type
@property
def organization_defined_type(self):
"""Gets the organization_defined_type of this FundingV30. # noqa: E501
:return: The organization_defined_type of this FundingV30. # noqa: E501
:rtype: OrganizationDefinedFundingSubTypeV30
"""
return self._organization_defined_type
@organization_defined_type.setter
def organization_defined_type(self, organization_defined_type):
"""Sets the organization_defined_type of this FundingV30.
:param organization_defined_type: The organization_defined_type of this FundingV30. # noqa: E501
:type: OrganizationDefinedFundingSubTypeV30
"""
self._organization_defined_type = organization_defined_type
@property
def title(self):
"""Gets the title of this FundingV30. # noqa: E501
:return: The title of this FundingV30. # noqa: E501
:rtype: FundingTitleV30
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this FundingV30.
:param title: The title of this FundingV30. # noqa: E501
:type: FundingTitleV30
"""
if title is None:
raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501
self._title = title
@property
def short_description(self):
"""Gets the short_description of this FundingV30. # noqa: E501
:return: The short_description of this FundingV30. # noqa: E501
:rtype: str
"""
return self._short_description
@short_description.setter
def short_description(self, short_description):
"""Sets the short_description of this FundingV30.
:param short_description: The short_description of this FundingV30. # noqa: E501
:type: str
"""
self._short_description = short_description
@property
def amount(self):
"""Gets the amount of this FundingV30. # noqa: E501
:return: The amount of this FundingV30. # noqa: E501
:rtype: AmountV30
"""
return self._amount
@amount.setter
def amount(self, amount):
"""Sets the amount of this FundingV30.
:param amount: The amount of this FundingV30. # noqa: E501
:type: AmountV30
"""
self._amount = amount
@property
def url(self):
"""Gets the url of this FundingV30. # noqa: E501
:return: The url of this FundingV30. # noqa: E501
:rtype: UrlV30
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this FundingV30.
:param url: The url of this FundingV30. # noqa: E501
:type: UrlV30
"""
self._url = url
@property
def start_date(self):
"""Gets the start_date of this FundingV30. # noqa: E501
:return: The start_date of this FundingV30. # noqa: E501
:rtype: FuzzyDateV30
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this FundingV30.
:param start_date: The start_date of this FundingV30. # noqa: E501
:type: FuzzyDateV30
"""
self._start_date = start_date
@property
def end_date(self):
"""Gets the end_date of this FundingV30. # noqa: E501
:return: The end_date of this FundingV30. # noqa: E501
:rtype: FuzzyDateV30
"""
return self._end_date
@end_date.setter
def end_date(self, end_date):
"""Sets the end_date of this FundingV30.
:param end_date: The end_date of this FundingV30. # noqa: E501
:type: FuzzyDateV30
"""
self._end_date = end_date
@property
def external_ids(self):
"""Gets the external_ids of this FundingV30. # noqa: E501
:return: The external_ids of this FundingV30. # noqa: E501
:rtype: ExternalIDsV30
"""
return self._external_ids
@external_ids.setter
def external_ids(self, external_ids):
"""Sets the external_ids of this FundingV30.
:param external_ids: The external_ids of this FundingV30. # noqa: E501
:type: ExternalIDsV30
"""
self._external_ids = external_ids
@property
def contributors(self):
"""Gets the contributors of this FundingV30. # noqa: E501
:return: The contributors of this FundingV30. # noqa: E501
:rtype: FundingContributorsV30
"""
return self._contributors
@contributors.setter
def contributors(self, contributors):
"""Sets the contributors of this FundingV30.
:param contributors: The contributors of this FundingV30. # noqa: E501
:type: FundingContributorsV30
"""
self._contributors = contributors
@property
def organization(self):
"""Gets the organization of this FundingV30. # noqa: E501
:return: The organization of this FundingV30. # noqa: E501
:rtype: OrganizationV30
"""
return self._organization
@organization.setter
def organization(self, organization):
"""Sets the organization of this FundingV30.
:param organization: The organization of this FundingV30. # noqa: E501
:type: OrganizationV30
"""
if organization is None:
raise ValueError("Invalid value for `organization`, must not be `None`") # noqa: E501
self._organization = organization
@property
def visibility(self):
"""Gets the visibility of this FundingV30. # noqa: E501
:return: The visibility of this FundingV30. # noqa: E501
:rtype: str
"""
return self._visibility
@visibility.setter
def visibility(self, visibility):
"""Sets the visibility of this FundingV30.
:param visibility: The visibility of this FundingV30. # noqa: E501
:type: str
"""
allowed_values = ["LIMITED", "REGISTERED_ONLY", "PUBLIC", "PRIVATE", "public", "private",
"limited", "registered-only"] # noqa: E501
if visibility not in allowed_values:
raise ValueError(
"Invalid value for `visibility` ({0}), must be one of {1}" # noqa: E501
.format(visibility, allowed_values)
)
self._visibility = visibility
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(FundingV30, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FundingV30):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
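# A minimal usage sketch, assuming only the plain string-valued attributes of
# the model; nested objects (title, organization, amount, ...) would normally
# be built from their own generated classes before being assigned. Guarded so
# nothing runs on import.
if __name__ == "__main__":
    funding = FundingV30(type="GRANT", visibility="PUBLIC",
                         short_description="Seed funding for a pilot study")
    # to_dict() walks swagger_types, so nested models are serialised recursively.
    print(funding.to_dict()["type"])  # -> "GRANT"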
| mit | 6,288,915,066,987,183,000 | -1,566,955,598,314,880,500 | 28.939068 | 328 | 0.590028 | false |
tensorflow/models | official/nlp/transformer/transformer_forward_test.py | 1 | 6052 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Forward pass test for Transformer model refactoring."""
import numpy as np
import tensorflow as tf
from official.nlp.modeling import models
from official.nlp.transformer import metrics
from official.nlp.transformer import model_params
from official.nlp.transformer import transformer
def _count_params(layer, trainable_only=True):
"""Returns the count of all model parameters, or just trainable ones."""
if not trainable_only:
return layer.count_params()
else:
return int(
np.sum([
tf.keras.backend.count_params(p) for p in layer.trainable_weights
]))
def _create_model(params, is_train):
"""Creates transformer model."""
encdec_kwargs = dict(
num_layers=params["num_hidden_layers"],
num_attention_heads=params["num_heads"],
intermediate_size=params["filter_size"],
activation="relu",
dropout_rate=params["relu_dropout"],
attention_dropout_rate=params["attention_dropout"],
use_bias=False,
norm_first=True,
norm_epsilon=1e-6,
intermediate_dropout=params["relu_dropout"])
encoder_layer = models.TransformerEncoder(**encdec_kwargs)
decoder_layer = models.TransformerDecoder(**encdec_kwargs)
model_kwargs = dict(
vocab_size=params["vocab_size"],
embedding_width=params["hidden_size"],
dropout_rate=params["layer_postprocess_dropout"],
padded_decode=params["padded_decode"],
decode_max_length=params["decode_max_length"],
dtype=params["dtype"],
extra_decode_length=params["extra_decode_length"],
beam_size=params["beam_size"],
alpha=params["alpha"],
encoder_layer=encoder_layer,
decoder_layer=decoder_layer,
name="transformer_v2")
if is_train:
inputs = tf.keras.layers.Input((None,), dtype="int64", name="inputs")
targets = tf.keras.layers.Input((None,), dtype="int64", name="targets")
internal_model = models.Seq2SeqTransformer(**model_kwargs)
logits = internal_model(
dict(inputs=inputs, targets=targets), training=is_train)
vocab_size = params["vocab_size"]
label_smoothing = params["label_smoothing"]
if params["enable_metrics_in_training"]:
logits = metrics.MetricLayer(vocab_size)([logits, targets])
logits = tf.keras.layers.Lambda(
lambda x: x, name="logits", dtype=tf.float32)(
logits)
model = tf.keras.Model([inputs, targets], logits)
loss = metrics.transformer_loss(logits, targets, label_smoothing,
vocab_size)
model.add_loss(loss)
return model
batch_size = params["decode_batch_size"] if params["padded_decode"] else None
inputs = tf.keras.layers.Input((None,),
batch_size=batch_size,
dtype="int64",
name="inputs")
internal_model = models.Seq2SeqTransformer(**model_kwargs)
ret = internal_model(dict(inputs=inputs), training=is_train)
outputs, scores = ret["outputs"], ret["scores"]
return tf.keras.Model(inputs, [outputs, scores])
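# Both test cases below follow the same pattern: build the original and the
# refactored model, check that the parameter counts match, copy the weights
# across, and assert the forward passes agree. The helper below is a sketch of
# that pattern; it is not called by the tests, which keep the steps inline.
def _assert_same_forward_pass(test, src_model, dest_model, model_inputs,
                              training):
  """Copies weights from `src_model` to `dest_model` and compares outputs."""
  test.assertEqual(_count_params(src_model), _count_params(dest_model))
  dest_model.set_weights(src_model.get_weights())
  src_out = src_model(model_inputs, training=training)
  dest_out = dest_model(model_inputs, training=training)
  for src_tensor, dest_tensor in zip(
      tf.nest.flatten(src_out), tf.nest.flatten(dest_out)):
    test.assertAllEqual(src_tensor, dest_tensor)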
class TransformerForwardTest(tf.test.TestCase):
def setUp(self):
super(TransformerForwardTest, self).setUp()
self.params = params = model_params.TINY_PARAMS
params["batch_size"] = params["default_batch_size"] = 16
params["hidden_size"] = 12
params["num_hidden_layers"] = 3
params["filter_size"] = 14
params["num_heads"] = 2
params["vocab_size"] = 41
params["extra_decode_length"] = 0
params["beam_size"] = 3
params["dtype"] = tf.float32
params["layer_postprocess_dropout"] = 0.0
params["attention_dropout"] = 0.0
params["relu_dropout"] = 0.0
def test_forward_pass_train(self):
# Set input_len different from target_len
inputs = np.asarray([[5, 2, 1], [7, 5, 0], [1, 4, 0], [7, 5, 11]])
targets = np.asarray([[4, 3, 4, 0], [13, 19, 17, 8], [20, 14, 1, 2],
[5, 7, 3, 0]])
    # src_model is the original model before the refactoring.
src_model = transformer.create_model(self.params, True)
src_num_weights = _count_params(src_model)
src_weights = src_model.get_weights()
src_model_output = src_model([inputs, targets], training=True)
# dest_model is the refactored model.
dest_model = _create_model(self.params, True)
dest_num_weights = _count_params(dest_model)
self.assertEqual(src_num_weights, dest_num_weights)
dest_model.set_weights(src_weights)
dest_model_output = dest_model([inputs, targets], training=True)
self.assertAllEqual(src_model_output, dest_model_output)
def test_forward_pass_not_train(self):
inputs = np.asarray([[5, 2, 1], [7, 5, 0], [1, 4, 0], [7, 5, 11]])
    # src_model is the original model before the refactoring.
src_model = transformer.create_model(self.params, False)
src_num_weights = _count_params(src_model)
src_weights = src_model.get_weights()
src_model_output = src_model([inputs], training=False)
# dest_model is the refactored model.
dest_model = _create_model(self.params, False)
dest_num_weights = _count_params(dest_model)
self.assertEqual(src_num_weights, dest_num_weights)
dest_model.set_weights(src_weights)
dest_model_output = dest_model([inputs], training=False)
self.assertAllEqual(src_model_output[0], dest_model_output[0])
self.assertAllEqual(src_model_output[1], dest_model_output[1])
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | -28,909,354,039,817,124 | 5,870,337,246,831,905,000 | 37.547771 | 79 | 0.666061 | false |
babycaseny/poedit | deps/boost/tools/build/test/direct_request_test.py | 44 | 1396 | #!/usr/bin/python
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
# First check some startup.
t.write("jamroot.jam", "")
t.write("jamfile.jam", """\
exe a : a.cpp b ;
lib b : b.cpp ;
""")
t.write("a.cpp", """\
void
# ifdef _WIN32
__declspec(dllimport)
# endif
foo();
int main() { foo(); }
""")
t.write("b.cpp", """\
#ifdef MACROS
void
# ifdef _WIN32
__declspec(dllexport)
# endif
foo() {}
#endif
# ifdef _WIN32
int __declspec(dllexport) force_implib_creation;
# endif
""")
t.run_build_system(["define=MACROS"])
t.expect_addition("bin/$toolset/debug/"
* (BoostBuild.List("a.obj b.obj b.dll a.exe")))
# When building a debug version, the 'define' still applies.
t.rm("bin")
t.run_build_system(["debug", "define=MACROS"])
t.expect_addition("bin/$toolset/debug/"
* (BoostBuild.List("a.obj b.obj b.dll a.exe")))
# When building a release version, the 'define' still applies.
t.write("jamfile.jam", """\
exe a : a.cpp b : <variant>debug ;
lib b : b.cpp ;
""")
t.rm("bin")
t.run_build_system(["release", "define=MACROS"])
# Regression test: direct build request was not working when there was more
# than one level of 'build-project'.
t.rm(".")
t.write("jamroot.jam", "")
t.write("jamfile.jam", "build-project a ;")
t.write("a/jamfile.jam", "build-project b ;")
t.write("a/b/jamfile.jam", "")
t.run_build_system(["release"])
t.cleanup()
| mit | 1,781,348,137,777,316,900 | 1,074,254,532,110,977,300 | 19.529412 | 75 | 0.636103 | false |
PrincetonUniversity/pox | pox/lib/packet/eapol.py | 47 | 3220 | # Copyright 2011 James McCauley
# Copyright 2008 (C) Nicira, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is derived from the packet library in NOX, which was
# developed by Nicira, Inc.
#======================================================================
#
# EAPOL Header Format (see IEEE 802.1X-2004):
#
# Octet 0: Protocol version (1 or 2).
# Octet 1: Packet type:
# 0 = EAP packet
# 1 = EAPOL-Start
# 2 = EAPOL-Logoff
# 3 = EAPOL-Key
# 4 = EAPOL-Encapsulated-ASF-Alert
# Octets 2-3: Length of packet body field (0 if packet body is absent)
# Octets 4-end: Packet body (present only for packet types 0, 3, 4)
#
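# For example, an EAPOL-Start frame under protocol version 1 carries no
# packet body, so the whole frame is just the four header octets
#
#   version=1, type=1, length=0  ->  01 01 00 00 (hex)
#
# which matches the '!BBH' layout used by parse() and hdr() below.
#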
#======================================================================
import struct
from packet_utils import *
from packet_base import packet_base
from eap import *
class eapol(packet_base):
"EAP over LAN packet"
MIN_LEN = 4
V1_PROTO = 1
V2_PROTO = 2
EAP_TYPE = 0
EAPOL_START_TYPE = 1
EAPOL_LOGOFF_TYPE = 2
EAPOL_KEY_TYPE = 3
EAPOL_ENCAPSULATED_ASF_ALERT = 4
type_names = {EAP_TYPE: "EAP",
EAPOL_START_TYPE: "EAPOL-Start",
EAPOL_LOGOFF_TYPE: "EAPOL-Logoff",
EAPOL_KEY_TYPE: "EAPOL-Key",
EAPOL_ENCAPSULATED_ASF_ALERT: "EAPOL-Encapsulated-ASF-Alert"}
@staticmethod
def type_name(type):
return eapol.type_names.get(type, "type%d" % type)
def __init__(self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
self.version = self.V1_PROTO
self.type = self.EAP_TYPE
self.bodylen = 0
if raw is not None:
self.parse(raw)
self._init(kw)
def __str__(self):
s = '[EAPOL v%d %s]' % (self.version, self.type_name(self.type))
return s
def parse(self, raw):
assert isinstance(raw, bytes)
self.raw = raw
dlen = len(raw)
if dlen < self.MIN_LEN:
self.msg('(eapol parse) warning EAPOL packet data too short to parse header: data len %u' % (dlen,))
return
(self.version, self.type, self.bodylen) \
= struct.unpack('!BBH', raw[:self.MIN_LEN])
self.parsed = True
if self.type == self.EAP_TYPE:
self.next = eap(raw=raw[self.MIN_LEN:], prev=self)
elif (self.type == self.EAPOL_START_TYPE
or self.type == self.EAPOL_LOGOFF_TYPE):
pass # These types have no payloads.
else:
self.msg('warning unsupported EAPOL type: %s' % (self.type_name(self.type),))
def hdr(self, payload):
return struct.pack('!BBH', self.version, self.type, self.bodylen)
| apache-2.0 | -811,519,918,888,953,700 | -3,075,420,469,801,832,000 | 29.961538 | 112 | 0.586646 | false |
chand3040/cloud_that | common/test/acceptance/fixtures/base.py | 148 | 6165 | """
Common code shared by course and library fixtures.
"""
import re
import requests
import json
from lazy import lazy
from . import STUDIO_BASE_URL
class StudioApiLoginError(Exception):
"""
Error occurred while logging in to the Studio API.
"""
pass
class StudioApiFixture(object):
"""
Base class for fixtures that use the Studio restful API.
"""
def __init__(self):
# Info about the auto-auth user used to create the course/library.
self.user = {}
@lazy
def session(self):
"""
Log in as a staff user, then return a `requests` `session` object for the logged in user.
Raises a `StudioApiLoginError` if the login fails.
"""
# Use auto-auth to retrieve the session for a logged in user
session = requests.Session()
response = session.get(STUDIO_BASE_URL + "/auto_auth?staff=true")
# Return the session from the request
if response.ok:
# auto_auth returns information about the newly created user
# capture this so it can be used by by the testcases.
user_pattern = re.compile(r'Logged in user {0} \({1}\) with password {2} and user_id {3}'.format(
r'(?P<username>\S+)', r'(?P<email>[^\)]+)', r'(?P<password>\S+)', r'(?P<user_id>\d+)'))
user_matches = re.match(user_pattern, response.text)
if user_matches:
self.user = user_matches.groupdict()
return session
else:
msg = "Could not log in to use Studio restful API. Status code: {0}".format(response.status_code)
raise StudioApiLoginError(msg)
@lazy
def session_cookies(self):
"""
Log in as a staff user, then return the cookies for the session (as a dict)
Raises a `StudioApiLoginError` if the login fails.
"""
return {key: val for key, val in self.session.cookies.items()}
@lazy
def headers(self):
"""
Default HTTP headers dict.
"""
return {
'Content-type': 'application/json',
'Accept': 'application/json',
'X-CSRFToken': self.session_cookies.get('csrftoken', '')
}
class FixtureError(Exception):
"""
Error occurred while installing a course or library fixture.
"""
pass
class XBlockContainerFixture(StudioApiFixture):
"""
Base class for course and library fixtures.
"""
def __init__(self):
self.children = []
super(XBlockContainerFixture, self).__init__()
def add_children(self, *args):
"""
Add children XBlock to the container.
Each item in `args` is an `XBlockFixtureDesc` object.
Returns the fixture to allow chaining.
"""
self.children.extend(args)
return self
def _create_xblock_children(self, parent_loc, xblock_descriptions):
"""
Recursively create XBlock children.
"""
for desc in xblock_descriptions:
loc = self.create_xblock(parent_loc, desc)
self._create_xblock_children(loc, desc.children)
def create_xblock(self, parent_loc, xblock_desc):
"""
Create an XBlock with `parent_loc` (the location of the parent block)
and `xblock_desc` (an `XBlockFixtureDesc` instance).
"""
create_payload = {
'category': xblock_desc.category,
'display_name': xblock_desc.display_name,
}
if parent_loc is not None:
create_payload['parent_locator'] = parent_loc
# Create the new XBlock
response = self.session.post(
STUDIO_BASE_URL + '/xblock/',
data=json.dumps(create_payload),
headers=self.headers,
)
if not response.ok:
msg = "Could not create {0}. Status was {1}".format(xblock_desc, response.status_code)
raise FixtureError(msg)
try:
loc = response.json().get('locator')
xblock_desc.locator = loc
except ValueError:
raise FixtureError("Could not decode JSON from '{0}'".format(response.content))
# Configure the XBlock
response = self.session.post(
STUDIO_BASE_URL + '/xblock/' + loc,
data=xblock_desc.serialize(),
headers=self.headers,
)
if response.ok:
return loc
else:
raise FixtureError("Could not update {0}. Status code: {1}".format(xblock_desc, response.status_code))
def _update_xblock(self, locator, data):
"""
Update the xblock at `locator`.
"""
# Create the new XBlock
response = self.session.put(
"{}/xblock/{}".format(STUDIO_BASE_URL, locator),
data=json.dumps(data),
headers=self.headers,
)
if not response.ok:
msg = "Could not update {} with data {}. Status was {}".format(locator, data, response.status_code)
raise FixtureError(msg)
def _encode_post_dict(self, post_dict):
"""
Encode `post_dict` (a dictionary) as UTF-8 encoded JSON.
"""
return json.dumps({
k: v.encode('utf-8') if isinstance(v, basestring) else v
for k, v in post_dict.items()
})
def get_nested_xblocks(self, category=None):
"""
Return a list of nested XBlocks for the container that can be filtered by
category.
"""
xblocks = self._get_nested_xblocks(self)
if category:
xblocks = [x for x in xblocks if x.category == category]
return xblocks
def _get_nested_xblocks(self, xblock_descriptor):
"""
Return a list of nested XBlocks for the container.
"""
xblocks = list(xblock_descriptor.children)
for child in xblock_descriptor.children:
xblocks.extend(self._get_nested_xblocks(child))
return xblocks
def _publish_xblock(self, locator):
"""
Publish the xblock at `locator`.
"""
self._update_xblock(locator, {'publish': 'make_public'})
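# A minimal usage sketch for subclasses of XBlockContainerFixture (the course
# and library fixtures built on top of it). Children are described with the
# `XBlockFixtureDesc` objects referenced in the docstrings above and may be
# nested; `add_children` returns the fixture so calls can be chained. The
# categories and display names below are illustrative only.
#
#     fixture.add_children(
#         XBlockFixtureDesc('chapter', 'Test Section').add_children(
#             XBlockFixtureDesc('sequential', 'Test Subsection')
#         )
#     )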
| agpl-3.0 | -1,577,727,334,375,148,300 | -1,469,135,569,689,559,800 | 30.454082 | 115 | 0.57794 | false |
jelly/calibre | src/calibre/utils/resources.py | 1 | 3853 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import __builtin__, sys, os
from calibre import config_dir
class PathResolver(object):
def __init__(self):
self.locations = [sys.resources_location]
self.cache = {}
def suitable(path):
try:
return os.path.exists(path) and os.path.isdir(path) and \
os.listdir(path)
except:
pass
return False
self.default_path = sys.resources_location
dev_path = os.environ.get('CALIBRE_DEVELOP_FROM', None)
self.using_develop_from = False
if dev_path is not None:
dev_path = os.path.join(os.path.abspath(
os.path.dirname(dev_path)), 'resources')
if suitable(dev_path):
self.locations.insert(0, dev_path)
self.default_path = dev_path
self.using_develop_from = True
user_path = os.path.join(config_dir, 'resources')
self.user_path = None
if suitable(user_path):
self.locations.insert(0, user_path)
self.user_path = user_path
def __call__(self, path, allow_user_override=True):
path = path.replace(os.sep, '/')
key = (path, allow_user_override)
ans = self.cache.get(key, None)
if ans is None:
for base in self.locations:
if not allow_user_override and base == self.user_path:
continue
fpath = os.path.join(base, *path.split('/'))
if os.path.exists(fpath):
ans = fpath
break
if ans is None:
ans = os.path.join(self.default_path, *path.split('/'))
self.cache[key] = ans
return ans
_resolver = PathResolver()
def get_path(path, data=False, allow_user_override=True):
fpath = _resolver(path, allow_user_override=allow_user_override)
if data:
with open(fpath, 'rb') as f:
return f.read()
return fpath
def get_image_path(path, data=False, allow_user_override=True):
if not path:
return get_path('images', allow_user_override=allow_user_override)
return get_path('images/'+path, data=data, allow_user_override=allow_user_override)
def js_name_to_path(name, ext='.coffee'):
path = (u'/'.join(name.split('.'))) + ext
d = os.path.dirname
base = d(d(os.path.abspath(__file__)))
return os.path.join(base, path)
def _compile_coffeescript(name):
from calibre.utils.serve_coffee import compile_coffeescript
src = js_name_to_path(name)
with open(src, 'rb') as f:
cs, errors = compile_coffeescript(f.read(), src)
if errors:
for line in errors:
print (line)
raise Exception('Failed to compile coffeescript'
': %s'%src)
return cs
def compiled_coffeescript(name, dynamic=False):
import zipfile
zipf = get_path('compiled_coffeescript.zip', allow_user_override=False)
with zipfile.ZipFile(zipf, 'r') as zf:
if dynamic:
import json
existing_hash = json.loads(zf.comment or '{}').get(name + '.js')
if existing_hash is not None:
import hashlib
with open(js_name_to_path(name), 'rb') as f:
if existing_hash == hashlib.sha1(f.read()).hexdigest():
return zf.read(name + '.js')
return _compile_coffeescript(name)
else:
return zf.read(name+'.js')
__builtin__.__dict__['P'] = get_path
__builtin__.__dict__['I'] = get_image_path
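# A minimal usage sketch for the helpers injected above. P() resolves a path
# inside the resources directory (honouring CALIBRE_DEVELOP_FROM and the
# per-user resources override), and I() does the same for image resources;
# the resource names below are illustrative only.
#
#     template_dir = P('templates')                  # absolute path
#     icon_bytes = I('window-close.png', data=True)  # raw file contents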
| gpl-3.0 | 636,397,914,065,387,800 | -6,540,776,084,492,148,000 | 30.842975 | 87 | 0.562938 | false |
areeda/gwpy | gwpy/timeseries/io/core.py | 3 | 4692 | # -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2018-2020)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Basic I/O routines for :mod:`gwpy.timeseries`
"""
from ...io import cache as io_cache
from ...io.mp import read_multi as io_read_multi
def read(cls, source, *args, **kwargs):
"""Read data from a source into a `gwpy.timeseries` object.
    This method is just the internal worker for `TimeSeries.read` and
`TimeSeriesDict.read`, and isn't meant to be called directly.
"""
# if reading a cache, read it now and sieve
if io_cache.is_cache(source):
from .cache import preformat_cache
source = preformat_cache(source, *args[1:],
start=kwargs.get('start'),
end=kwargs.get('end'))
# get join arguments
pad = kwargs.pop('pad', None)
gap = kwargs.pop('gap', 'raise' if pad is None else 'pad')
joiner = _join_factory(
cls,
gap,
pad,
kwargs.get("start", None),
kwargs.get("end", None),
)
# read
return io_read_multi(joiner, cls, source, *args, **kwargs)
def _join_factory(cls, gap, pad, start, end):
"""Build a joiner for the given cls, and the given padding options
"""
if issubclass(cls, dict):
def _join(data):
out = cls()
data = list(data)
while data:
tsd = data.pop(0)
out.append(tsd, gap=gap, pad=pad)
del tsd
if gap in ("pad", "raise"):
for key in out:
out[key] = _pad_series(
out[key],
pad,
start,
end,
error=(gap == "raise"),
)
return out
else:
from .. import TimeSeriesBaseList
def _join(arrays):
list_ = TimeSeriesBaseList(*arrays)
joined = list_.join(pad=pad, gap=gap)
if gap in ("pad", "raise"):
return _pad_series(
joined,
pad,
start,
end,
error=(gap == "raise"),
)
return joined
return _join
def _pad_series(ts, pad, start=None, end=None, error=False):
"""Pad a timeseries to match the specified [start, end) limits
To cover a gap in data returned from a data source.
Parameters
----------
ts : `gwpy.types.Series`
the input series
pad : `float`, `astropy.units.Quantity`
the value with which to pad
start : `float`, `astropy.units.Quantity`, optional
the desired start point of the X-axis, defaults to
the start point of the incoming series
end : `float`, `astropy.units.Quantity`, optional
the desired end point of the X-axis, defaults to
the end point of the incoming series
error : `bool`, optional
raise `ValueError` when gaps are present, rather than padding
anything
Returns
-------
series : instance of incoming series type
a padded version of the series. This may be the same
        object if no padding is needed.
Raises
------
ValueError
if `error=True` is given and padding would have been required
to match the request.
"""
span = ts.span
if start is None:
start = span[0]
if end is None:
end = span[1]
pada = max(int((span[0] - start) * ts.sample_rate.value), 0)
padb = max(int((end - span[1]) * ts.sample_rate.value), 0)
if not (pada or padb): # if noop, just return the input
return ts
if error: # if error, bail out now
raise ValueError(
"{} with span {} does not cover requested interval {}".format(
type(ts).__name__,
span,
type(span)(start, end),
)
)
# otherwise applying the padding
return ts.pad((pada, padb), mode='constant', constant_values=(pad,))
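# Worked example of the padding arithmetic above: for a series sampled at
# 16 Hz spanning [10, 20) seconds, a request for [8, 22) gives
#
#     pada = (10 - 8) * 16 = 32 samples prepended,
#     padb = (22 - 20) * 16 = 32 samples appended,
#
# each filled with ``pad``, while ``error=True`` would instead raise
# `ValueError` because the source does not cover the requested interval.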
| gpl-3.0 | -2,855,136,849,425,177,600 | -2,536,943,367,135,243,000 | 30.918367 | 74 | 0.558824 | false |