repo_name | ref | path | copies | content |
---|---|---|---|---|
JazzeYoung/VeryDeepAutoEncoder | refs/heads/master | pylearn2/models/differentiable_sparse_coding.py | 44 | """
An implementation of the model described in "Differentiable Sparse Coding" by
Bradley and Bagnell
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
import logging
import numpy as N
from theano.compat.six.moves import xrange
import theano.tensor as T
import theano
from theano import function, shared, config
floatX = config.floatX
from pylearn2.utils.rng import make_np_rng
logger = logging.getLogger(__name__)
class DifferentiableSparseCoding(object):
"""
.. todo::
WRITEME
Parameters
----------
nvis : WRITEME
nhid : WRITEME
init_lambda : WRITEME
init_p : WRITEME
init_alpha : WRITEME
learning_rate : WRITEME
"""
def __init__(self, nvis, nhid,
init_lambda,
init_p, init_alpha, learning_rate):
self.nvis = int(nvis)
self.nhid = int(nhid)
self.init_lambda = float(init_lambda)
self.init_p = float(init_p)
self.init_alpha = N.cast[config.floatX](init_alpha)
self.tol = 1e-6
self.time_constant = 1e-2
self.learning_rate = N.cast[config.floatX](learning_rate)
self.predictor_learning_rate = self.learning_rate
self.rng = make_np_rng(None, [1,2,3], which_method="randn")
self.error_record = []
self.ERROR_RECORD_MODE_MONITORING = 0
self.error_record_mode = self.ERROR_RECORD_MODE_MONITORING
self.instrumented = False
self.redo_everything()
def get_output_dim(self):
"""
.. todo::
WRITEME
"""
return self.nhid
def get_output_channels(self):
"""
.. todo::
WRITEME
"""
return self.nhid
def normalize_W(self):
"""
.. todo::
WRITEME
"""
W = self.W.get_value(borrow=True)
norms = N.sqrt(N.square(W).sum(axis=0))
self.W.set_value(W/norms, borrow=True)
def redo_everything(self):
"""
.. todo::
WRITEME
"""
self.W = shared(N.cast[floatX](self.rng.randn(self.nvis,self.nhid)), name='W')
self.pred_W = shared(self.W.get_value(borrow=False),name='pred_W')
self.pred_b = shared(N.zeros(self.nhid,dtype=floatX),name='pred_b')
self.pred_g = shared(N.ones(self.nhid,dtype=floatX),name='pred_g')
self.normalize_W()
self.p = shared(N.zeros(self.nhid, dtype=floatX)+N.cast[floatX](self.init_p), name='p')
        # 'lambda' is deliberately misspelled as 'lamda' because 'lambda' is a reserved keyword in Python
self.lamda = shared(
N.zeros(
self.nhid, dtype=floatX)+
N.cast[floatX](self.init_lambda), name='lambda')
self.alpha = self.init_alpha
self.failure_rate = .5
self.examples_seen = 0
self.batches_seen = 0
self.redo_theano()
def recons_error(self, v, h):
"""
.. todo::
WRITEME
"""
recons = T.dot(self.W,h)
diffs = recons - v
rval = T.dot(diffs,diffs) / N.cast[floatX](self.nvis)
return rval
def recons_error_batch(self, V, H):
"""
.. todo::
WRITEME
"""
recons = T.dot(H,self.W.T)
diffs = recons - V
rval = T.mean(T.sqr(diffs))
return rval
def sparsity_penalty(self, v, h):
"""
.. todo::
WRITEME
"""
sparsity_measure = h * T.log(h) - h * T.log(self.p) - h + self.p
rval = T.dot(self.lamda, sparsity_measure) / N.cast[floatX](self.nhid)
return rval
def sparsity_penalty_batch(self, V, H):
"""
.. todo::
WRITEME
"""
sparsity_measure = H * T.log(H) - H * T.log(self.p) - H + self.p
sparsity_measure_exp = T.mean(sparsity_measure, axis=0)
rval = T.dot(self.lamda, sparsity_measure_exp) / N.cast[floatX](self.nhid)
return rval
def coding_obj(self, v, h):
"""
.. todo::
WRITEME
"""
return self.recons_error(v,h) + self.sparsity_penalty(v,h)
def coding_obj_batch(self, V, H):
"""
.. todo::
WRITEME
"""
return self.recons_error_batch(V,H) + self.sparsity_penalty_batch(V,H)
def predict(self, V):
"""
.. todo::
WRITEME
"""
rval = T.nnet.sigmoid(T.dot(V,self.pred_W)+self.pred_b)*self.pred_g
assert rval.type.dtype == V.type.dtype
return rval
def redo_theano(self):
"""
.. todo::
WRITEME
"""
self.h = shared(N.zeros(self.nhid, dtype=floatX), name='h')
self.v = shared(N.zeros(self.nvis, dtype=floatX), name='v')
input_v = T.vector()
assert input_v.type.dtype == floatX
self.init_h_v = function([input_v], updates = { self.h : self.predict(input_v),
self.v : input_v } )
coding_obj = self.coding_obj(self.v, self.h)
assert len(coding_obj.type.broadcastable) == 0
coding_grad = T.grad(coding_obj, self.h)
assert len(coding_grad.type.broadcastable) == 1
self.coding_obj_grad = function([], [coding_obj, coding_grad] )
self.new_h = shared(N.zeros(self.nhid, dtype=floatX), name='new_h')
alpha = T.scalar(name='alpha')
outside_grad = T.vector(name='outside_grad')
new_h = T.clip(self.h * T.exp(-alpha * outside_grad), 1e-10, 1e4)
new_obj = self.coding_obj(self.v, new_h)
self.try_step = function( [alpha, outside_grad], updates = { self.new_h : new_h }, outputs = new_obj )
self.accept_h = function( [], updates = { self.h : self.new_h } )
self.get_h = function( [] , self.h )
V = T.matrix(name='V')
H = T.matrix(name='H')
coding_obj_batch = self.coding_obj_batch(V,H)
self.code_learning_obj = function( [V,H], coding_obj_batch)
learning_grad = T.grad( coding_obj_batch, self.W )
self.code_learning_step = function( [V,H,alpha], updates = { self.W : self.W - alpha * learning_grad } )
pred_obj = T.mean(T.sqr(self.predict(V)-H))
predictor_params = [ self.pred_W, self.pred_b, self.pred_g ]
pred_grads = T.grad(pred_obj, wrt = predictor_params )
predictor_updates = {}
for param, grad in zip(predictor_params, pred_grads):
predictor_updates[param] = param - alpha * grad
predictor_updates[self.pred_g ] = T.clip(predictor_updates[self.pred_g], N.cast[floatX](0.5), N.cast[floatX](1000.))
self.train_predictor = function([V,H,alpha] , updates = predictor_updates )
def weights_format(self):
"""
.. todo::
WRITEME
"""
return ['v','h']
def error_func(self, x):
"""
.. todo::
WRITEME
"""
batch_size = x.shape[0]
H = N.zeros((batch_size,self.nhid),dtype=floatX)
for i in xrange(batch_size):
assert self.alpha > 9e-8
H[i,:] = self.optimize_h(x[i,:])
assert self.alpha > 9e-8
return self.code_learning_obj(x,H)
def record_monitoring_error(self, dataset, batch_size, batches):
"""
.. todo::
WRITEME
"""
logger.info('running on monitoring set')
assert self.error_record_mode == self.ERROR_RECORD_MODE_MONITORING
w = self.W.get_value(borrow=True)
logger.info('weights summary: '
'({0}, {1}, {2})'.format(w.min(), w.mean(), w.max()))
errors = []
if self.instrumented:
self.clear_instruments()
for i in xrange(batches):
x = dataset.get_batch_design(batch_size)
error = self.error_func(x)
errors.append( error )
if self.instrumented:
self.update_instruments(x)
self.error_record.append( (self.examples_seen, self.batches_seen, N.asarray(errors).mean() ) )
if self.instrumented:
self.instrument_record.begin_report(examples_seen = self.examples_seen, batches_seen = self.batches_seen)
self.make_instrument_report()
self.instrument_record.end_report()
self.clear_instruments()
logger.info('monitoring set done')
def infer_h(self, v):
"""
.. todo::
WRITEME
"""
return self.optimize_h(v)
def optimize_h(self, v):
"""
.. todo::
WRITEME
"""
assert self.alpha > 9e-8
self.init_h_v(v)
first = True
while True:
obj, grad = self.coding_obj_grad()
if first:
#print 'orig_obj: ', obj
first = False
assert not N.any(N.isnan(obj))
assert not N.any(N.isnan(grad))
if N.abs(grad).max() < self.tol:
break
#print 'max gradient ',N.abs(grad).max()
cur_alpha = N.cast[floatX] ( self.alpha + 0.0 )
new_obj = self.try_step(cur_alpha, grad)
assert not N.isnan(new_obj)
self.failure_rate = (1. - self.time_constant ) * self.failure_rate + self.time_constant * float(new_obj > obj )
assert self.alpha > 9e-8
if self.failure_rate > .6 and self.alpha > 1e-7:
self.alpha *= .9
#print '!!!!!!!!!!!!!!!!!!!!!!shrank alpha to ',self.alpha
elif self.failure_rate < .3:
self.alpha *= 1.1
#print '**********************grew alpha to ',self.alpha
assert self.alpha > 9e-8
while new_obj >= obj:
cur_alpha *= .9
if cur_alpha < 1e-12:
self.accept_h()
#print 'failing final obj ',new_obj
return self.get_h()
new_obj = self.try_step(cur_alpha, grad)
assert not N.isnan(new_obj)
self.accept_h()
#print 'final obj ',new_obj
return self.get_h()
def train_batch(self, dataset, batch_size):
"""
.. todo::
WRITEME
"""
self.learn_mini_batch(dataset.get_batch_design(batch_size))
return True
def learn_mini_batch(self, x):
"""
.. todo::
WRITEME
"""
assert self.alpha > 9e-8
batch_size = x.shape[0]
H = N.zeros((batch_size,self.nhid),dtype=floatX)
for i in xrange(batch_size):
assert self.alpha > 9e-8
H[i,:] = self.optimize_h(x[i,:])
assert self.alpha > 9e-8
self.code_learning_step(x,H,self.learning_rate)
self.normalize_W()
self.train_predictor(x,H,self.predictor_learning_rate)
self.examples_seen += x.shape[0]
self.batches_seen += 1
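# --- Usage sketch (illustrative; not part of the original module) ---
# A minimal, hedged example of driving this class. It assumes a pylearn2
# dataset object exposing get_batch_design(batch_size), which is exactly
# what train_batch() above consumes; every hyperparameter value below is
# hypothetical.
#
#     model = DifferentiableSparseCoding(nvis=784, nhid=400,
#                                        init_lambda=1.0, init_p=0.05,
#                                        init_alpha=1e-3, learning_rate=1e-2)
#     for _ in xrange(100):
#         model.train_batch(dataset, batch_size=10)  # 'dataset' is assumed
#     h = model.infer_h(x)  # x: 1-D array of length nvis -> sparse code of length nhid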
|
xeddmc/pupy | refs/heads/master | pupy/packages/windows/amd64/psutil/_psosx.py | 81 | #!/usr/bin/env python
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""OSX platform implementation."""
import errno
import functools
import os
from collections import namedtuple
from . import _common
from . import _psposix
from . import _psutil_osx as cext
from . import _psutil_posix as cext_posix
from ._common import conn_tmap, usage_percent, isfile_strict
from ._common import sockfam_to_enum, socktype_to_enum
__extra__all__ = []
# --- constants
PAGESIZE = os.sysconf("SC_PAGE_SIZE")
AF_LINK = cext_posix.AF_LINK
# http://students.mimuw.edu.pl/lxr/source/include/net/tcp_states.h
TCP_STATUSES = {
cext.TCPS_ESTABLISHED: _common.CONN_ESTABLISHED,
cext.TCPS_SYN_SENT: _common.CONN_SYN_SENT,
cext.TCPS_SYN_RECEIVED: _common.CONN_SYN_RECV,
cext.TCPS_FIN_WAIT_1: _common.CONN_FIN_WAIT1,
cext.TCPS_FIN_WAIT_2: _common.CONN_FIN_WAIT2,
cext.TCPS_TIME_WAIT: _common.CONN_TIME_WAIT,
cext.TCPS_CLOSED: _common.CONN_CLOSE,
cext.TCPS_CLOSE_WAIT: _common.CONN_CLOSE_WAIT,
cext.TCPS_LAST_ACK: _common.CONN_LAST_ACK,
cext.TCPS_LISTEN: _common.CONN_LISTEN,
cext.TCPS_CLOSING: _common.CONN_CLOSING,
cext.PSUTIL_CONN_NONE: _common.CONN_NONE,
}
PROC_STATUSES = {
cext.SIDL: _common.STATUS_IDLE,
cext.SRUN: _common.STATUS_RUNNING,
cext.SSLEEP: _common.STATUS_SLEEPING,
cext.SSTOP: _common.STATUS_STOPPED,
cext.SZOMB: _common.STATUS_ZOMBIE,
}
scputimes = namedtuple('scputimes', ['user', 'nice', 'system', 'idle'])
svmem = namedtuple(
'svmem', ['total', 'available', 'percent', 'used', 'free',
'active', 'inactive', 'wired'])
pextmem = namedtuple('pextmem', ['rss', 'vms', 'pfaults', 'pageins'])
pmmap_grouped = namedtuple(
'pmmap_grouped',
'path rss private swapped dirtied ref_count shadow_depth')
pmmap_ext = namedtuple(
'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields))
# set later from __init__.py
NoSuchProcess = None
ZombieProcess = None
AccessDenied = None
TimeoutExpired = None
# --- functions
def virtual_memory():
"""System virtual memory as a namedtuple."""
total, active, inactive, wired, free = cext.virtual_mem()
avail = inactive + free
used = active + inactive + wired
percent = usage_percent((total - avail), total, _round=1)
return svmem(total, avail, percent, used, free,
active, inactive, wired)
def swap_memory():
"""Swap system memory as a (total, used, free, sin, sout) tuple."""
total, used, free, sin, sout = cext.swap_mem()
percent = usage_percent(used, total, _round=1)
return _common.sswap(total, used, free, percent, sin, sout)
def cpu_times():
"""Return system CPU times as a namedtuple."""
user, nice, system, idle = cext.cpu_times()
return scputimes(user, nice, system, idle)
def per_cpu_times():
"""Return system CPU times as a named tuple"""
ret = []
for cpu_t in cext.per_cpu_times():
user, nice, system, idle = cpu_t
item = scputimes(user, nice, system, idle)
ret.append(item)
return ret
def cpu_count_logical():
"""Return the number of logical CPUs in the system."""
return cext.cpu_count_logical()
def cpu_count_physical():
"""Return the number of physical CPUs in the system."""
return cext.cpu_count_phys()
def boot_time():
"""The system boot time expressed in seconds since the epoch."""
return cext.boot_time()
def disk_partitions(all=False):
retlist = []
partitions = cext.disk_partitions()
for partition in partitions:
device, mountpoint, fstype, opts = partition
if device == 'none':
device = ''
if not all:
if not os.path.isabs(device) or not os.path.exists(device):
continue
ntuple = _common.sdiskpart(device, mountpoint, fstype, opts)
retlist.append(ntuple)
return retlist
def users():
retlist = []
rawlist = cext.users()
for item in rawlist:
user, tty, hostname, tstamp = item
if tty == '~':
continue # reboot or shutdown
if not tstamp:
continue
nt = _common.suser(user, tty or None, hostname or None, tstamp)
retlist.append(nt)
return retlist
def net_connections(kind='inet'):
# Note: on OSX this will fail with AccessDenied unless
# the process is owned by root.
ret = []
for pid in pids():
try:
cons = Process(pid).connections(kind)
except NoSuchProcess:
continue
else:
if cons:
for c in cons:
c = list(c) + [pid]
ret.append(_common.sconn(*c))
return ret
def net_if_stats():
"""Get NIC stats (isup, duplex, speed, mtu)."""
names = net_io_counters().keys()
ret = {}
for name in names:
isup, duplex, speed, mtu = cext_posix.net_if_stats(name)
if hasattr(_common, 'NicDuplex'):
duplex = _common.NicDuplex(duplex)
ret[name] = _common.snicstats(isup, duplex, speed, mtu)
return ret
pids = cext.pids
pid_exists = _psposix.pid_exists
disk_usage = _psposix.disk_usage
net_io_counters = cext.net_io_counters
disk_io_counters = cext.disk_io_counters
net_if_addrs = cext_posix.net_if_addrs
def wrap_exceptions(fun):
"""Decorator which translates bare OSError exceptions into
NoSuchProcess and AccessDenied.
"""
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
try:
return fun(self, *args, **kwargs)
except OSError as err:
# support for private module import
if (NoSuchProcess is None or AccessDenied is None or
ZombieProcess is None):
raise
if err.errno == errno.ESRCH:
if not pid_exists(self.pid):
raise NoSuchProcess(self.pid, self._name)
else:
raise ZombieProcess(self.pid, self._name, self._ppid)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._name)
raise
return wrapper
class Process(object):
"""Wrapper class around underlying C implementation."""
__slots__ = ["pid", "_name", "_ppid"]
def __init__(self, pid):
self.pid = pid
self._name = None
self._ppid = None
@wrap_exceptions
def name(self):
return cext.proc_name(self.pid)
@wrap_exceptions
def exe(self):
return cext.proc_exe(self.pid)
@wrap_exceptions
def cmdline(self):
if not pid_exists(self.pid):
raise NoSuchProcess(self.pid, self._name)
return cext.proc_cmdline(self.pid)
@wrap_exceptions
def ppid(self):
return cext.proc_ppid(self.pid)
@wrap_exceptions
def cwd(self):
return cext.proc_cwd(self.pid)
@wrap_exceptions
def uids(self):
real, effective, saved = cext.proc_uids(self.pid)
return _common.puids(real, effective, saved)
@wrap_exceptions
def gids(self):
real, effective, saved = cext.proc_gids(self.pid)
return _common.pgids(real, effective, saved)
@wrap_exceptions
def terminal(self):
tty_nr = cext.proc_tty_nr(self.pid)
tmap = _psposix._get_terminal_map()
try:
return tmap[tty_nr]
except KeyError:
return None
@wrap_exceptions
def memory_info(self):
rss, vms = cext.proc_memory_info(self.pid)[:2]
return _common.pmem(rss, vms)
@wrap_exceptions
def memory_info_ex(self):
rss, vms, pfaults, pageins = cext.proc_memory_info(self.pid)
return pextmem(rss, vms, pfaults * PAGESIZE, pageins * PAGESIZE)
@wrap_exceptions
def cpu_times(self):
user, system = cext.proc_cpu_times(self.pid)
return _common.pcputimes(user, system)
@wrap_exceptions
def create_time(self):
return cext.proc_create_time(self.pid)
@wrap_exceptions
def num_ctx_switches(self):
return _common.pctxsw(*cext.proc_num_ctx_switches(self.pid))
@wrap_exceptions
def num_threads(self):
return cext.proc_num_threads(self.pid)
@wrap_exceptions
def open_files(self):
if self.pid == 0:
return []
files = []
rawlist = cext.proc_open_files(self.pid)
for path, fd in rawlist:
if isfile_strict(path):
ntuple = _common.popenfile(path, fd)
files.append(ntuple)
return files
@wrap_exceptions
def connections(self, kind='inet'):
if kind not in conn_tmap:
raise ValueError("invalid %r kind argument; choose between %s"
% (kind, ', '.join([repr(x) for x in conn_tmap])))
families, types = conn_tmap[kind]
rawlist = cext.proc_connections(self.pid, families, types)
ret = []
for item in rawlist:
fd, fam, type, laddr, raddr, status = item
status = TCP_STATUSES[status]
fam = sockfam_to_enum(fam)
type = socktype_to_enum(type)
nt = _common.pconn(fd, fam, type, laddr, raddr, status)
ret.append(nt)
return ret
@wrap_exceptions
def num_fds(self):
if self.pid == 0:
return 0
return cext.proc_num_fds(self.pid)
@wrap_exceptions
def wait(self, timeout=None):
try:
return _psposix.wait_pid(self.pid, timeout)
except _psposix.TimeoutExpired:
# support for private module import
if TimeoutExpired is None:
raise
raise TimeoutExpired(timeout, self.pid, self._name)
@wrap_exceptions
def nice_get(self):
return cext_posix.getpriority(self.pid)
@wrap_exceptions
def nice_set(self, value):
return cext_posix.setpriority(self.pid, value)
@wrap_exceptions
def status(self):
code = cext.proc_status(self.pid)
# XXX is '?' legit? (we're not supposed to return it anyway)
return PROC_STATUSES.get(code, '?')
@wrap_exceptions
def threads(self):
rawlist = cext.proc_threads(self.pid)
retlist = []
for thread_id, utime, stime in rawlist:
ntuple = _common.pthread(thread_id, utime, stime)
retlist.append(ntuple)
return retlist
@wrap_exceptions
def memory_maps(self):
return cext.proc_memory_maps(self.pid)
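# --- Usage sketch (illustrative; not part of the original module) ---
# This platform module is normally reached through the public psutil
# package rather than imported directly. Assuming psutil is installed
# and running on OS X, the primitives above surface roughly like this:
#
#     import os, psutil
#     psutil.virtual_memory()    # svmem(total=..., available=..., percent=..., ...)
#     psutil.cpu_times()         # scputimes(user=..., nice=..., system=..., idle=...)
#     psutil.Process(os.getpid()).name()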
|
ahmadshahwan/cohorte-runtime | refs/heads/master | python/src/lib/python/jsonrpclib/utils.py | 7 | #!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Utility methods, for compatibility between Python versions
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
:author: Thomas Calmant
:license: Apache License 2.0
:version: 1.0.1
"""
# Module version
__version_info__ = (0, 2, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
import sys
# ------------------------------------------------------------------------------
if sys.version_info[0] < 3:
# Python 2
import types
try:
string_types = (
types.StringType,
types.UnicodeType
)
except NameError:
# Python built without unicode support
string_types = (types.StringType,)
numeric_types = (
types.IntType,
types.LongType,
types.FloatType
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
if type(string) is unicode:
return str(string)
return string
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data)
else:
# Python 3
string_types = (
bytes,
str
)
numeric_types = (
int,
float
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
if type(string) is bytes:
return string
return bytes(string, "UTF-8")
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data, "UTF-8")
# ------------------------------------------------------------------------------
# Common
DictType = dict
ListType = list
TupleType = tuple
iterable_types = (
list,
set, frozenset,
tuple
)
value_types = (
bool,
type(None)
)
primitive_types = string_types + numeric_types + value_types
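# --- Usage sketch (illustrative; not part of the original module) ---
# Round-tripping a payload through the helpers above; the behaviour
# shown is the Python 3 branch, where to_bytes() encodes str as UTF-8:
#
#     payload = to_bytes('{"jsonrpc": "2.0"}')  # b'{"jsonrpc": "2.0"}'
#     text = from_bytes(payload)                # '{"jsonrpc": "2.0"}'
#     isinstance(payload, string_types)         # True: bytes counts as a string type here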
|
guettli/django | refs/heads/master | django/contrib/admin/checks.py | 13 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from itertools import chain
from django.apps import apps
from django.conf import settings
from django.contrib.admin.utils import (
NotRelationField, flatten, get_fields_from_path,
)
from django.core import checks
from django.core.exceptions import FieldDoesNotExist
from django.db import models
from django.db.models.constants import LOOKUP_SEP
from django.forms.models import (
BaseModelForm, BaseModelFormSet, _get_foreign_key,
)
from django.template.engine import Engine
def check_admin_app(**kwargs):
from django.contrib.admin.sites import system_check_errors
return system_check_errors
def check_dependencies(**kwargs):
"""
Check that the admin's dependencies are correctly installed.
"""
errors = []
# contrib.contenttypes must be installed.
if not apps.is_installed('django.contrib.contenttypes'):
missing_app = checks.Error(
"'django.contrib.contenttypes' must be in INSTALLED_APPS in order "
"to use the admin application.",
id="admin.E401",
)
errors.append(missing_app)
# The auth context processor must be installed if using the default
# authentication backend.
try:
default_template_engine = Engine.get_default()
except Exception:
# Skip this non-critical check:
# 1. if the user has a non-trivial TEMPLATES setting and Django
# can't find a default template engine
# 2. if anything goes wrong while loading template engines, in
# order to avoid raising an exception from a confusing location
# Catching ImproperlyConfigured suffices for 1. but 2. requires
# catching all exceptions.
pass
else:
if ('django.contrib.auth.context_processors.auth'
not in default_template_engine.context_processors and
'django.contrib.auth.backends.ModelBackend' in settings.AUTHENTICATION_BACKENDS):
missing_template = checks.Error(
"'django.contrib.auth.context_processors.auth' must be in "
"TEMPLATES in order to use the admin application.",
id="admin.E402"
)
errors.append(missing_template)
return errors
class BaseModelAdminChecks(object):
def check(self, admin_obj, **kwargs):
errors = []
errors.extend(self._check_raw_id_fields(admin_obj))
errors.extend(self._check_fields(admin_obj))
errors.extend(self._check_fieldsets(admin_obj))
errors.extend(self._check_exclude(admin_obj))
errors.extend(self._check_form(admin_obj))
errors.extend(self._check_filter_vertical(admin_obj))
errors.extend(self._check_filter_horizontal(admin_obj))
errors.extend(self._check_radio_fields(admin_obj))
errors.extend(self._check_prepopulated_fields(admin_obj))
errors.extend(self._check_view_on_site_url(admin_obj))
errors.extend(self._check_ordering(admin_obj))
errors.extend(self._check_readonly_fields(admin_obj))
return errors
def _check_raw_id_fields(self, obj):
""" Check that `raw_id_fields` only contains field names that are listed
on the model. """
if not isinstance(obj.raw_id_fields, (list, tuple)):
return must_be('a list or tuple', option='raw_id_fields', obj=obj, id='admin.E001')
else:
return list(chain(*[
self._check_raw_id_fields_item(obj, obj.model, field_name, 'raw_id_fields[%d]' % index)
for index, field_name in enumerate(obj.raw_id_fields)
]))
def _check_raw_id_fields_item(self, obj, model, field_name, label):
""" Check an item of `raw_id_fields`, i.e. check that field named
`field_name` exists in model `model` and is a ForeignKey or a
ManyToManyField. """
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label,
model=model, obj=obj, id='admin.E002')
else:
if not field.many_to_many and not isinstance(field, models.ForeignKey):
return must_be('a foreign key or a many-to-many field',
option=label, obj=obj, id='admin.E003')
else:
return []
def _check_fields(self, obj):
""" Check that `fields` only refer to existing fields, doesn't contain
duplicates. Check if at most one of `fields` and `fieldsets` is defined.
"""
if obj.fields is None:
return []
elif not isinstance(obj.fields, (list, tuple)):
return must_be('a list or tuple', option='fields', obj=obj, id='admin.E004')
elif obj.fieldsets:
return [
checks.Error(
"Both 'fieldsets' and 'fields' are specified.",
obj=obj.__class__,
id='admin.E005',
)
]
fields = flatten(obj.fields)
if len(fields) != len(set(fields)):
return [
checks.Error(
"The value of 'fields' contains duplicate field(s).",
obj=obj.__class__,
id='admin.E006',
)
]
return list(chain(*[
self._check_field_spec(obj, obj.model, field_name, 'fields')
for field_name in obj.fields
]))
def _check_fieldsets(self, obj):
""" Check that fieldsets is properly formatted and doesn't contain
duplicates. """
if obj.fieldsets is None:
return []
elif not isinstance(obj.fieldsets, (list, tuple)):
return must_be('a list or tuple', option='fieldsets', obj=obj, id='admin.E007')
else:
return list(chain(*[
self._check_fieldsets_item(obj, obj.model, fieldset, 'fieldsets[%d]' % index)
for index, fieldset in enumerate(obj.fieldsets)
]))
def _check_fieldsets_item(self, obj, model, fieldset, label):
""" Check an item of `fieldsets`, i.e. check that this is a pair of a
set name and a dictionary containing "fields" key. """
if not isinstance(fieldset, (list, tuple)):
return must_be('a list or tuple', option=label, obj=obj, id='admin.E008')
elif len(fieldset) != 2:
return must_be('of length 2', option=label, obj=obj, id='admin.E009')
elif not isinstance(fieldset[1], dict):
return must_be('a dictionary', option='%s[1]' % label, obj=obj, id='admin.E010')
elif 'fields' not in fieldset[1]:
return [
checks.Error(
"The value of '%s[1]' must contain the key 'fields'." % label,
obj=obj.__class__,
id='admin.E011',
)
]
elif not isinstance(fieldset[1]['fields'], (list, tuple)):
return must_be('a list or tuple', option="%s[1]['fields']" % label, obj=obj, id='admin.E008')
fields = flatten(fieldset[1]['fields'])
if len(fields) != len(set(fields)):
return [
checks.Error(
"There are duplicate field(s) in '%s[1]'." % label,
obj=obj.__class__,
id='admin.E012',
)
]
return list(chain(*[
self._check_field_spec(obj, model, fieldset_fields, '%s[1]["fields"]' % label)
for fieldset_fields in fieldset[1]['fields']
]))
def _check_field_spec(self, obj, model, fields, label):
""" `fields` should be an item of `fields` or an item of
fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a
field name or a tuple of field names. """
if isinstance(fields, tuple):
return list(chain(*[
self._check_field_spec_item(obj, model, field_name, "%s[%d]" % (label, index))
for index, field_name in enumerate(fields)
]))
else:
return self._check_field_spec_item(obj, model, fields, label)
def _check_field_spec_item(self, obj, model, field_name, label):
if field_name in obj.readonly_fields:
            # Items that are not actual model fields may appear in `fields`
            # as long as they are listed in readonly_fields; readonly_fields
            # handles the validation of such entries.
return []
else:
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
# If we can't find a field on the model that matches, it could
# be an extra field on the form.
return []
else:
if (isinstance(field, models.ManyToManyField) and
not field.remote_field.through._meta.auto_created):
return [
checks.Error(
"The value of '%s' cannot include the ManyToManyField '%s', "
"because that field manually specifies a relationship model."
% (label, field_name),
obj=obj.__class__,
id='admin.E013',
)
]
else:
return []
def _check_exclude(self, obj):
""" Check that exclude is a sequence without duplicates. """
if obj.exclude is None: # default value is None
return []
elif not isinstance(obj.exclude, (list, tuple)):
return must_be('a list or tuple', option='exclude', obj=obj, id='admin.E014')
elif len(obj.exclude) > len(set(obj.exclude)):
return [
checks.Error(
"The value of 'exclude' contains duplicate field(s).",
obj=obj.__class__,
id='admin.E015',
)
]
else:
return []
def _check_form(self, obj):
""" Check that form subclasses BaseModelForm. """
if hasattr(obj, 'form') and not issubclass(obj.form, BaseModelForm):
return must_inherit_from(parent='BaseModelForm', option='form',
obj=obj, id='admin.E016')
else:
return []
def _check_filter_vertical(self, obj):
""" Check that filter_vertical is a sequence of field names. """
if not hasattr(obj, 'filter_vertical'):
return []
elif not isinstance(obj.filter_vertical, (list, tuple)):
return must_be('a list or tuple', option='filter_vertical', obj=obj, id='admin.E017')
else:
return list(chain(*[
self._check_filter_item(obj, obj.model, field_name, "filter_vertical[%d]" % index)
for index, field_name in enumerate(obj.filter_vertical)
]))
def _check_filter_horizontal(self, obj):
""" Check that filter_horizontal is a sequence of field names. """
if not hasattr(obj, 'filter_horizontal'):
return []
elif not isinstance(obj.filter_horizontal, (list, tuple)):
return must_be('a list or tuple', option='filter_horizontal', obj=obj, id='admin.E018')
else:
return list(chain(*[
self._check_filter_item(obj, obj.model, field_name, "filter_horizontal[%d]" % index)
for index, field_name in enumerate(obj.filter_horizontal)
]))
def _check_filter_item(self, obj, model, field_name, label):
""" Check one item of `filter_vertical` or `filter_horizontal`, i.e.
check that given field exists and is a ManyToManyField. """
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label,
model=model, obj=obj, id='admin.E019')
else:
if not field.many_to_many:
return must_be('a many-to-many field', option=label, obj=obj, id='admin.E020')
else:
return []
def _check_radio_fields(self, obj):
""" Check that `radio_fields` is a dictionary. """
if not hasattr(obj, 'radio_fields'):
return []
elif not isinstance(obj.radio_fields, dict):
return must_be('a dictionary', option='radio_fields', obj=obj, id='admin.E021')
else:
return list(chain(*[
self._check_radio_fields_key(obj, obj.model, field_name, 'radio_fields') +
self._check_radio_fields_value(obj, val, 'radio_fields["%s"]' % field_name)
for field_name, val in obj.radio_fields.items()
]))
def _check_radio_fields_key(self, obj, model, field_name, label):
""" Check that a key of `radio_fields` dictionary is name of existing
field and that the field is a ForeignKey or has `choices` defined. """
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label,
model=model, obj=obj, id='admin.E022')
else:
if not (isinstance(field, models.ForeignKey) or field.choices):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an "
"instance of ForeignKey, and does not have a 'choices' definition." % (
label, field_name
),
obj=obj.__class__,
id='admin.E023',
)
]
else:
return []
def _check_radio_fields_value(self, obj, val, label):
""" Check type of a value of `radio_fields` dictionary. """
from django.contrib.admin.options import HORIZONTAL, VERTICAL
if val not in (HORIZONTAL, VERTICAL):
return [
checks.Error(
"The value of '%s' must be either admin.HORIZONTAL or admin.VERTICAL." % label,
obj=obj.__class__,
id='admin.E024',
)
]
else:
return []
def _check_view_on_site_url(self, obj):
if hasattr(obj, 'view_on_site'):
if not callable(obj.view_on_site) and not isinstance(obj.view_on_site, bool):
return [
checks.Error(
"The value of 'view_on_site' must be a callable or a boolean value.",
obj=obj.__class__,
id='admin.E025',
)
]
else:
return []
else:
return []
def _check_prepopulated_fields(self, obj):
""" Check that `prepopulated_fields` is a dictionary containing allowed
field types. """
if not hasattr(obj, 'prepopulated_fields'):
return []
elif not isinstance(obj.prepopulated_fields, dict):
return must_be('a dictionary', option='prepopulated_fields', obj=obj, id='admin.E026')
else:
return list(chain(*[
self._check_prepopulated_fields_key(obj, obj.model, field_name, 'prepopulated_fields') +
self._check_prepopulated_fields_value(obj, obj.model, val, 'prepopulated_fields["%s"]' % field_name)
for field_name, val in obj.prepopulated_fields.items()
]))
def _check_prepopulated_fields_key(self, obj, model, field_name, label):
""" Check a key of `prepopulated_fields` dictionary, i.e. check that it
is a name of existing field and the field is one of the allowed types.
"""
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label,
model=model, obj=obj, id='admin.E027')
else:
if isinstance(field, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)):
return [
checks.Error(
"The value of '%s' refers to '%s', which must not be a DateTimeField, "
"a ForeignKey, or a ManyToManyField." % (label, field_name),
obj=obj.__class__,
id='admin.E028',
)
]
else:
return []
def _check_prepopulated_fields_value(self, obj, model, val, label):
""" Check a value of `prepopulated_fields` dictionary, i.e. it's an
iterable of existing fields. """
if not isinstance(val, (list, tuple)):
return must_be('a list or tuple', option=label, obj=obj, id='admin.E029')
else:
return list(chain(*[
self._check_prepopulated_fields_value_item(obj, model, subfield_name, "%s[%r]" % (label, index))
for index, subfield_name in enumerate(val)
]))
def _check_prepopulated_fields_value_item(self, obj, model, field_name, label):
""" For `prepopulated_fields` equal to {"slug": ("title",)},
`field_name` is "title". """
try:
model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label, model=model, obj=obj, id='admin.E030')
else:
return []
def _check_ordering(self, obj):
""" Check that ordering refers to existing fields or is random. """
# ordering = None
if obj.ordering is None: # The default value is None
return []
elif not isinstance(obj.ordering, (list, tuple)):
return must_be('a list or tuple', option='ordering', obj=obj, id='admin.E031')
else:
return list(chain(*[
self._check_ordering_item(obj, obj.model, field_name, 'ordering[%d]' % index)
for index, field_name in enumerate(obj.ordering)
]))
def _check_ordering_item(self, obj, model, field_name, label):
""" Check that `ordering` refers to existing fields. """
if field_name == '?' and len(obj.ordering) != 1:
return [
checks.Error(
"The value of 'ordering' has the random ordering marker '?', "
"but contains other fields as well.",
hint='Either remove the "?", or remove the other fields.',
obj=obj.__class__,
id='admin.E032',
)
]
elif field_name == '?':
return []
elif LOOKUP_SEP in field_name:
# Skip ordering in the format field1__field2 (FIXME: checking
# this format would be nice, but it's a little fiddly).
return []
else:
if field_name.startswith('-'):
field_name = field_name[1:]
try:
model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label, model=model, obj=obj, id='admin.E033')
else:
return []
def _check_readonly_fields(self, obj):
""" Check that readonly_fields refers to proper attribute or field. """
if obj.readonly_fields == ():
return []
elif not isinstance(obj.readonly_fields, (list, tuple)):
return must_be('a list or tuple', option='readonly_fields', obj=obj, id='admin.E034')
else:
return list(chain(*[
self._check_readonly_fields_item(obj, obj.model, field_name, "readonly_fields[%d]" % index)
for index, field_name in enumerate(obj.readonly_fields)
]))
def _check_readonly_fields_item(self, obj, model, field_name, label):
if callable(field_name):
return []
elif hasattr(obj, field_name):
return []
elif hasattr(model, field_name):
return []
else:
try:
model._meta.get_field(field_name)
except FieldDoesNotExist:
return [
checks.Error(
"The value of '%s' is not a callable, an attribute of '%s', or an attribute of '%s.%s'." % (
label, obj.__class__.__name__, model._meta.app_label, model._meta.object_name
),
obj=obj.__class__,
id='admin.E035',
)
]
else:
return []
class ModelAdminChecks(BaseModelAdminChecks):
def check(self, admin_obj, **kwargs):
errors = super(ModelAdminChecks, self).check(admin_obj)
errors.extend(self._check_save_as(admin_obj))
errors.extend(self._check_save_on_top(admin_obj))
errors.extend(self._check_inlines(admin_obj))
errors.extend(self._check_list_display(admin_obj))
errors.extend(self._check_list_display_links(admin_obj))
errors.extend(self._check_list_filter(admin_obj))
errors.extend(self._check_list_select_related(admin_obj))
errors.extend(self._check_list_per_page(admin_obj))
errors.extend(self._check_list_max_show_all(admin_obj))
errors.extend(self._check_list_editable(admin_obj))
errors.extend(self._check_search_fields(admin_obj))
errors.extend(self._check_date_hierarchy(admin_obj))
return errors
def _check_save_as(self, obj):
""" Check save_as is a boolean. """
if not isinstance(obj.save_as, bool):
return must_be('a boolean', option='save_as',
obj=obj, id='admin.E101')
else:
return []
def _check_save_on_top(self, obj):
""" Check save_on_top is a boolean. """
if not isinstance(obj.save_on_top, bool):
return must_be('a boolean', option='save_on_top',
obj=obj, id='admin.E102')
else:
return []
def _check_inlines(self, obj):
""" Check all inline model admin classes. """
if not isinstance(obj.inlines, (list, tuple)):
return must_be('a list or tuple', option='inlines', obj=obj, id='admin.E103')
else:
return list(chain(*[
self._check_inlines_item(obj, obj.model, item, "inlines[%d]" % index)
for index, item in enumerate(obj.inlines)
]))
def _check_inlines_item(self, obj, model, inline, label):
""" Check one inline model admin. """
inline_label = '.'.join([inline.__module__, inline.__name__])
from django.contrib.admin.options import InlineModelAdmin
if not issubclass(inline, InlineModelAdmin):
return [
checks.Error(
"'%s' must inherit from 'InlineModelAdmin'." % inline_label,
obj=obj.__class__,
id='admin.E104',
)
]
elif not inline.model:
return [
checks.Error(
"'%s' must have a 'model' attribute." % inline_label,
obj=obj.__class__,
id='admin.E105',
)
]
elif not issubclass(inline.model, models.Model):
return must_be('a Model', option='%s.model' % inline_label, obj=obj, id='admin.E106')
else:
return inline(model, obj.admin_site).check()
def _check_list_display(self, obj):
""" Check that list_display only contains fields or usable attributes.
"""
if not isinstance(obj.list_display, (list, tuple)):
return must_be('a list or tuple', option='list_display', obj=obj, id='admin.E107')
else:
return list(chain(*[
self._check_list_display_item(obj, obj.model, item, "list_display[%d]" % index)
for index, item in enumerate(obj.list_display)
]))
def _check_list_display_item(self, obj, model, item, label):
if callable(item):
return []
elif hasattr(obj, item):
return []
elif hasattr(model, item):
# getattr(model, item) could be an X_RelatedObjectsDescriptor
try:
field = model._meta.get_field(item)
except FieldDoesNotExist:
try:
field = getattr(model, item)
except AttributeError:
field = None
if field is None:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not a "
"callable, an attribute of '%s', or an attribute or method on '%s.%s'." % (
label, item, obj.__class__.__name__, model._meta.app_label, model._meta.object_name
),
obj=obj.__class__,
id='admin.E108',
)
]
elif isinstance(field, models.ManyToManyField):
return [
checks.Error(
"The value of '%s' must not be a ManyToManyField." % label,
obj=obj.__class__,
id='admin.E109',
)
]
else:
return []
else:
try:
model._meta.get_field(item)
except FieldDoesNotExist:
return [
# This is a deliberate repeat of E108; there's more than one path
# required to test this condition.
checks.Error(
"The value of '%s' refers to '%s', which is not a callable, "
"an attribute of '%s', or an attribute or method on '%s.%s'." % (
label, item, obj.__class__.__name__, model._meta.app_label, model._meta.object_name
),
obj=obj.__class__,
id='admin.E108',
)
]
else:
return []
def _check_list_display_links(self, obj):
""" Check that list_display_links is a unique subset of list_display.
"""
if obj.list_display_links is None:
return []
elif not isinstance(obj.list_display_links, (list, tuple)):
return must_be('a list, a tuple, or None', option='list_display_links', obj=obj, id='admin.E110')
else:
return list(chain(*[
self._check_list_display_links_item(obj, field_name, "list_display_links[%d]" % index)
for index, field_name in enumerate(obj.list_display_links)
]))
def _check_list_display_links_item(self, obj, field_name, label):
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not defined in 'list_display'." % (
label, field_name
),
obj=obj.__class__,
id='admin.E111',
)
]
else:
return []
def _check_list_filter(self, obj):
if not isinstance(obj.list_filter, (list, tuple)):
return must_be('a list or tuple', option='list_filter', obj=obj, id='admin.E112')
else:
return list(chain(*[
self._check_list_filter_item(obj, obj.model, item, "list_filter[%d]" % index)
for index, item in enumerate(obj.list_filter)
]))
def _check_list_filter_item(self, obj, model, item, label):
"""
Check one item of `list_filter`, i.e. check if it is one of three options:
1. 'field' -- a basic field filter, possibly w/ relationships (e.g.
'field__rel')
2. ('field', SomeFieldListFilter) - a field-based list filter class
3. SomeListFilter - a non-field list filter class
"""
from django.contrib.admin import ListFilter, FieldListFilter
if callable(item) and not isinstance(item, models.Field):
# If item is option 3, it should be a ListFilter...
if not issubclass(item, ListFilter):
return must_inherit_from(parent='ListFilter', option=label,
obj=obj, id='admin.E113')
# ... but not a FieldListFilter.
elif issubclass(item, FieldListFilter):
return [
checks.Error(
"The value of '%s' must not inherit from 'FieldListFilter'." % label,
obj=obj.__class__,
id='admin.E114',
)
]
else:
return []
elif isinstance(item, (tuple, list)):
# item is option #2
field, list_filter_class = item
if not issubclass(list_filter_class, FieldListFilter):
return must_inherit_from(parent='FieldListFilter', option='%s[1]' % label, obj=obj, id='admin.E115')
else:
return []
else:
# item is option #1
field = item
# Validate the field string
try:
get_fields_from_path(model, field)
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of '%s' refers to '%s', which does not refer to a Field." % (label, field),
obj=obj.__class__,
id='admin.E116',
)
]
else:
return []
def _check_list_select_related(self, obj):
""" Check that list_select_related is a boolean, a list or a tuple. """
if not isinstance(obj.list_select_related, (bool, list, tuple)):
return must_be('a boolean, tuple or list', option='list_select_related', obj=obj, id='admin.E117')
else:
return []
def _check_list_per_page(self, obj):
""" Check that list_per_page is an integer. """
if not isinstance(obj.list_per_page, int):
return must_be('an integer', option='list_per_page', obj=obj, id='admin.E118')
else:
return []
def _check_list_max_show_all(self, obj):
""" Check that list_max_show_all is an integer. """
if not isinstance(obj.list_max_show_all, int):
return must_be('an integer', option='list_max_show_all', obj=obj, id='admin.E119')
else:
return []
def _check_list_editable(self, obj):
""" Check that list_editable is a sequence of editable fields from
list_display without first element. """
if not isinstance(obj.list_editable, (list, tuple)):
return must_be('a list or tuple', option='list_editable', obj=obj, id='admin.E120')
else:
return list(chain(*[
self._check_list_editable_item(obj, obj.model, item, "list_editable[%d]" % index)
for index, item in enumerate(obj.list_editable)
]))
def _check_list_editable_item(self, obj, model, field_name, label):
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
return refer_to_missing_field(field=field_name, option=label, model=model, obj=obj, id='admin.E121')
else:
if field_name not in obj.list_display:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not "
"contained in 'list_display'." % (label, field_name),
obj=obj.__class__,
id='admin.E122',
)
]
elif obj.list_display_links and field_name in obj.list_display_links:
return [
checks.Error(
"The value of '%s' cannot be in both 'list_editable' and 'list_display_links'." % field_name,
obj=obj.__class__,
id='admin.E123',
)
]
# If list_display[0] is in list_editable, check that
# list_display_links is set. See #22792 and #26229 for use cases.
elif (obj.list_display[0] == field_name and not obj.list_display_links and
obj.list_display_links is not None):
return [
checks.Error(
"The value of '%s' refers to the first field in 'list_display' ('%s'), "
"which cannot be used unless 'list_display_links' is set." % (
label, obj.list_display[0]
),
obj=obj.__class__,
id='admin.E124',
)
]
elif not field.editable:
return [
checks.Error(
"The value of '%s' refers to '%s', which is not editable through the admin." % (
label, field_name
),
obj=obj.__class__,
id='admin.E125',
)
]
else:
return []
def _check_search_fields(self, obj):
""" Check search_fields is a sequence. """
if not isinstance(obj.search_fields, (list, tuple)):
return must_be('a list or tuple', option='search_fields', obj=obj, id='admin.E126')
else:
return []
def _check_date_hierarchy(self, obj):
""" Check that date_hierarchy refers to DateField or DateTimeField. """
if obj.date_hierarchy is None:
return []
else:
try:
field = get_fields_from_path(obj.model, obj.date_hierarchy)[-1]
except (NotRelationField, FieldDoesNotExist):
return [
checks.Error(
"The value of 'date_hierarchy' refers to '%s', which "
"does not refer to a Field." % obj.date_hierarchy,
obj=obj.__class__,
id='admin.E127',
)
]
else:
if not isinstance(field, (models.DateField, models.DateTimeField)):
return must_be('a DateField or DateTimeField', option='date_hierarchy', obj=obj, id='admin.E128')
else:
return []
class InlineModelAdminChecks(BaseModelAdminChecks):
def check(self, inline_obj, **kwargs):
errors = super(InlineModelAdminChecks, self).check(inline_obj)
parent_model = inline_obj.parent_model
errors.extend(self._check_relation(inline_obj, parent_model))
errors.extend(self._check_exclude_of_parent_model(inline_obj, parent_model))
errors.extend(self._check_extra(inline_obj))
errors.extend(self._check_max_num(inline_obj))
errors.extend(self._check_min_num(inline_obj))
errors.extend(self._check_formset(inline_obj))
return errors
def _check_exclude_of_parent_model(self, obj, parent_model):
# Do not perform more specific checks if the base checks result in an
# error.
errors = super(InlineModelAdminChecks, self)._check_exclude(obj)
if errors:
return []
# Skip if `fk_name` is invalid.
if self._check_relation(obj, parent_model):
return []
if obj.exclude is None:
return []
fk = _get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
if fk.name in obj.exclude:
return [
checks.Error(
"Cannot exclude the field '%s', because it is the foreign key "
"to the parent model '%s.%s'." % (
fk.name, parent_model._meta.app_label, parent_model._meta.object_name
),
obj=obj.__class__,
id='admin.E201',
)
]
else:
return []
def _check_relation(self, obj, parent_model):
try:
_get_foreign_key(parent_model, obj.model, fk_name=obj.fk_name)
except ValueError as e:
return [checks.Error(e.args[0], obj=obj.__class__, id='admin.E202')]
else:
return []
def _check_extra(self, obj):
""" Check that extra is an integer. """
if not isinstance(obj.extra, int):
return must_be('an integer', option='extra', obj=obj, id='admin.E203')
else:
return []
def _check_max_num(self, obj):
""" Check that max_num is an integer. """
if obj.max_num is None:
return []
elif not isinstance(obj.max_num, int):
return must_be('an integer', option='max_num', obj=obj, id='admin.E204')
else:
return []
def _check_min_num(self, obj):
""" Check that min_num is an integer. """
if obj.min_num is None:
return []
elif not isinstance(obj.min_num, int):
return must_be('an integer', option='min_num', obj=obj, id='admin.E205')
else:
return []
def _check_formset(self, obj):
""" Check formset is a subclass of BaseModelFormSet. """
if not issubclass(obj.formset, BaseModelFormSet):
return must_inherit_from(parent='BaseModelFormSet', option='formset', obj=obj, id='admin.E206')
else:
return []
def must_be(type, option, obj, id):
return [
checks.Error(
"The value of '%s' must be %s." % (option, type),
obj=obj.__class__,
id=id,
),
]
def must_inherit_from(parent, option, obj, id):
return [
checks.Error(
"The value of '%s' must inherit from '%s'." % (option, parent),
obj=obj.__class__,
id=id,
),
]
def refer_to_missing_field(field, option, model, obj, id):
return [
checks.Error(
"The value of '%s' refers to '%s', which is not an attribute of '%s.%s'." % (
option, field, model._meta.app_label, model._meta.object_name
),
obj=obj.__class__,
id=id,
),
]
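# --- Usage sketch (illustrative; not part of the original module) ---
# A hypothetical ModelAdmin that would trip two of the checks above:
# admin.E001, because raw_id_fields is a plain string rather than a
# list/tuple, and admin.E116, because 'nonexistent' cannot be resolved
# to a field for list_filter. The 'Book' model and the admin instance
# are assumptions for the sake of the example.
#
#     class BookAdmin(admin.ModelAdmin):
#         raw_id_fields = 'author'        # should be ('author',)
#         list_filter = ('nonexistent',)
#
#     errors = ModelAdminChecks().check(book_admin)
#     # [Error("The value of 'raw_id_fields' must be a list or tuple.", id='admin.E001'),
#     #  Error("The value of 'list_filter[0]' refers to 'nonexistent', ...", id='admin.E116')]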
|
HybridF5/tempest | refs/heads/master | tempest/common/fixed_network.py | 6 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_log import log as logging
from tempest import exceptions
from tempest.lib.common.utils import misc as misc_utils
LOG = logging.getLogger(__name__)
def get_network_from_name(name, compute_networks_client):
"""Get a full network dict from just a network name
:param str name: the name of the network to use
:param NetworksClient compute_networks_client: The network client
object to use for making the network lists api request
:return: The full dictionary for the network in question
:rtype: dict
:raises InvalidTestResource: If the name provided is invalid, the networks
list returns a 404, there are no found networks, or the found network
is invalid
"""
caller = misc_utils.find_test_caller()
if not name:
raise exceptions.InvalidTestResource(type='network', name=name)
networks = compute_networks_client.list_networks()['networks']
networks = [n for n in networks if n['label'] == name]
    # Check that exactly one matching network exists; otherwise raise InvalidTestResource
if len(networks) == 1:
network = sorted(networks)[0]
elif len(networks) > 1:
msg = ("Network with name: %s had multiple matching networks in the "
"list response: %s\n Unable to specify a single network" % (
name, networks))
if caller:
msg = '(%s) %s' % (caller, msg)
LOG.warning(msg)
raise exceptions.InvalidTestResource(type='network', name=name)
else:
msg = "Network with name: %s not found" % name
if caller:
msg = '(%s) %s' % (caller, msg)
LOG.warning(msg)
raise exceptions.InvalidTestResource(type='network', name=name)
# To be consistent between neutron and nova network always use name even
    # if label is used in the api response. If neither is present, then
# the returned network is invalid.
name = network.get('name') or network.get('label')
if not name:
msg = "Network found from list doesn't contain a valid name or label"
if caller:
msg = '(%s) %s' % (caller, msg)
LOG.warning(msg)
raise exceptions.InvalidTestResource(type='network', name=name)
network['name'] = name
return network
def get_tenant_network(creds_provider, compute_networks_client,
shared_network_name):
"""Get a network usable by the primary tenant
:param creds_provider: instance of credential provider
:param compute_networks_client: compute network client. We want to have the
           compute network client so we can use a common approach for both
neutron and nova-network cases. If this is not an admin network
client, set_network_kwargs might fail in case fixed_network_name
is the network to be used, and it's not visible to the tenant
:param shared_network_name: name of the shared network to be used if no
tenant network is available in the creds provider
:returns: a dict with 'id' and 'name' of the network
"""
caller = misc_utils.find_test_caller()
net_creds = creds_provider.get_primary_creds()
network = getattr(net_creds, 'network', None)
if not network or not network.get('name'):
if shared_network_name:
msg = ('No valid network provided or created, defaulting to '
'fixed_network_name')
if caller:
msg = '(%s) %s' % (caller, msg)
LOG.debug(msg)
try:
network = get_network_from_name(shared_network_name,
compute_networks_client)
except exceptions.InvalidTestResource:
network = {}
msg = ('Found network %s available for tenant' % network)
if caller:
msg = '(%s) %s' % (caller, msg)
LOG.info(msg)
return network
def set_networks_kwarg(network, kwargs=None):
"""Set 'networks' kwargs for a server create if missing
:param network: dict of network to be used with 'id' and 'name'
:param kwargs: server create kwargs to be enhanced
:return: new dict of kwargs updated to include networks
"""
params = copy.copy(kwargs) or {}
if kwargs and 'networks' in kwargs:
return params
if network:
if 'id' in network.keys():
params.update({"networks": [{'uuid': network['id']}]})
else:
LOG.warning('The provided network dict: %s was invalid and did '
'not contain an id' % network)
return params
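# --- Usage sketch (illustrative; not part of the original module) ---
# How set_networks_kwarg() folds a network dict into server-create
# kwargs; the dict shape matches what get_network_from_name() returns,
# and the id value below is made up:
#
#     network = {'id': '1f670e4e-0000-0000-0000-000000000000', 'name': 'private'}
#     kwargs = set_networks_kwarg(network, {'name': 'test-server'})
#     # {'name': 'test-server', 'networks': [{'uuid': '1f670e4e-...'}]}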
|
kytvi2p/Sigil | refs/heads/master | 3rdparty/python/Lib/imghdr.py | 88 | """Recognize image file formats based on their first few bytes."""
__all__ = ["what"]
#-------------------------#
# Recognize image headers #
#-------------------------#
def what(file, h=None):
f = None
try:
if h is None:
if isinstance(file, str):
f = open(file, 'rb')
h = f.read(32)
else:
location = file.tell()
h = file.read(32)
file.seek(location)
for tf in tests:
res = tf(h, f)
if res:
return res
finally:
if f: f.close()
return None
#---------------------------------#
# Subroutines per image file type #
#---------------------------------#
tests = []
def test_jpeg(h, f):
"""JPEG data in JFIF or Exif format"""
if h[6:10] in (b'JFIF', b'Exif'):
return 'jpeg'
tests.append(test_jpeg)
def test_png(h, f):
if h.startswith(b'\211PNG\r\n\032\n'):
return 'png'
tests.append(test_png)
def test_gif(h, f):
"""GIF ('87 and '89 variants)"""
if h[:6] in (b'GIF87a', b'GIF89a'):
return 'gif'
tests.append(test_gif)
def test_tiff(h, f):
"""TIFF (can be in Motorola or Intel byte order)"""
if h[:2] in (b'MM', b'II'):
return 'tiff'
tests.append(test_tiff)
def test_rgb(h, f):
"""SGI image library"""
if h.startswith(b'\001\332'):
return 'rgb'
tests.append(test_rgb)
def test_pbm(h, f):
"""PBM (portable bitmap)"""
if len(h) >= 3 and \
h[0] == ord(b'P') and h[1] in b'14' and h[2] in b' \t\n\r':
return 'pbm'
tests.append(test_pbm)
def test_pgm(h, f):
"""PGM (portable graymap)"""
if len(h) >= 3 and \
h[0] == ord(b'P') and h[1] in b'25' and h[2] in b' \t\n\r':
return 'pgm'
tests.append(test_pgm)
def test_ppm(h, f):
"""PPM (portable pixmap)"""
if len(h) >= 3 and \
h[0] == ord(b'P') and h[1] in b'36' and h[2] in b' \t\n\r':
return 'ppm'
tests.append(test_ppm)
def test_rast(h, f):
"""Sun raster file"""
if h.startswith(b'\x59\xA6\x6A\x95'):
return 'rast'
tests.append(test_rast)
def test_xbm(h, f):
"""X bitmap (X10 or X11)"""
if h.startswith(b'#define '):
return 'xbm'
tests.append(test_xbm)
def test_bmp(h, f):
if h.startswith(b'BM'):
return 'bmp'
tests.append(test_bmp)
#--------------------#
# Small test program #
#--------------------#
def test():
import sys
recursive = 0
if sys.argv[1:] and sys.argv[1] == '-r':
del sys.argv[1:2]
recursive = 1
try:
if sys.argv[1:]:
testall(sys.argv[1:], recursive, 1)
else:
testall(['.'], recursive, 1)
except KeyboardInterrupt:
sys.stderr.write('\n[Interrupted]\n')
sys.exit(1)
def testall(list, recursive, toplevel):
import sys
import os
for filename in list:
if os.path.isdir(filename):
print(filename + '/:', end=' ')
if recursive or toplevel:
print('recursing down:')
import glob
names = glob.glob(os.path.join(filename, '*'))
testall(names, recursive, 0)
else:
print('*** directory (use -r) ***')
else:
print(filename + ':', end=' ')
sys.stdout.flush()
try:
print(what(filename))
except OSError:
print('*** not found ***')
if __name__ == '__main__':
test()
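# --- Usage sketch (illustrative; not part of the original module) ---
# what() sniffs only the first 32 bytes. It accepts a file path, a
# binary file object, or the raw header bytes via the h= parameter
# ('photo.png' below is a hypothetical path):
#
#     import imghdr
#     imghdr.what('photo.png')                   # e.g. 'png'
#     imghdr.what(None, h=b'\x89PNG\r\n\x1a\n')  # 'png' from raw header bytes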
|
wwj718/ANALYSE | refs/heads/master | common/lib/xmodule/xmodule/hidden_module.py | 94 | from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return u"ERROR: This module is unknown--students will not see it at all"
else:
return u""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
|
craisins/nascarbot | refs/heads/master | plugins/wikipedia.py | 6 | '''Searches wikipedia and returns first sentence of article
Scaevolus 2009'''
import re
from util import hook, http
api_prefix = "http://en.wikipedia.org/w/api.php"
search_url = api_prefix + "?action=opensearch&format=xml"
paren_re = re.compile(r'\s*\(.*\)$')
@hook.command('w')
@hook.command
def wiki(inp):
'''.w/.wiki <phrase> -- gets first sentence of wikipedia ''' \
'''article on <phrase>'''
x = http.get_xml(search_url, search=inp)
ns = '{http://opensearch.org/searchsuggest2}'
items = x.findall(ns + 'Section/' + ns + 'Item')
if items == []:
if x.find('error') is not None:
return 'error: %(code)s: %(info)s' % x.find('error').attrib
else:
return 'no results found'
def extract(item):
return [item.find(ns + x).text for x in
('Text', 'Description', 'Url')]
title, desc, url = extract(items[0])
if 'may refer to' in desc:
title, desc, url = extract(items[1])
title = paren_re.sub('', title)
if title.lower() not in desc.lower():
desc = title + desc
    desc = re.sub(r'\s+', ' ', desc).strip()  # remove excess spaces
if len(desc) > 300:
desc = desc[:300] + '...'
return '%s -- %s' % (desc, http.quote(http.unquote(url), ':/'))
|
MichaelNedzelsky/intellij-community | refs/heads/master | python/testData/completion/matMul.py | 79 | class C:
def __matmul<caret>
|
arthru/OpenUpgrade | refs/heads/master | addons/marketing_campaign_crm_demo/__openerp__.py | 119 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Marketing Campaign - Demo',
'version': '1.0',
'depends': ['marketing_campaign',
'crm',
],
'author': 'OpenERP SA',
'category': 'Marketing',
'description': """
Demo data for the module marketing_campaign.
============================================
Creates demo data like leads, campaigns and segments for the module marketing_campaign.
""",
'website': 'http://www.openerp.com',
'data': [],
'demo': ['marketing_campaign_demo.xml'],
'installable': True,
'auto_install': False,
'images': ['images/campaigns.jpeg','images/email_templates.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
vgrem/Office365-REST-Python-Client | refs/heads/master | office365/calendar/calendar_group.py | 1 | from office365.calendar.calendar_collection import CalendarCollection
from office365.entity import Entity
from office365.runtime.resource_path import ResourcePath
class CalendarGroup(Entity):
"""
A group of user calendars.
"""
@property
def calendars(self):
"""The calendars in the calendar group. Navigation property. Read-only. Nullable."""
return self.properties.get('calendars',
CalendarCollection(self.context, ResourcePath("calendars", self.resource_path)))
|
botchat/sampleGroupChat | refs/heads/master | chatting/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
savoirfairelinux/OpenUpgrade | refs/heads/master | addons/mail/tests/test_message_read.py | 44 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.mail.tests.common import TestMail
class test_mail_access_rights(TestMail):
def test_00_message_read(self):
""" Tests for message_read and expandables. """
cr, uid, user_admin, user_raoul, group_pigs = self.cr, self.uid, self.user_admin, self.user_raoul, self.group_pigs
self.mail_group.message_subscribe_users(cr, uid, [group_pigs.id], [user_raoul.id])
pigs_domain = [('model', '=', 'mail.group'), ('res_id', '=', self.group_pigs_id)]
# Data: create a discussion in Pigs (3 threads, with respectively 0, 4 and 4 answers)
msg_id0 = self.group_pigs.message_post(body='0', subtype='mt_comment')
msg_id1 = self.group_pigs.message_post(body='1', subtype='mt_comment')
msg_id2 = self.group_pigs.message_post(body='2', subtype='mt_comment')
msg_id3 = self.group_pigs.message_post(body='1-1', subtype='mt_comment', parent_id=msg_id1)
msg_id4 = self.group_pigs.message_post(body='2-1', subtype='mt_comment', parent_id=msg_id2)
msg_id5 = self.group_pigs.message_post(body='1-2', subtype='mt_comment', parent_id=msg_id1)
msg_id6 = self.group_pigs.message_post(body='2-2', subtype='mt_comment', parent_id=msg_id2)
msg_id7 = self.group_pigs.message_post(body='1-1-1', subtype='mt_comment', parent_id=msg_id3)
msg_id8 = self.group_pigs.message_post(body='2-1-1', subtype='mt_comment', parent_id=msg_id4)
msg_id9 = self.group_pigs.message_post(body='1-1-1', subtype='mt_comment', parent_id=msg_id3)
msg_id10 = self.group_pigs.message_post(body='2-1-1', subtype='mt_comment', parent_id=msg_id4)
msg_ids = [msg_id10, msg_id9, msg_id8, msg_id7, msg_id6, msg_id5, msg_id4, msg_id3, msg_id2, msg_id1, msg_id0]
ordered_msg_ids = [msg_id2, msg_id4, msg_id6, msg_id8, msg_id10, msg_id1, msg_id3, msg_id5, msg_id7, msg_id9, msg_id0]
# Test: raoul received notifications
raoul_notification_ids = self.mail_notification.search(cr, user_raoul.id, [('read', '=', False), ('message_id', 'in', msg_ids), ('partner_id', '=', user_raoul.partner_id.id)])
self.assertEqual(len(raoul_notification_ids), 11, 'message_post: wrong number of produced notifications')
# Test: read some specific ids
read_msg_list = self.mail_message.message_read(cr, user_raoul.id, ids=msg_ids[2:4], domain=[('body', 'like', 'dummy')], context={'mail_read_set_read': True})
read_msg_ids = [msg.get('id') for msg in read_msg_list]
self.assertEqual(msg_ids[2:4], read_msg_ids, 'message_read with direct ids should read only the requested ids')
# Test: read messages of Pigs through a domain, being thread or not threaded
read_msg_list = self.mail_message.message_read(cr, user_raoul.id, domain=pigs_domain, limit=200)
read_msg_ids = [msg.get('id') for msg in read_msg_list]
self.assertEqual(msg_ids, read_msg_ids, 'message_read flat with domain on Pigs should equal all messages of Pigs')
read_msg_list = self.mail_message.message_read(cr, user_raoul.id, domain=pigs_domain, limit=200, thread_level=1)
read_msg_ids = [msg.get('id') for msg in read_msg_list]
self.assertEqual(ordered_msg_ids, read_msg_ids,
'message_read threaded with domain on Pigs should equal all messages of Pigs, and sort them with newer thread first, last message last in thread')
# ----------------------------------------
# CASE1: message_read with domain, threaded
# We simulate an entire flow, using the expandables to test them
# ----------------------------------------
# Do: read last message, threaded
read_msg_list = self.mail_message.message_read(cr, uid, domain=pigs_domain, limit=1, thread_level=1)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# TDE TODO: test expandables order
type_list = map(lambda item: item.get('type'), read_msg_list)
# Test: structure content, ancestor is added to the read messages, ordered by id, ancestor is set, 2 expandables
self.assertEqual(len(read_msg_list), 4, 'message_read on last Pigs message should return 2 messages and 2 expandables')
self.assertEqual(set([msg_id2, msg_id10]), set(read_msg_ids), 'message_read on the last Pigs message should also get its parent')
self.assertEqual(read_msg_list[1].get('parent_id'), read_msg_list[0].get('id'), 'message_read should set the ancestor to the thread header')
# Data: get expandables
new_threads_exp, new_msg_exp = None, None
for msg in read_msg_list:
if msg.get('type') == 'expandable' and msg.get('nb_messages') == -1 and msg.get('max_limit'):
new_threads_exp = msg
elif msg.get('type') == 'expandable':
new_msg_exp = msg
# Do: fetch new messages in first thread, domain from expandable
self.assertIsNotNone(new_msg_exp, 'message_read on last Pigs message should have returned a new messages expandable')
domain = new_msg_exp.get('domain', [])
# Test: expandable, conditions in domain
self.assertIn(('id', 'child_of', msg_id2), domain, 'new messages expandable domain should contain a child_of condition')
self.assertIn(('id', '>=', msg_id4), domain, 'new messages expandable domain should contain an id greater than condition')
self.assertIn(('id', '<=', msg_id8), domain, 'new messages expandable domain should contain an id less than condition')
self.assertEqual(new_msg_exp.get('parent_id'), msg_id2, 'new messages expandable should have parent_id set to the thread header')
# Do: message_read with domain, thread_level=0, parent_id=msg_id2 (should be imposed by JS), 2 messages
read_msg_list = self.mail_message.message_read(cr, uid, domain=domain, limit=2, thread_level=0, parent_id=msg_id2)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
new_msg_exp = [msg for msg in read_msg_list if msg.get('type') == 'expandable'][0]
# Test: structure content, 2 messages and 1 thread expandable
self.assertEqual(len(read_msg_list), 3, 'message_read in Pigs thread should return 2 messages and 1 expandables')
self.assertEqual(set([msg_id6, msg_id8]), set(read_msg_ids), 'message_read in Pigs thread should return 2 more previous messages in thread')
# Do: read the last message
read_msg_list = self.mail_message.message_read(cr, uid, domain=new_msg_exp.get('domain'), limit=2, thread_level=0, parent_id=msg_id2)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# Test: structure content, 1 message
self.assertEqual(len(read_msg_list), 1, 'message_read in Pigs thread should return 1 message')
self.assertEqual(set([msg_id4]), set(read_msg_ids), 'message_read in Pigs thread should return the last message in thread')
# Do: fetch a new thread, domain from expandable
self.assertIsNotNone(new_threads_exp, 'message_read on last Pigs message should have returned a new threads expandable')
domain = new_threads_exp.get('domain', [])
# Test: expandable, conditions in domain
for condition in pigs_domain:
self.assertIn(condition, domain, 'new threads expandable domain should contain the message_read domain parameter')
        self.assertFalse(new_threads_exp.get('parent_id'), 'new threads expandable should not have a parent_id')
# Do: message_read with domain, thread_level=1 (should be imposed by JS)
read_msg_list = self.mail_message.message_read(cr, uid, domain=domain, limit=1, thread_level=1)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# Test: structure content, ancestor is added to the read messages, ordered by id, ancestor is set, 2 expandables
self.assertEqual(len(read_msg_list), 4, 'message_read on Pigs should return 2 messages and 2 expandables')
self.assertEqual(set([msg_id1, msg_id9]), set(read_msg_ids), 'message_read on a Pigs message should also get its parent')
self.assertEqual(read_msg_list[1].get('parent_id'), read_msg_list[0].get('id'), 'message_read should set the ancestor to the thread header')
# Data: get expandables
new_threads_exp, new_msg_exp = None, None
for msg in read_msg_list:
if msg.get('type') == 'expandable' and msg.get('nb_messages') == -1 and msg.get('max_limit'):
new_threads_exp = msg
elif msg.get('type') == 'expandable':
new_msg_exp = msg
# Do: fetch new messages in second thread, domain from expandable
self.assertIsNotNone(new_msg_exp, 'message_read on Pigs message should have returned a new messages expandable')
domain = new_msg_exp.get('domain', [])
# Test: expandable, conditions in domain
self.assertIn(('id', 'child_of', msg_id1), domain, 'new messages expandable domain should contain a child_of condition')
self.assertIn(('id', '>=', msg_id3), domain, 'new messages expandable domain should contain an id greater than condition')
self.assertIn(('id', '<=', msg_id7), domain, 'new messages expandable domain should contain an id less than condition')
        self.assertEqual(new_msg_exp.get('parent_id'), msg_id1, 'new messages expandable should have parent_id set to the thread header')
# Do: message_read with domain, thread_level=0, parent_id=msg_id1 (should be imposed by JS)
read_msg_list = self.mail_message.message_read(cr, uid, domain=domain, limit=200, thread_level=0, parent_id=msg_id1)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# Test: other message in thread have been fetch
self.assertEqual(set([msg_id3, msg_id5, msg_id7]), set(read_msg_ids), 'message_read on the last Pigs message should also get its parent')
# Test: fetch a new thread, domain from expandable
self.assertIsNotNone(new_threads_exp, 'message_read should have returned a new threads expandable')
domain = new_threads_exp.get('domain', [])
# Test: expandable, conditions in domain
for condition in pigs_domain:
self.assertIn(condition, domain, 'general expandable domain should contain the message_read domain parameter')
# Do: message_read with domain, thread_level=1 (should be imposed by JS)
read_msg_list = self.mail_message.message_read(cr, uid, domain=domain, limit=1, thread_level=1)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# Test: structure content, ancestor is added to the read messages, ordered by id, ancestor is set, 2 expandables
self.assertEqual(len(read_msg_list), 1, 'message_read on Pigs should return 1 message because everything else has been fetched')
self.assertEqual([msg_id0], read_msg_ids, 'message_read after 2 More should return only 1 last message')
# ----------------------------------------
# CASE2: message_read with domain, flat
# ----------------------------------------
# Do: read 2 lasts message, flat
read_msg_list = self.mail_message.message_read(cr, uid, domain=pigs_domain, limit=2, thread_level=0)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# Test: structure content, ancestor is added to the read messages, ordered by id, ancestor is not set, 1 expandable
self.assertEqual(len(read_msg_list), 3, 'message_read on last Pigs message should return 2 messages and 1 expandable')
self.assertEqual(set([msg_id9, msg_id10]), set(read_msg_ids), 'message_read flat on Pigs last messages should only return those messages')
self.assertFalse(read_msg_list[0].get('parent_id'), 'message_read flat should set the ancestor as False')
self.assertFalse(read_msg_list[1].get('parent_id'), 'message_read flat should set the ancestor as False')
# Data: get expandables
new_threads_exp, new_msg_exp = None, None
for msg in read_msg_list:
if msg.get('type') == 'expandable' and msg.get('nb_messages') == -1 and msg.get('max_limit'):
new_threads_exp = msg
# Do: fetch new messages, domain from expandable
        self.assertIsNotNone(new_threads_exp, 'message_read flat on the last 2 Pigs messages should have returned a new threads expandable')
domain = new_threads_exp.get('domain', [])
# Test: expandable, conditions in domain
for condition in pigs_domain:
self.assertIn(condition, domain, 'new threads expandable domain should contain the message_read domain parameter')
# Do: message_read with domain, thread_level=0 (should be imposed by JS)
read_msg_list = self.mail_message.message_read(cr, uid, domain=domain, limit=20, thread_level=0)
read_msg_ids = [msg.get('id') for msg in read_msg_list if msg.get('type') != 'expandable']
# Test: structure content, ancestor is added to the read messages, ordered by id, ancestor is set, 2 expandables
self.assertEqual(len(read_msg_list), 9, 'message_read on Pigs should return 9 messages and 0 expandable')
self.assertEqual([msg_id8, msg_id7, msg_id6, msg_id5, msg_id4, msg_id3, msg_id2, msg_id1, msg_id0], read_msg_ids,
                         'message_read, More on flat, should return all remaining messages')
|
SnappleCap/oh-mainline | refs/heads/master | vendor/packages/requests/requests/models.py | 410 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring)
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
json_dumps = json.dumps
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
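    # Hedged illustration (URL is hypothetical): for a prepared request with
    # url == 'http://example.com/a/b?x=1&y=2', path_url returns
    # '/a/b?x=1&y=2'; a URL with an empty path yields '/'.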
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
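    # Hedged sketch of the behaviour above (keys and values are illustrative):
    # _encode_params([('k', ['v1', 'v2'])]) -> 'k=v1&k=v2'
    # _encode_params('already=encoded') is returned unchanged, as are
    # file-like objects exposing .read().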
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if (not files):
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
val = [val]
for v in val:
if v is not None:
# Don't call str() on bytestrings: in Py3 it all goes wrong.
if not isinstance(v, bytes):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
for (k, v) in files:
# support for explicit filename
ft = None
fh = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
elif len(v) == 3:
fn, fp, ft = v
else:
fn, fp, ft, fh = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
else:
fdata = fp.read()
rf = RequestField(name=k, data=fdata,
filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
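    # Hedged sketch of the `files` value shapes handled above (names are
    # illustrative): a bare file object, or a tuple of
    # (filename, fileobj), (filename, fileobj, content_type), or
    # (filename, fileobj, content_type, custom_headers), e.g.
    # files = {'report': ('report.csv', open('report.csv', 'rb'), 'text/csv')}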
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
:param json: json for the body to attach to the request (if data is not specified).
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self,
method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.json = json
self.params = params
self.auth = auth
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare(
method=self.method,
url=self.url,
headers=self.headers,
files=self.files,
data=self.data,
json=self.json,
params=self.params,
auth=self.auth,
cookies=self.cookies,
hooks=self.hooks,
)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
# The `CookieJar` used to create the Cookie header will be stored here
# after prepare_cookies is called
self._cookies = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None,
json=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
self.prepare_body(data, files, json)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
# This MUST go after prepare_auth. Authenticators could add a hook
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = self.method.upper()
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
#: We're unable to blindy call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
url = unicode(url) if is_py2 else str(url)
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(
to_native_string(url, 'utf8')))
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
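    # Hedged illustration of the IDNA step above (the domain is a standard
    # example): a unicode host such as u'例え.jp' is encoded to
    # 'xn--r8jz45g.jp' before the URL is reassembled and requoted.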
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files, json=None):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
if json is not None:
content_type = 'application/json'
body = json_dumps(json)
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length is not None:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data and json is None:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
body.seek(0, 2)
self.headers['Content-Length'] = builtin_str(body.tell())
body.seek(0, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif (self.method not in ('GET', 'HEAD')) and (self.headers.get('Content-Length') is None):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content',
'status_code',
'headers',
'url',
'history',
'encoding',
'reason',
'cookies',
'elapsed',
'request',
]
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
#: is a response.
self.request = None
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
if not self._content_consumed:
self.content
return dict(
(attr, getattr(self, attr, None))
for attr in self.__attrs__
)
def __setstate__(self, state):
for name, value in state.items():
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except HTTPError:
return False
return True
@property
def is_redirect(self):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
@property
def is_permanent_redirect(self):
"""True if this Response one of the permanant versions of redirect"""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
try:
# Special case for urllib3.
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except ProtocolError as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except ReadTimeoutError as e:
raise ConnectionError(e)
except AttributeError:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
if self._content_consumed and isinstance(self._content, bool):
raise StreamConsumedError()
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
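    # Hedged usage sketch (URL is illustrative):
    # >>> r = requests.get('http://httpbin.org/bytes/1024', stream=True)
    # >>> for chunk in r.iter_content(chunk_size=256):
    # ...     handle(chunk)    # body is read lazily, 256 bytes at a time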
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
.. note:: This method is not reentrant safe.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
if delimiter:
lines = chunk.split(delimiter)
else:
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
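    # Hedged usage sketch: iter_lines builds on iter_content, holding back a
    # trailing partial line in `pending` until the next chunk completes it:
    # >>> for line in r.iter_lines():
    # ...     print(line)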
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
non-HTTP knowledge to make a better guess at the encoding, you should
set ``r.encoding`` appropriately before accessing this property.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return json.loads(self.content.decode(encoding), **kwargs)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return json.loads(self.text, **kwargs)
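    # Hedged usage sketch (URL is illustrative):
    # >>> r = requests.get('http://httpbin.org/get')
    # >>> r.json()['url']
    # 'http://httpbin.org/get'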
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
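    # Hedged illustration: a response header such as
    #     Link: <http://example.com/?page=2>; rel="next"
    # is exposed as r.links['next']['url'] == 'http://example.com/?page=2'.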
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
def close(self):
"""Releases the connection back to the pool. Once this method has been
called the underlying ``raw`` object must not be accessed again.
*Note: Should not normally need to be called explicitly.*
"""
return self.raw.release_conn()
|
pcu4dros/pandora-core | refs/heads/master | workspace/lib/python3.5/site-packages/werkzeug/contrib/testtools.py | 4 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.testtools
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module implements extended wrappers for simplified testing.
`TestResponse`
A response wrapper which adds various cached attributes for
simplified assertions on various content types.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from werkzeug.utils import cached_property, import_string
from werkzeug.wrappers import Response
from warnings import warn
warn(DeprecationWarning('werkzeug.contrib.testtools is deprecated and '
'will be removed with Werkzeug 1.0'))
class ContentAccessors(object):
"""
A mixin class for response objects that provides a couple of useful
accessors for unittesting.
"""
def xml(self):
"""Get an etree if possible."""
if 'xml' not in self.mimetype:
raise AttributeError(
'Not a XML response (Content-Type: %s)'
% self.mimetype)
for module in ['xml.etree.ElementTree', 'ElementTree',
'elementtree.ElementTree']:
etree = import_string(module, silent=True)
if etree is not None:
return etree.XML(self.body)
raise RuntimeError('You must have ElementTree installed '
'to use TestResponse.xml')
xml = cached_property(xml)
def lxml(self):
"""Get an lxml etree if possible."""
if ('html' not in self.mimetype and 'xml' not in self.mimetype):
raise AttributeError('Not an HTML/XML response')
from lxml import etree
try:
from lxml.html import fromstring
except ImportError:
fromstring = etree.HTML
if self.mimetype == 'text/html':
return fromstring(self.data)
return etree.XML(self.data)
lxml = cached_property(lxml)
def json(self):
"""Get the result of simplejson.loads if possible."""
if 'json' not in self.mimetype:
raise AttributeError('Not a JSON response')
try:
from simplejson import loads
except ImportError:
from json import loads
return loads(self.data)
json = cached_property(json)
class TestResponse(Response, ContentAccessors):
"""Pass this to `werkzeug.test.Client` for easier unittesting."""
|
CenterForOpenScience/osf.io | refs/heads/develop | api_tests/providers/registrations/views/test_registration_provider_moderator_list.py | 5 | import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
RegistrationProviderFactory,
)
from api_tests.providers.preprints.views.test_preprint_provider_moderator_list import ProviderModeratorListTestClass
@pytest.mark.django_db
class TestRegistrationProviderModeratorList(ProviderModeratorListTestClass):
@pytest.fixture()
def provider(self):
pp = RegistrationProviderFactory(name='EGAP')
pp.update_group_permissions()
return pp
@pytest.fixture()
def url(self, provider):
return f'/{API_BASE}providers/registrations/{provider._id}/moderators/'
|
ghchinoy/tensorflow | refs/heads/master | tensorflow/contrib/rnn/python/ops/core_rnn_cell.py | 19 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module implementing RNN Cells that used to be in core.
@@EmbeddingWrapper
@@InputProjectionWrapper
@@OutputProjectionWrapper
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
# pylint: disable=protected-access,invalid-name
RNNCell = rnn_cell_impl.RNNCell
_WEIGHTS_VARIABLE_NAME = rnn_cell_impl._WEIGHTS_VARIABLE_NAME
_BIAS_VARIABLE_NAME = rnn_cell_impl._BIAS_VARIABLE_NAME
# pylint: enable=protected-access,invalid-name
class _Linear(object):
"""Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
Args:
    args: a 2D Tensor or a list of 2D, batch x n, Tensors.
output_size: int, second dimension of weight variable.
dtype: data type for variables.
build_bias: boolean, whether to build a bias variable.
bias_initializer: starting value to initialize the bias
(default is all zeros).
kernel_initializer: starting value to initialize the weight.
Raises:
ValueError: if inputs_shape is wrong.
"""
def __init__(self,
args,
output_size,
build_bias,
bias_initializer=None,
kernel_initializer=None):
self._build_bias = build_bias
if args is None or (nest.is_sequence(args) and not args):
raise ValueError("`args` must be specified")
if not nest.is_sequence(args):
args = [args]
self._is_sequence = False
else:
self._is_sequence = True
# Calculate the total size of arguments on dimension 1.
total_arg_size = 0
shapes = [a.get_shape() for a in args]
for shape in shapes:
if shape.ndims != 2:
raise ValueError("linear is expecting 2D arguments: %s" % shapes)
if shape.dims[1].value is None:
raise ValueError("linear expects shape[1] to be provided for shape %s, "
"but saw %s" % (shape, shape[1]))
else:
total_arg_size += shape.dims[1].value
dtype = [a.dtype for a in args][0]
scope = vs.get_variable_scope()
with vs.variable_scope(scope) as outer_scope:
self._weights = vs.get_variable(
_WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size],
dtype=dtype,
initializer=kernel_initializer)
if build_bias:
with vs.variable_scope(outer_scope) as inner_scope:
inner_scope.set_partitioner(None)
if bias_initializer is None:
bias_initializer = init_ops.constant_initializer(0.0, dtype=dtype)
self._biases = vs.get_variable(
_BIAS_VARIABLE_NAME, [output_size],
dtype=dtype,
initializer=bias_initializer)
def __call__(self, args):
if not self._is_sequence:
args = [args]
if len(args) == 1:
res = math_ops.matmul(args[0], self._weights)
else:
# Explicitly creating a one for a minor performance improvement.
one = constant_op.constant(1, dtype=dtypes.int32)
res = math_ops.matmul(array_ops.concat(args, one), self._weights)
if self._build_bias:
res = nn_ops.bias_add(res, self._biases)
return res
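# Hedged usage sketch (shapes are illustrative): _Linear concatenates its
# 2-D arguments along axis 1 and applies one [total_input_width, output_size]
# weight matrix, so [batch, 3] and [batch, 5] inputs map to [batch, 7]:
#
# with vs.variable_scope("proj"):
#     linear = _Linear([x, h], output_size=7, build_bias=True)
#     out = linear([x, h])    # shape [batch, 7]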
# TODO(xpan): Remove this function in a follow up.
def _linear(args,
output_size,
bias,
bias_initializer=None,
kernel_initializer=None):
"""Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
Args:
    args: a 2D Tensor or a list of 2D, batch x n, Tensors.
output_size: int, second dimension of W[i].
bias: boolean, whether to add a bias term or not.
bias_initializer: starting value to initialize the bias
(default is all zeros).
kernel_initializer: starting value to initialize the weight.
Returns:
A 2D Tensor with shape `[batch, output_size]` equal to
sum_i(args[i] * W[i]), where W[i]s are newly created matrices.
Raises:
ValueError: if some of the arguments has unspecified or wrong shape.
"""
if args is None or (nest.is_sequence(args) and not args):
raise ValueError("`args` must be specified")
if not nest.is_sequence(args):
args = [args]
# Calculate the total size of arguments on dimension 1.
total_arg_size = 0
shapes = [a.get_shape() for a in args]
for shape in shapes:
if shape.ndims != 2:
raise ValueError("linear is expecting 2D arguments: %s" % shapes)
if shape.dims[1].value is None:
raise ValueError("linear expects shape[1] to be provided for shape %s, "
"but saw %s" % (shape, shape[1]))
else:
total_arg_size += shape.dims[1].value
dtype = [a.dtype for a in args][0]
# Now the computation.
scope = vs.get_variable_scope()
with vs.variable_scope(scope) as outer_scope:
weights = vs.get_variable(
_WEIGHTS_VARIABLE_NAME, [total_arg_size, output_size],
dtype=dtype,
initializer=kernel_initializer)
if len(args) == 1:
res = math_ops.matmul(args[0], weights)
else:
res = math_ops.matmul(array_ops.concat(args, 1), weights)
if not bias:
return res
with vs.variable_scope(outer_scope) as inner_scope:
inner_scope.set_partitioner(None)
if bias_initializer is None:
bias_initializer = init_ops.constant_initializer(0.0, dtype=dtype)
biases = vs.get_variable(
_BIAS_VARIABLE_NAME, [output_size],
dtype=dtype,
initializer=bias_initializer)
return nn_ops.bias_add(res, biases)
class EmbeddingWrapper(RNNCell):
"""Operator adding input embedding to the given cell.
Note: in many cases it may be more efficient to not use this wrapper,
but instead concatenate the whole sequence of your inputs in time,
do the embedding on this batch-concatenated sequence, then split it and
feed into your RNN.
"""
def __init__(self,
cell,
embedding_classes,
embedding_size,
initializer=None,
reuse=None):
"""Create a cell with an added input embedding.
Args:
cell: an RNNCell, an embedding will be put before its inputs.
embedding_classes: integer, how many symbols will be embedded.
embedding_size: integer, the size of the vectors we embed into.
initializer: an initializer to use when creating the embedding;
if None, the initializer from variable scope or a default one is used.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
Raises:
TypeError: if cell is not an RNNCell.
ValueError: if embedding_classes is not positive.
"""
super(EmbeddingWrapper, self).__init__(_reuse=reuse)
rnn_cell_impl.assert_like_rnncell("cell", cell)
if embedding_classes <= 0 or embedding_size <= 0:
raise ValueError("Both embedding_classes and embedding_size must be > 0: "
"%d, %d." % (embedding_classes, embedding_size))
self._cell = cell
self._embedding_classes = embedding_classes
self._embedding_size = embedding_size
self._initializer = initializer
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._cell.output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
return self._cell.zero_state(batch_size, dtype)
def call(self, inputs, state):
"""Run the cell on embedded inputs."""
with ops.device("/cpu:0"):
if self._initializer:
initializer = self._initializer
elif vs.get_variable_scope().initializer:
initializer = vs.get_variable_scope().initializer
else:
# Default initializer for embeddings should have variance=1.
sqrt3 = math.sqrt(3) # Uniform(-sqrt(3), sqrt(3)) has variance=1.
initializer = init_ops.random_uniform_initializer(-sqrt3, sqrt3)
if isinstance(state, tuple):
data_type = state[0].dtype
else:
data_type = state.dtype
embedding = vs.get_variable(
"embedding", [self._embedding_classes, self._embedding_size],
initializer=initializer,
dtype=data_type)
embedded = embedding_ops.embedding_lookup(embedding,
array_ops.reshape(inputs, [-1]))
return self._cell(embedded, state)
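# Hedged usage sketch (sizes are illustrative): wrap a cell so that integer
# symbol ids of shape [batch] are embedded to [batch, 32] before the cell:
#
# cell = EmbeddingWrapper(rnn_cell_impl.GRUCell(64),
#                         embedding_classes=1000, embedding_size=32)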
class InputProjectionWrapper(RNNCell):
"""Operator adding an input projection to the given cell.
Note: in many cases it may be more efficient to not use this wrapper,
but instead concatenate the whole sequence of your inputs in time,
do the projection on this batch-concatenated sequence, then split it.
"""
def __init__(self,
cell,
num_proj,
activation=None,
input_size=None,
reuse=None):
"""Create a cell with input projection.
Args:
cell: an RNNCell, a projection of inputs is added before it.
num_proj: Python integer. The dimension to project to.
activation: (optional) an optional activation function.
input_size: Deprecated and unused.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
Raises:
TypeError: if cell is not an RNNCell.
"""
super(InputProjectionWrapper, self).__init__(_reuse=reuse)
if input_size is not None:
logging.warn("%s: The input_size parameter is deprecated.", self)
rnn_cell_impl.assert_like_rnncell("cell", cell)
self._cell = cell
self._num_proj = num_proj
self._activation = activation
self._linear = None
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._cell.output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
return self._cell.zero_state(batch_size, dtype)
def call(self, inputs, state):
"""Run the input projection and then the cell."""
# Default scope: "InputProjectionWrapper"
if self._linear is None:
self._linear = _Linear(inputs, self._num_proj, True)
projected = self._linear(inputs)
if self._activation:
projected = self._activation(projected)
return self._cell(projected, state)
class OutputProjectionWrapper(RNNCell):
"""Operator adding an output projection to the given cell.
Note: in many cases it may be more efficient to not use this wrapper,
but instead concatenate the whole sequence of your outputs in time,
do the projection on this batch-concatenated sequence, then split it
if needed or directly feed into a softmax.
"""
def __init__(self, cell, output_size, activation=None, reuse=None):
"""Create a cell with output projection.
Args:
cell: an RNNCell, a projection to output_size is added to it.
output_size: integer, the size of the output after projection.
activation: (optional) an optional activation function.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
Raises:
TypeError: if cell is not an RNNCell.
ValueError: if output_size is not positive.
"""
super(OutputProjectionWrapper, self).__init__(_reuse=reuse)
rnn_cell_impl.assert_like_rnncell("cell", cell)
if output_size < 1:
raise ValueError("Parameter output_size must be > 0: %d." % output_size)
self._cell = cell
self._output_size = output_size
self._activation = activation
self._linear = None
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._output_size
def zero_state(self, batch_size, dtype):
with ops.name_scope(type(self).__name__ + "ZeroState", values=[batch_size]):
return self._cell.zero_state(batch_size, dtype)
def call(self, inputs, state):
"""Run the cell and output projection on inputs, starting from state."""
output, res_state = self._cell(inputs, state)
if self._linear is None:
self._linear = _Linear(output, self._output_size, True)
projected = self._linear(output)
if self._activation:
projected = self._activation(projected)
return projected, res_state
|