Dataset columns:
  code        string (length 2 – 1.05M)
  repo_name   string (length 5 – 104)
  path        string (length 4 – 251)
  language    string (1 class)
  license     string (15 classes)
  size        int32 (2 – 1.05M)
""" Generating and counting primes. """ from __future__ import print_function, division import random from bisect import bisect # Using arrays for sieving instead of lists greatly reduces # memory consumption from array import array as _array from sympy import Function, S from sympy.core.compatibility import as_int, range from .primetest import isprime def _azeros(n): return _array('l', [0]*n) def _aset(*v): return _array('l', v) def _arange(a, b): return _array('l', range(a, b)) class Sieve: """An infinite list of prime numbers, implemented as a dynamically growing sieve of Eratosthenes. When a lookup is requested involving an odd number that has not been sieved, the sieve is automatically extended up to that number. Examples ======== >>> from sympy import sieve >>> sieve._reset() # this line for doctest only >>> 25 in sieve False >>> sieve._list array('l', [2, 3, 5, 7, 11, 13, 17, 19, 23]) """ # data shared (and updated) by all Sieve instances def __init__(self): self._n = 6 self._list = _aset(2, 3, 5, 7, 11, 13) # primes self._tlist = _aset(0, 1, 1, 2, 2, 4) # totient self._mlist = _aset(0, 1, -1, -1, 0, -1) # mobius assert all(len(i) == self._n for i in (self._list, self._tlist, self._mlist)) def __repr__(self): return ("<%s sieve (%i): %i, %i, %i, ... %i, %i\n" "%s sieve (%i): %i, %i, %i, ... %i, %i\n" "%s sieve (%i): %i, %i, %i, ... %i, %i>") % ( 'prime', len(self._list), self._list[0], self._list[1], self._list[2], self._list[-2], self._list[-1], 'totient', len(self._tlist), self._tlist[0], self._tlist[1], self._tlist[2], self._tlist[-2], self._tlist[-1], 'mobius', len(self._mlist), self._mlist[0], self._mlist[1], self._mlist[2], self._mlist[-2], self._mlist[-1]) def _reset(self, prime=None, totient=None, mobius=None): """Reset all caches (default). To reset one or more set the desired keyword to True.""" if all(i is None for i in (prime, totient, mobius)): prime = totient = mobius = True if prime: self._list = self._list[:self._n] if totient: self._tlist = self._tlist[:self._n] if mobius: self._mlist = self._mlist[:self._n] def extend(self, n): """Grow the sieve to cover all primes <= n (a real number). Examples ======== >>> from sympy import sieve >>> sieve._reset() # this line for doctest only >>> sieve.extend(30) >>> sieve[10] == 29 True """ n = int(n) if n <= self._list[-1]: return # We need to sieve against all bases up to sqrt(n). # This is a recursive call that will do nothing if there are enough # known bases already. maxbase = int(n**0.5) + 1 self.extend(maxbase) # Create a new sieve starting from sqrt(n) begin = self._list[-1] + 1 newsieve = _arange(begin, n + 1) # Now eliminate all multiples of primes in [2, sqrt(n)] for p in self.primerange(2, maxbase): # Start counting at a multiple of p, offsetting # the index to account for the new sieve's base index startindex = (-begin) % p for i in range(startindex, len(newsieve), p): newsieve[i] = 0 # Merge the sieves self._list += _array('l', [x for x in newsieve if x]) def extend_to_no(self, i): """Extend to include the ith prime number. Parameters ========== i : integer Examples ======== >>> from sympy import sieve >>> sieve._reset() # this line for doctest only >>> sieve.extend_to_no(9) >>> sieve._list array('l', [2, 3, 5, 7, 11, 13, 17, 19, 23]) Notes ===== The list is extended by 50% if it is too short, so it is likely that it will be longer than requested. """ i = as_int(i) while len(self._list) < i: self.extend(int(self._list[-1] * 1.5)) def primerange(self, a, b): """Generate all prime numbers in the range [a, b). 
Examples ======== >>> from sympy import sieve >>> print([i for i in sieve.primerange(7, 18)]) [7, 11, 13, 17] """ from sympy.functions.elementary.integers import ceiling # wrapping ceiling in as_int will raise an error if there was a problem # determining whether the expression was exactly an integer or not a = max(2, as_int(ceiling(a))) b = as_int(ceiling(b)) if a >= b: return self.extend(b) i = self.search(a)[1] maxi = len(self._list) + 1 while i < maxi: p = self._list[i - 1] if p < b: yield p i += 1 else: return def totientrange(self, a, b): """Generate all totient numbers for the range [a, b). Examples ======== >>> from sympy import sieve >>> print([i for i in sieve.totientrange(7, 18)]) [6, 4, 6, 4, 10, 4, 12, 6, 8, 8, 16] """ from sympy.functions.elementary.integers import ceiling # wrapping ceiling in as_int will raise an error if there was a problem # determining whether the expression was exactly an integer or not a = max(1, as_int(ceiling(a))) b = as_int(ceiling(b)) n = len(self._tlist) if a >= b: return elif b <= n: for i in range(a, b): yield self._tlist[i] else: self._tlist += _arange(n, b) for i in range(1, n): ti = self._tlist[i] startindex = (n + i - 1) // i * i for j in range(startindex, b, i): self._tlist[j] -= ti if i >= a: yield ti for i in range(n, b): ti = self._tlist[i] for j in range(2 * i, b, i): self._tlist[j] -= ti if i >= a: yield ti def mobiusrange(self, a, b): """Generate all mobius numbers for the range [a, b). Parameters ========== a : integer First number in range b : integer First number outside of range Examples ======== >>> from sympy import sieve >>> print([i for i in sieve.mobiusrange(7, 18)]) [-1, 0, 0, 1, -1, 0, -1, 1, 1, 0, -1] """ from sympy.functions.elementary.integers import ceiling # wrapping ceiling in as_int will raise an error if there was a problem # determining whether the expression was exactly an integer or not a = max(1, as_int(ceiling(a))) b = as_int(ceiling(b)) n = len(self._mlist) if a >= b: return elif b <= n: for i in range(a, b): yield self._mlist[i] else: self._mlist += _azeros(b - n) for i in range(1, n): mi = self._mlist[i] startindex = (n + i - 1) // i * i for j in range(startindex, b, i): self._mlist[j] -= mi if i >= a: yield mi for i in range(n, b): mi = self._mlist[i] for j in range(2 * i, b, i): self._mlist[j] -= mi if i >= a: yield mi def search(self, n): """Return the indices i, j of the primes that bound n. If n is prime then i == j. Although n can be an expression, if ceiling cannot convert it to an integer then an n error will be raised. 
Examples ======== >>> from sympy import sieve >>> sieve.search(25) (9, 10) >>> sieve.search(23) (9, 9) """ from sympy.functions.elementary.integers import ceiling # wrapping ceiling in as_int will raise an error if there was a problem # determining whether the expression was exactly an integer or not test = as_int(ceiling(n)) n = as_int(n) if n < 2: raise ValueError("n should be >= 2 but got: %s" % n) if n > self._list[-1]: self.extend(n) b = bisect(self._list, n) if self._list[b - 1] == test: return b, b else: return b, b + 1 def __contains__(self, n): try: n = as_int(n) assert n >= 2 except (ValueError, AssertionError): return False if n % 2 == 0: return n == 2 a, b = self.search(n) return a == b def __getitem__(self, n): """Return the nth prime number""" if isinstance(n, slice): self.extend_to_no(n.stop) return self._list[n.start - 1:n.stop - 1:n.step] else: n = as_int(n) self.extend_to_no(n) return self._list[n - 1] # Generate a global object for repeated use in trial division etc sieve = Sieve() def prime(nth): """ Return the nth prime, with the primes indexed as prime(1) = 2, prime(2) = 3, etc.... The nth prime is approximately n*log(n). Logarithmic integral of x is a pretty nice approximation for number of primes <= x, i.e. li(x) ~ pi(x) In fact, for the numbers we are concerned about( x<1e11 ), li(x) - pi(x) < 50000 Also, li(x) > pi(x) can be safely assumed for the numbers which can be evaluated by this function. Here, we find the least integer m such that li(m) > n using binary search. Now pi(m-1) < li(m-1) <= n, We find pi(m - 1) using primepi function. Starting from m, we have to find n - pi(m-1) more primes. For the inputs this implementation can handle, we will have to test primality for at max about 10**5 numbers, to get our answer. Examples ======== >>> from sympy import prime >>> prime(10) 29 >>> prime(1) 2 >>> prime(100000) 1299709 See Also ======== sympy.ntheory.primetest.isprime : Test if n is prime primerange : Generate all primes in a given range primepi : Return the number of primes less than or equal to n References ========== .. [1] https://en.wikipedia.org/wiki/Prime_number_theorem#Table_of_.CF.80.28x.29.2C_x_.2F_log_x.2C_and_li.28x.29 .. [2] https://en.wikipedia.org/wiki/Prime_number_theorem#Approximations_for_the_nth_prime_number .. [3] https://en.wikipedia.org/wiki/Skewes%27_number """ n = as_int(nth) if n < 1: raise ValueError("nth must be a positive integer; prime(1) == 2") if n <= len(sieve._list): return sieve[n] from sympy.functions.special.error_functions import li from sympy.functions.elementary.exponential import log a = 2 # Lower bound for binary search b = int(n*(log(n) + log(log(n)))) # Upper bound for the search. while a < b: mid = (a + b) >> 1 if li(mid) > n: b = mid else: a = mid + 1 n_primes = primepi(a - 1) while n_primes < n: if isprime(a): n_primes += 1 a += 1 return a - 1 class primepi(Function): """ Represents the prime counting function pi(n) = the number of prime numbers less than or equal to n. Algorithm Description: In sieve method, we remove all multiples of prime p except p itself. Let phi(i,j) be the number of integers 2 <= k <= i which remain after sieving from primes less than or equal to j. Clearly, pi(n) = phi(n, sqrt(n)) If j is not a prime, phi(i,j) = phi(i, j - 1) if j is a prime, We remove all numbers(except j) whose smallest prime factor is j. 
Let x= j*a be such a number, where 2 <= a<= i / j Now, after sieving from primes <= j - 1, a must remain (because x, and hence a has no prime factor <= j - 1) Clearly, there are phi(i / j, j - 1) such a which remain on sieving from primes <= j - 1 Now, if a is a prime less than equal to j - 1, x= j*a has smallest prime factor = a, and has already been removed(by sieving from a). So, we don't need to remove it again. (Note: there will be pi(j - 1) such x) Thus, number of x, that will be removed are: phi(i / j, j - 1) - phi(j - 1, j - 1) (Note that pi(j - 1) = phi(j - 1, j - 1)) => phi(i,j) = phi(i, j - 1) - phi(i / j, j - 1) + phi(j - 1, j - 1) So,following recursion is used and implemented as dp: phi(a, b) = phi(a, b - 1), if b is not a prime phi(a, b) = phi(a, b-1)-phi(a / b, b-1) + phi(b-1, b-1), if b is prime Clearly a is always of the form floor(n / k), which can take at most 2*sqrt(n) values. Two arrays arr1,arr2 are maintained arr1[i] = phi(i, j), arr2[i] = phi(n // i, j) Finally the answer is arr2[1] Examples ======== >>> from sympy import primepi >>> primepi(25) 9 See Also ======== sympy.ntheory.primetest.isprime : Test if n is prime primerange : Generate all primes in a given range prime : Return the nth prime """ @classmethod def eval(cls, n): if n is S.Infinity: return S.Infinity if n is S.NegativeInfinity: return S.Zero try: n = int(n) except TypeError: if n.is_real == False or n is S.NaN: raise ValueError("n must be real") return if n < 2: return S.Zero if n <= sieve._list[-1]: return S(sieve.search(n)[0]) lim = int(n ** 0.5) lim -= 1 lim = max(lim, 0) while lim * lim <= n: lim += 1 lim -= 1 arr1 = [0] * (lim + 1) arr2 = [0] * (lim + 1) for i in range(1, lim + 1): arr1[i] = i - 1 arr2[i] = n // i - 1 for i in range(2, lim + 1): # Presently, arr1[k]=phi(k,i - 1), # arr2[k] = phi(n // k,i - 1) if arr1[i] == arr1[i - 1]: continue p = arr1[i - 1] for j in range(1, min(n // (i * i), lim) + 1): st = i * j if st <= lim: arr2[j] -= arr2[st] - p else: arr2[j] -= arr1[n // st] - p lim2 = min(lim, i * i - 1) for j in range(lim, lim2, -1): arr1[j] -= arr1[j // i] - p return S(arr2[1]) def nextprime(n, ith=1): """ Return the ith prime greater than n. i must be an integer. Notes ===== Potential primes are located at 6*j +/- 1. This property is used during searching. >>> from sympy import nextprime >>> [(i, nextprime(i)) for i in range(10, 15)] [(10, 11), (11, 13), (12, 13), (13, 17), (14, 17)] >>> nextprime(2, ith=2) # the 2nd prime after 2 5 See Also ======== prevprime : Return the largest prime smaller than n primerange : Generate all primes in a given range """ n = int(n) i = as_int(ith) if i > 1: pr = n j = 1 while 1: pr = nextprime(pr) j += 1 if j > i: break return pr if n < 2: return 2 if n < 7: return {2: 3, 3: 5, 4: 5, 5: 7, 6: 7}[n] if n <= sieve._list[-2]: l, u = sieve.search(n) if l == u: return sieve[u + 1] else: return sieve[u] nn = 6*(n//6) if nn == n: n += 1 if isprime(n): return n n += 4 elif n - nn == 5: n += 2 if isprime(n): return n n += 4 else: n = nn + 5 while 1: if isprime(n): return n n += 2 if isprime(n): return n n += 4 def prevprime(n): """ Return the largest prime smaller than n. Notes ===== Potential primes are located at 6*j +/- 1. This property is used during searching. 
>>> from sympy import prevprime >>> [(i, prevprime(i)) for i in range(10, 15)] [(10, 7), (11, 7), (12, 11), (13, 11), (14, 13)] See Also ======== nextprime : Return the ith prime greater than n primerange : Generates all primes in a given range """ from sympy.functions.elementary.integers import ceiling # wrapping ceiling in as_int will raise an error if there was a problem # determining whether the expression was exactly an integer or not n = as_int(ceiling(n)) if n < 3: raise ValueError("no preceding primes") if n < 8: return {3: 2, 4: 3, 5: 3, 6: 5, 7: 5}[n] if n <= sieve._list[-1]: l, u = sieve.search(n) if l == u: return sieve[l-1] else: return sieve[l] nn = 6*(n//6) if n - nn <= 1: n = nn - 1 if isprime(n): return n n -= 4 else: n = nn + 1 while 1: if isprime(n): return n n -= 2 if isprime(n): return n n -= 4 def primerange(a, b): """ Generate a list of all prime numbers in the range [a, b). If the range exists in the default sieve, the values will be returned from there; otherwise values will be returned but will not modify the sieve. Examples ======== >>> from sympy import primerange, sieve >>> print([i for i in primerange(1, 30)]) [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] The Sieve method, primerange, is generally faster but it will occupy more memory as the sieve stores values. The default instance of Sieve, named sieve, can be used: >>> list(sieve.primerange(1, 30)) [2, 3, 5, 7, 11, 13, 17, 19, 23, 29] Notes ===== Some famous conjectures about the occurrence of primes in a given range are [1]: - Twin primes: though often not, the following will give 2 primes an infinite number of times: primerange(6*n - 1, 6*n + 2) - Legendre's: the following always yields at least one prime primerange(n**2, (n+1)**2+1) - Bertrand's (proven): there is always a prime in the range primerange(n, 2*n) - Brocard's: there are at least four primes in the range primerange(prime(n)**2, prime(n+1)**2) The average gap between primes is log(n) [2]; the gap between primes can be arbitrarily large since sequences of composite numbers are arbitrarily large, e.g. the numbers in the sequence n! + 2, n! + 3 ... n! + n are all composite. See Also ======== nextprime : Return the ith prime greater than n prevprime : Return the largest prime smaller than n randprime : Returns a random prime in a given range primorial : Returns the product of primes based on condition Sieve.primerange : return range from already computed primes or extend the sieve to contain the requested range. References ========== .. [1] https://en.wikipedia.org/wiki/Prime_number .. [2] http://primes.utm.edu/notes/gaps.html """ from sympy.functions.elementary.integers import ceiling if a >= b: return # if we already have the range, return it if b <= sieve._list[-1]: for i in sieve.primerange(a, b): yield i return # otherwise compute, without storing, the desired range. # wrapping ceiling in as_int will raise an error if there was a problem # determining whether the expression was exactly an integer or not a = as_int(ceiling(a)) - 1 b = as_int(ceiling(b)) while 1: a = nextprime(a) if a < b: yield a else: return def randprime(a, b): """ Return a random prime number in the range [a, b). Bertrand's postulate assures that randprime(a, 2*a) will always succeed for a > 1. Examples ======== >>> from sympy import randprime, isprime >>> randprime(1, 30) #doctest: +SKIP 13 >>> isprime(randprime(1, 30)) True See Also ======== primerange : Generate all primes in a given range References ========== .. 
[1] https://en.wikipedia.org/wiki/Bertrand's_postulate """ if a >= b: return a, b = map(int, (a, b)) n = random.randint(a - 1, b) p = nextprime(n) if p >= b: p = prevprime(b) if p < a: raise ValueError("no primes exist in the specified range") return p def primorial(n, nth=True): """ Returns the product of the first n primes (default) or the primes less than or equal to n (when ``nth=False``). Examples ======== >>> from sympy.ntheory.generate import primorial, randprime, primerange >>> from sympy import factorint, Mul, primefactors, sqrt >>> primorial(4) # the first 4 primes are 2, 3, 5, 7 210 >>> primorial(4, nth=False) # primes <= 4 are 2 and 3 6 >>> primorial(1) 2 >>> primorial(1, nth=False) 1 >>> primorial(sqrt(101), nth=False) 210 One can argue that the primes are infinite since if you take a set of primes and multiply them together (e.g. the primorial) and then add or subtract 1, the result cannot be divided by any of the original factors, hence either 1 or more new primes must divide this product of primes. In this case, the number itself is a new prime: >>> factorint(primorial(4) + 1) {211: 1} In this case two new primes are the factors: >>> factorint(primorial(4) - 1) {11: 1, 19: 1} Here, some primes smaller and larger than the primes multiplied together are obtained: >>> p = list(primerange(10, 20)) >>> sorted(set(primefactors(Mul(*p) + 1)).difference(set(p))) [2, 5, 31, 149] See Also ======== primerange : Generate all primes in a given range """ if nth: n = as_int(n) else: n = int(n) if n < 1: raise ValueError("primorial argument must be >= 1") p = 1 if nth: for i in range(1, n + 1): p *= prime(i) else: for i in primerange(2, n + 1): p *= i return p def cycle_length(f, x0, nmax=None, values=False): """For a given iterated sequence, return a generator that gives the length of the iterated cycle (lambda) and the length of terms before the cycle begins (mu); if ``values`` is True then the terms of the sequence will be returned instead. The sequence is started with value ``x0``. Note: more than the first lambda + mu terms may be returned and this is the cost of cycle detection with Brent's method; there are, however, generally less terms calculated than would have been calculated if the proper ending point were determined, e.g. by using Floyd's method. >>> from sympy.ntheory.generate import cycle_length This will yield successive values of i <-- func(i): >>> def iter(func, i): ... while 1: ... ii = func(i) ... yield ii ... i = ii ... A function is defined: >>> func = lambda i: (i**2 + 1) % 51 and given a seed of 4 and the mu and lambda terms calculated: >>> next(cycle_length(func, 4)) (6, 2) We can see what is meant by looking at the output: >>> n = cycle_length(func, 4, values=True) >>> list(ni for ni in n) [17, 35, 2, 5, 26, 14, 44, 50, 2, 5, 26, 14] There are 6 repeating values after the first 2. If a sequence is suspected of being longer than you might wish, ``nmax`` can be used to exit early (and mu will be returned as None): >>> next(cycle_length(func, 4, nmax = 4)) (4, None) >>> [ni for ni in cycle_length(func, 4, nmax = 4, values=True)] [17, 35, 2, 5] Code modified from: https://en.wikipedia.org/wiki/Cycle_detection. """ nmax = int(nmax or 0) # main phase: search successive powers of two power = lam = 1 tortoise, hare = x0, f(x0) # f(x0) is the element/node next to x0. i = 0 while tortoise != hare and (not nmax or i < nmax): i += 1 if power == lam: # time to start a new power of two? 
tortoise = hare power *= 2 lam = 0 if values: yield hare hare = f(hare) lam += 1 if nmax and i == nmax: if values: return else: yield nmax, None return if not values: # Find the position of the first repetition of length lambda mu = 0 tortoise = hare = x0 for i in range(lam): hare = f(hare) while tortoise != hare: tortoise = f(tortoise) hare = f(hare) mu += 1 if mu: mu -= 1 yield lam, mu def composite(nth): """ Return the nth composite number, with the composite numbers indexed as composite(1) = 4, composite(2) = 6, etc.... Examples ======== >>> from sympy import composite >>> composite(36) 52 >>> composite(1) 4 >>> composite(17737) 20000 See Also ======== sympy.ntheory.primetest.isprime : Test if n is prime primerange : Generate all primes in a given range primepi : Return the number of primes less than or equal to n prime : Return the nth prime compositepi : Return the number of positive composite numbers less than or equal to n """ n = as_int(nth) if n < 1: raise ValueError("nth must be a positive integer; composite(1) == 4") composite_arr = [4, 6, 8, 9, 10, 12, 14, 15, 16, 18] if n <= 10: return composite_arr[n - 1] a, b = 4, sieve._list[-1] if n <= b - primepi(b) - 1: while a < b - 1: mid = (a + b) >> 1 if mid - primepi(mid) - 1 > n: b = mid else: a = mid if isprime(a): a -= 1 return a from sympy.functions.special.error_functions import li from sympy.functions.elementary.exponential import log a = 4 # Lower bound for binary search b = int(n*(log(n) + log(log(n)))) # Upper bound for the search. while a < b: mid = (a + b) >> 1 if mid - li(mid) - 1 > n: b = mid else: a = mid + 1 n_composites = a - primepi(a) - 1 while n_composites > n: if not isprime(a): n_composites -= 1 a -= 1 if isprime(a): a -= 1 return a def compositepi(n): """ Return the number of positive composite numbers less than or equal to n. The first positive composite is 4, i.e. compositepi(4) = 1. Examples ======== >>> from sympy import compositepi >>> compositepi(25) 15 >>> compositepi(1000) 831 See Also ======== sympy.ntheory.primetest.isprime : Test if n is prime primerange : Generate all primes in a given range prime : Return the nth prime primepi : Return the number of primes less than or equal to n composite : Return the nth composite number """ n = int(n) if n < 4: return 0 return n - primepi(n) - 1
kaushik94/sympy
sympy/ntheory/generate.py
Python
bsd-3-clause
28,626
import base64 import logging import string import warnings from datetime import datetime, timedelta from django.conf import settings from django.contrib.sessions.exceptions import SuspiciousSession from django.core import signing from django.core.exceptions import SuspiciousOperation from django.utils import timezone from django.utils.crypto import ( constant_time_compare, get_random_string, salted_hmac, ) from django.utils.deprecation import RemovedInDjango40Warning from django.utils.module_loading import import_string from django.utils.translation import LANGUAGE_SESSION_KEY # session_key should not be case sensitive because some backends can store it # on case insensitive file systems. VALID_KEY_CHARS = string.ascii_lowercase + string.digits class CreateError(Exception): """ Used internally as a consistent exception type to catch from save (see the docstring for SessionBase.save() for details). """ pass class UpdateError(Exception): """ Occurs if Django tries to update a session that was deleted. """ pass class SessionBase: """ Base class for all Session classes. """ TEST_COOKIE_NAME = 'testcookie' TEST_COOKIE_VALUE = 'worked' __not_given = object() def __init__(self, session_key=None): self._session_key = session_key self.accessed = False self.modified = False self.serializer = import_string(settings.SESSION_SERIALIZER) def __contains__(self, key): return key in self._session def __getitem__(self, key): if key == LANGUAGE_SESSION_KEY: warnings.warn( 'The user language will no longer be stored in ' 'request.session in Django 4.0. Read it from ' 'request.COOKIES[settings.LANGUAGE_COOKIE_NAME] instead.', RemovedInDjango40Warning, stacklevel=2, ) return self._session[key] def __setitem__(self, key, value): self._session[key] = value self.modified = True def __delitem__(self, key): del self._session[key] self.modified = True @property def key_salt(self): return 'django.contrib.sessions.' + self.__class__.__qualname__ def get(self, key, default=None): return self._session.get(key, default) def pop(self, key, default=__not_given): self.modified = self.modified or key in self._session args = () if default is self.__not_given else (default,) return self._session.pop(key, *args) def setdefault(self, key, value): if key in self._session: return self._session[key] else: self.modified = True self._session[key] = value return value def set_test_cookie(self): self[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE def test_cookie_worked(self): return self.get(self.TEST_COOKIE_NAME) == self.TEST_COOKIE_VALUE def delete_test_cookie(self): del self[self.TEST_COOKIE_NAME] def _hash(self, value): # RemovedInDjango40Warning: pre-Django 3.1 format will be invalid. key_salt = "django.contrib.sessions" + self.__class__.__name__ return salted_hmac(key_salt, value).hexdigest() def encode(self, session_dict): "Return the given session dictionary serialized and encoded as a string." # RemovedInDjango40Warning: DEFAULT_HASHING_ALGORITHM will be removed. if settings.DEFAULT_HASHING_ALGORITHM == 'sha1': return self._legacy_encode(session_dict) return signing.dumps( session_dict, salt=self.key_salt, serializer=self.serializer, compress=True, ) def decode(self, session_data): try: return signing.loads(session_data, salt=self.key_salt, serializer=self.serializer) # RemovedInDjango40Warning: when the deprecation ends, handle here # exceptions similar to what _legacy_decode() does now. except signing.BadSignature: try: # Return an empty session if data is not in the pre-Django 3.1 # format. 
return self._legacy_decode(session_data) except Exception: logger = logging.getLogger('django.security.SuspiciousSession') logger.warning('Session data corrupted') return {} except Exception: return self._legacy_decode(session_data) def _legacy_encode(self, session_dict): # RemovedInDjango40Warning. serialized = self.serializer().dumps(session_dict) hash = self._hash(serialized) return base64.b64encode(hash.encode() + b':' + serialized).decode('ascii') def _legacy_decode(self, session_data): # RemovedInDjango40Warning: pre-Django 3.1 format will be invalid. encoded_data = base64.b64decode(session_data.encode('ascii')) try: # could produce ValueError if there is no ':' hash, serialized = encoded_data.split(b':', 1) expected_hash = self._hash(serialized) if not constant_time_compare(hash.decode(), expected_hash): raise SuspiciousSession("Session data corrupted") else: return self.serializer().loads(serialized) except Exception as e: # ValueError, SuspiciousOperation, unpickling exceptions. If any of # these happen, just return an empty dictionary (an empty session). if isinstance(e, SuspiciousOperation): logger = logging.getLogger('django.security.%s' % e.__class__.__name__) logger.warning(str(e)) return {} def update(self, dict_): self._session.update(dict_) self.modified = True def has_key(self, key): return key in self._session def keys(self): return self._session.keys() def values(self): return self._session.values() def items(self): return self._session.items() def clear(self): # To avoid unnecessary persistent storage accesses, we set up the # internals directly (loading data wastes time, since we are going to # set it to an empty dict anyway). self._session_cache = {} self.accessed = True self.modified = True def is_empty(self): "Return True when there is no session_key and the session is empty." try: return not self._session_key and not self._session_cache except AttributeError: return True def _get_new_session_key(self): "Return session key that isn't being used." while True: session_key = get_random_string(32, VALID_KEY_CHARS) if not self.exists(session_key): return session_key def _get_or_create_session_key(self): if self._session_key is None: self._session_key = self._get_new_session_key() return self._session_key def _validate_session_key(self, key): """ Key must be truthy and at least 8 characters long. 8 characters is an arbitrary lower bound for some minimal key security. """ return key and len(key) >= 8 def _get_session_key(self): return self.__session_key def _set_session_key(self, value): """ Validate session key on assignment. Invalid values will set to None. """ if self._validate_session_key(value): self.__session_key = value else: self.__session_key = None session_key = property(_get_session_key) _session_key = property(_get_session_key, _set_session_key) def _get_session(self, no_load=False): """ Lazily load session from storage (unless "no_load" is True, when only an empty dict is stored) and store it in the current instance. """ self.accessed = True try: return self._session_cache except AttributeError: if self.session_key is None or no_load: self._session_cache = {} else: self._session_cache = self.load() return self._session_cache _session = property(_get_session) def get_session_cookie_age(self): return settings.SESSION_COOKIE_AGE def get_expiry_age(self, **kwargs): """Get the number of seconds until the session expires. Optionally, this function accepts `modification` and `expiry` keyword arguments specifying the modification and expiry of the session. 
""" try: modification = kwargs['modification'] except KeyError: modification = timezone.now() # Make the difference between "expiry=None passed in kwargs" and # "expiry not passed in kwargs", in order to guarantee not to trigger # self.load() when expiry is provided. try: expiry = kwargs['expiry'] except KeyError: expiry = self.get('_session_expiry') if not expiry: # Checks both None and 0 cases return self.get_session_cookie_age() if not isinstance(expiry, datetime): return expiry delta = expiry - modification return delta.days * 86400 + delta.seconds def get_expiry_date(self, **kwargs): """Get session the expiry date (as a datetime object). Optionally, this function accepts `modification` and `expiry` keyword arguments specifying the modification and expiry of the session. """ try: modification = kwargs['modification'] except KeyError: modification = timezone.now() # Same comment as in get_expiry_age try: expiry = kwargs['expiry'] except KeyError: expiry = self.get('_session_expiry') if isinstance(expiry, datetime): return expiry expiry = expiry or self.get_session_cookie_age() return modification + timedelta(seconds=expiry) def set_expiry(self, value): """ Set a custom expiration for the session. ``value`` can be an integer, a Python ``datetime`` or ``timedelta`` object or ``None``. If ``value`` is an integer, the session will expire after that many seconds of inactivity. If set to ``0`` then the session will expire on browser close. If ``value`` is a ``datetime`` or ``timedelta`` object, the session will expire at that specific future time. If ``value`` is ``None``, the session uses the global session expiry policy. """ if value is None: # Remove any custom expiration for this session. try: del self['_session_expiry'] except KeyError: pass return if isinstance(value, timedelta): value = timezone.now() + value self['_session_expiry'] = value def get_expire_at_browser_close(self): """ Return ``True`` if the session is set to expire when the browser closes, and ``False`` if there's an expiry date. Use ``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry date/age, if there is one. """ if self.get('_session_expiry') is None: return settings.SESSION_EXPIRE_AT_BROWSER_CLOSE return self.get('_session_expiry') == 0 def flush(self): """ Remove the current session data from the database and regenerate the key. """ self.clear() self.delete() self._session_key = None def cycle_key(self): """ Create a new session key, while retaining the current session data. """ data = self._session key = self.session_key self.create() self._session_cache = data if key: self.delete(key) # Methods that child classes must implement. def exists(self, session_key): """ Return True if the given session_key already exists. """ raise NotImplementedError('subclasses of SessionBase must provide an exists() method') def create(self): """ Create a new session instance. Guaranteed to create a new object with a unique key and will have saved the result once (with empty data) before the method returns. """ raise NotImplementedError('subclasses of SessionBase must provide a create() method') def save(self, must_create=False): """ Save the session data. If 'must_create' is True, create a new session object (or raise CreateError). Otherwise, only update an existing object and don't create one (raise UpdateError if needed). """ raise NotImplementedError('subclasses of SessionBase must provide a save() method') def delete(self, session_key=None): """ Delete the session data under this key. 
If the key is None, use the current session key value. """ raise NotImplementedError('subclasses of SessionBase must provide a delete() method') def load(self): """ Load the session data and return a dictionary. """ raise NotImplementedError('subclasses of SessionBase must provide a load() method') @classmethod def clear_expired(cls): """ Remove expired sessions from the session store. If this operation isn't possible on a given backend, it should raise NotImplementedError. If it isn't necessary, because the backend has a built-in expiration mechanism, it should be a no-op. """ raise NotImplementedError('This backend does not support clear_expired().')
wkschwartz/django
django/contrib/sessions/backends/base.py
Python
bsd-3-clause
13,900
# -*- coding: utf-8 -*-
import os

CODE_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))


def get_permdir():
    return os.path.join(CODE_DIR, 'permdir')


def get_repo_root():
    return get_permdir()


def get_tmpdir():
    return os.path.join(CODE_DIR, 'tmpdir')


def init_permdir():
    path = get_permdir()
    if not os.path.exists(path):
        os.makedirs(path)


init_permdir()
douban/code
vilya/libs/permdir.py
Python
bsd-3-clause
407
from social_core.backends.upwork import UpworkOAuth
cjltsod/python-social-auth
social/backends/upwork.py
Python
bsd-3-clause
52
""" This module collects helper functions and classes that "span" multiple levels of MVC. In other words, these functions/classes introduce controlled coupling for convenience's sake. """ import warnings from django.template import loader, RequestContext from django.template.context import _current_app_undefined from django.template.engine import ( _context_instance_undefined, _dictionary_undefined, _dirs_undefined) from django.http import HttpResponse, Http404 from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect from django.db.models.base import ModelBase from django.db.models.manager import Manager from django.db.models.query import QuerySet from django.core import urlresolvers from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning from django.utils.encoding import force_text from django.utils.functional import Promise def render_to_response(template_name, context=None, context_instance=_context_instance_undefined, content_type=None, status=None, dirs=_dirs_undefined, dictionary=_dictionary_undefined): """ Returns a HttpResponse whose content is filled with the result of calling django.template.loader.render_to_string() with the passed arguments. """ if (context_instance is _context_instance_undefined and dirs is _dirs_undefined and dictionary is _dictionary_undefined): # No deprecated arguments were passed - use the new code path content = loader.render_to_string(template_name, context) else: # Some deprecated arguments were passed - use the legacy code path content = loader.render_to_string( template_name, context, context_instance, dirs, dictionary) return HttpResponse(content, content_type, status) def render(request, template_name, context=None, context_instance=_context_instance_undefined, content_type=None, status=None, current_app=_current_app_undefined, dirs=_dirs_undefined, dictionary=_dictionary_undefined): """ Returns a HttpResponse whose content is filled with the result of calling django.template.loader.render_to_string() with the passed arguments. Uses a RequestContext by default. """ if (context_instance is _context_instance_undefined and current_app is _current_app_undefined and dirs is _dirs_undefined and dictionary is _dictionary_undefined): # No deprecated arguments were passed - use the new code path # In Django 2.0, request should become a positional argument. content = loader.render_to_string(template_name, context, request=request) else: # Some deprecated arguments were passed - use the legacy code path if context_instance is not _context_instance_undefined: if current_app is not _current_app_undefined: raise ValueError('If you provide a context_instance you must ' 'set its current_app before calling render()') else: context_instance = RequestContext(request) if current_app is not _current_app_undefined: warnings.warn( "The current_app argument of render is deprecated. " "Set the current_app attribute of request instead.", RemovedInDjango20Warning, stacklevel=2) request.current_app = current_app # Directly set the private attribute to avoid triggering the # warning in RequestContext.__init__. context_instance._current_app = current_app content = loader.render_to_string( template_name, context, context_instance, dirs, dictionary) return HttpResponse(content, content_type, status) def redirect(to, *args, **kwargs): """ Returns an HttpResponseRedirect to the appropriate URL for the arguments passed. The arguments could be: * A model: the model's `get_absolute_url()` function will be called. 
* A view name, possibly with arguments: `urlresolvers.reverse()` will be used to reverse-resolve the name. * A URL, which will be used as-is for the redirect location. By default issues a temporary redirect; pass permanent=True to issue a permanent redirect """ if kwargs.pop('permanent', False): redirect_class = HttpResponsePermanentRedirect else: redirect_class = HttpResponseRedirect return redirect_class(resolve_url(to, *args, **kwargs)) def _get_queryset(klass): """ Returns a QuerySet from a Model, Manager, or QuerySet. Created to make get_object_or_404 and get_list_or_404 more DRY. Raises a ValueError if klass is not a Model, Manager, or QuerySet. """ if isinstance(klass, QuerySet): return klass elif isinstance(klass, Manager): manager = klass elif isinstance(klass, ModelBase): manager = klass._default_manager else: if isinstance(klass, type): klass__name = klass.__name__ else: klass__name = klass.__class__.__name__ raise ValueError("Object is of type '%s', but must be a Django Model, " "Manager, or QuerySet" % klass__name) return manager.all() def get_object_or_404(klass, *args, **kwargs): """ Uses get() to return an object, or raises a Http404 exception if the object does not exist. klass may be a Model, Manager, or QuerySet object. All other passed arguments and keyword arguments are used in the get() query. Note: Like with get(), an MultipleObjectsReturned will be raised if more than one object is found. """ queryset = _get_queryset(klass) try: return queryset.get(*args, **kwargs) except queryset.model.DoesNotExist: raise Http404('No %s matches the given query.' % queryset.model._meta.object_name) def get_list_or_404(klass, *args, **kwargs): """ Uses filter() to return a list of objects, or raise a Http404 exception if the list is empty. klass may be a Model, Manager, or QuerySet object. All other passed arguments and keyword arguments are used in the filter() query. """ queryset = _get_queryset(klass) obj_list = list(queryset.filter(*args, **kwargs)) if not obj_list: raise Http404('No %s matches the given query.' % queryset.model._meta.object_name) return obj_list def resolve_url(to, *args, **kwargs): """ Return a URL appropriate for the arguments passed. The arguments could be: * A model: the model's `get_absolute_url()` function will be called. * A view name, possibly with arguments: `urlresolvers.reverse()` will be used to reverse-resolve the name. * A URL, which will be returned as-is. """ # If it's a model, use get_absolute_url() if hasattr(to, 'get_absolute_url'): return to.get_absolute_url() if isinstance(to, Promise): # Expand the lazy instance, as it can cause issues when it is passed # further to some Python functions like urlparse. to = force_text(to) if isinstance(to, six.string_types): # Handle relative URLs if any(to.startswith(path) for path in ('./', '../')): return to # Next try a reverse URL resolution. try: return urlresolvers.reverse(to, args=args, kwargs=kwargs) except urlresolvers.NoReverseMatch: # If this is a callable, re-raise. if callable(to): raise # If this doesn't "feel" like a URL, re-raise. if '/' not in to and '.' not in to: raise # Finally, fall back and assume it's a URL return to
doismellburning/django
django/shortcuts.py
Python
bsd-3-clause
7,865
from settings import *

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'denorm',
        'HOST': 'localhost',
        'USER': 'denorm',
        'PASSWORD': 'denorm1',
    }
}
mjtamlyn/django-denorm
test_project/settings_mysql.py
Python
bsd-3-clause
221
#!/usr/bin/env python

import argparse
import time

from pytx import ThreatIndicator
from pytx.vocabulary import ThreatExchange as te
from pytx.vocabulary import ThreatType as tt
from pytx.vocabulary import Types as t


def get_results(options):
    '''
    Builds a query string based on the specified options and runs it.
    '''
    if options.since is None or options.until is None:
        raise Exception('You must specify both "since" and "until" values')

    results = ThreatIndicator.objects(threat_type=tt.COMPROMISED_CREDENTIAL,
                                      type_=t.EMAIL_ADDRESS,
                                      limit=options.limit,
                                      fields=['indicator', 'passwords'],
                                      since=options.since,
                                      until=options.until)
    return results


def process_result(handle, result):
    '''
    Process the threat indicators received from the server. This version
    writes the indicators to the output file specified by 'handle', if any.
    Indicators are written one per line.
    '''
    for password in result.passwords:
        output = '%s:%s\n' % (result.indicator, password)
        if handle is None:
            print output,
        else:
            handle.write(output)


def run_query(options, handle):
    start = int(time.time())
    print 'READING %s%s' % (te.URL, te.THREAT_INDICATORS)
    results = get_results(options)
    count = 0
    for result in results:
        process_result(handle, result)
        count += 1
    end = int(time.time())
    print ('SUCCESS: Got %d indicators in %d seconds' % (count, end - start))
    return


def get_args():
    parser = argparse.ArgumentParser(description='Query ThreatExchange for Compromised Credentials')
    parser.add_argument('-o', '--output', default='/dev/stdout',
                        help='[OPTIONAL] output file path.')
    parser.add_argument('-s', '--since', help='[OPTIONAL] Start time for search')
    parser.add_argument('-u', '--until', help='[OPTIONAL] End time for search')
    parser.add_argument('-l', '--limit', help='[OPTIONAL] Maximum number of results')
    return parser.parse_args()


def main():
    args = get_args()
    with open(args.output, 'w') as fp:
        run_query(args, fp)


if __name__ == '__main__':
    main()
arirubinstein/ThreatExchange
scripts/get_compromised_credentials.py
Python
bsd-3-clause
2,285
""" kombu.transport.pyamqp ====================== pure python amqp transport. """ from __future__ import absolute_import import amqp from kombu.five import items from kombu.utils.amq_manager import get_manager from kombu.utils.text import version_string_as_tuple from . import base DEFAULT_PORT = 5672 DEFAULT_SSL_PORT = 5671 class Message(base.Message): def __init__(self, channel, msg, **kwargs): props = msg.properties super(Message, self).__init__( channel, body=msg.body, delivery_tag=msg.delivery_tag, content_type=props.get('content_type'), content_encoding=props.get('content_encoding'), delivery_info=msg.delivery_info, properties=msg.properties, headers=props.get('application_headers') or {}, **kwargs) class Channel(amqp.Channel, base.StdChannel): Message = Message def prepare_message(self, body, priority=None, content_type=None, content_encoding=None, headers=None, properties=None, _Message=amqp.Message): """Prepares message so that it can be sent using this transport.""" return _Message( body, priority=priority, content_type=content_type, content_encoding=content_encoding, application_headers=headers, **properties or {} ) def message_to_python(self, raw_message): """Convert encoded message body back to a Python value.""" return self.Message(self, raw_message) class Connection(amqp.Connection): Channel = Channel class Transport(base.Transport): Connection = Connection default_port = DEFAULT_PORT default_ssl_port = DEFAULT_SSL_PORT # it's very annoying that pyamqp sometimes raises AttributeError # if the connection is lost, but nothing we can do about that here. connection_errors = amqp.Connection.connection_errors channel_errors = amqp.Connection.channel_errors recoverable_connection_errors = \ amqp.Connection.recoverable_connection_errors recoverable_channel_errors = amqp.Connection.recoverable_channel_errors driver_name = 'py-amqp' driver_type = 'amqp' supports_heartbeats = True supports_ev = True def __init__(self, client, default_port=None, default_ssl_port=None, **kwargs): self.client = client self.default_port = default_port or self.default_port self.default_ssl_port = default_ssl_port or self.default_ssl_port def driver_version(self): return amqp.__version__ def create_channel(self, connection): return connection.channel() def drain_events(self, connection, **kwargs): return connection.drain_events(**kwargs) def establish_connection(self): """Establish connection to the AMQP broker.""" conninfo = self.client for name, default_value in items(self.default_connection_params): if not getattr(conninfo, name, None): setattr(conninfo, name, default_value) if conninfo.hostname == 'localhost': conninfo.hostname = '127.0.0.1' opts = dict({ 'host': conninfo.host, 'userid': conninfo.userid, 'password': conninfo.password, 'login_method': conninfo.login_method, 'virtual_host': conninfo.virtual_host, 'insist': conninfo.insist, 'ssl': conninfo.ssl, 'connect_timeout': conninfo.connect_timeout, 'heartbeat': conninfo.heartbeat, }, **conninfo.transport_options or {}) conn = self.Connection(**opts) conn.client = self.client return conn def verify_connection(self, connection): return connection.connected def close_connection(self, connection): """Close the AMQP broker connection.""" connection.client = None connection.close() def get_heartbeat_interval(self, connection): return connection.heartbeat def register_with_event_loop(self, connection, loop): loop.add_reader(connection.sock, self.on_readable, connection, loop) def heartbeat_check(self, connection, rate=2): return 
connection.heartbeat_tick(rate=rate) def qos_semantics_matches_spec(self, connection): props = connection.server_properties if props.get('product') == 'RabbitMQ': return version_string_as_tuple(props['version']) < (3, 3) return True @property def default_connection_params(self): return { 'userid': 'guest', 'password': 'guest', 'port': (self.default_ssl_port if self.client.ssl else self.default_port), 'hostname': 'localhost', 'login_method': 'AMQPLAIN', } def get_manager(self, *args, **kwargs): return get_manager(self.client, *args, **kwargs)
sunze/py_flask
venv/lib/python3.4/site-packages/kombu/transport/pyamqp.py
Python
mit
5,008
#------------------------------------------------------------------------------ # pycparser: c_generator.py # # C code generator from pycparser AST nodes. # # Copyright (C) 2008-2012, Eli Bendersky # License: BSD #------------------------------------------------------------------------------ from . import c_ast class CGenerator(object): """ Uses the same visitor pattern as c_ast.NodeVisitor, but modified to return a value from each visit method, using string accumulation in generic_visit. """ def __init__(self): self.output = '' # Statements start with indentation of self.indent_level spaces, using # the _make_indent method # self.indent_level = 0 def _make_indent(self): return ' ' * self.indent_level def visit(self, node): method = 'visit_' + node.__class__.__name__ return getattr(self, method, self.generic_visit)(node) def generic_visit(self, node): #~ print('generic:', type(node)) if node is None: return '' else: return ''.join(self.visit(c) for c in node.children()) def visit_Constant(self, n): return n.value def visit_ID(self, n): return n.name def visit_ArrayRef(self, n): arrref = self._parenthesize_unless_simple(n.name) return arrref + '[' + self.visit(n.subscript) + ']' def visit_StructRef(self, n): sref = self._parenthesize_unless_simple(n.name) return sref + n.type + self.visit(n.field) def visit_FuncCall(self, n): fref = self._parenthesize_unless_simple(n.name) return fref + '(' + self.visit(n.args) + ')' def visit_UnaryOp(self, n): operand = self._parenthesize_unless_simple(n.expr) if n.op == 'p++': return '%s++' % operand elif n.op == 'p--': return '%s--' % operand elif n.op == 'sizeof': # Always parenthesize the argument of sizeof since it can be # a name. return 'sizeof(%s)' % self.visit(n.expr) else: return '%s%s' % (n.op, operand) def visit_BinaryOp(self, n): lval_str = self._parenthesize_if(n.left, lambda d: not self._is_simple_node(d)) rval_str = self._parenthesize_if(n.right, lambda d: not self._is_simple_node(d)) return '%s %s %s' % (lval_str, n.op, rval_str) def visit_Assignment(self, n): rval_str = self._parenthesize_if( n.rvalue, lambda n: isinstance(n, c_ast.Assignment)) return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str) def visit_IdentifierType(self, n): return ' '.join(n.names) def visit_Decl(self, n, no_type=False): # no_type is used when a Decl is part of a DeclList, where the type is # explicitly only for the first delaration in a list. 
# s = n.name if no_type else self._generate_decl(n) if n.bitsize: s += ' : ' + self.visit(n.bitsize) if n.init: if isinstance(n.init, c_ast.InitList): s += ' = {' + self.visit(n.init) + '}' elif isinstance(n.init, c_ast.ExprList): s += ' = (' + self.visit(n.init) + ')' else: s += ' = ' + self.visit(n.init) return s def visit_DeclList(self, n): s = self.visit(n.decls[0]) if len(n.decls) > 1: s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True) for decl in n.decls[1:]) return s def visit_Typedef(self, n): s = '' if n.storage: s += ' '.join(n.storage) + ' ' s += self._generate_type(n.type) return s def visit_Cast(self, n): s = '(' + self._generate_type(n.to_type) + ')' return s + ' ' + self._parenthesize_unless_simple(n.expr) def visit_ExprList(self, n): visited_subexprs = [] for expr in n.exprs: if isinstance(expr, c_ast.ExprList): visited_subexprs.append('{' + self.visit(expr) + '}') else: visited_subexprs.append(self.visit(expr)) return ', '.join(visited_subexprs) def visit_InitList(self, n): visited_subexprs = [] for expr in n.exprs: if isinstance(expr, c_ast.InitList): visited_subexprs.append('(' + self.visit(expr) + ')') else: visited_subexprs.append(self.visit(expr)) return ', '.join(visited_subexprs) def visit_Enum(self, n): s = 'enum' if n.name: s += ' ' + n.name if n.values: s += ' {' for i, enumerator in enumerate(n.values.enumerators): s += enumerator.name if enumerator.value: s += ' = ' + self.visit(enumerator.value) if i != len(n.values.enumerators) - 1: s += ', ' s += '}' return s def visit_FuncDef(self, n): decl = self.visit(n.decl) self.indent_level = 0 body = self.visit(n.body) if n.param_decls: knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls) return decl + '\n' + knrdecls + ';\n' + body + '\n' else: return decl + '\n' + body + '\n' def visit_FileAST(self, n): s = '' for ext in n.ext: if isinstance(ext, c_ast.FuncDef): s += self.visit(ext) else: s += self.visit(ext) + ';\n' return s def visit_Compound(self, n): s = self._make_indent() + '{\n' self.indent_level += 2 if n.block_items: s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items) self.indent_level -= 2 s += self._make_indent() + '}\n' return s def visit_EmptyStatement(self, n): return ';' def visit_ParamList(self, n): return ', '.join(self.visit(param) for param in n.params) def visit_Return(self, n): s = 'return' if n.expr: s += ' ' + self.visit(n.expr) return s + ';' def visit_Break(self, n): return 'break;' def visit_Continue(self, n): return 'continue;' def visit_TernaryOp(self, n): s = self.visit(n.cond) + ' ? 
' s += self.visit(n.iftrue) + ' : ' s += self.visit(n.iffalse) return s def visit_If(self, n): s = 'if (' if n.cond: s += self.visit(n.cond) s += ')\n' s += self._generate_stmt(n.iftrue, add_indent=True) if n.iffalse: s += self._make_indent() + 'else\n' s += self._generate_stmt(n.iffalse, add_indent=True) return s def visit_For(self, n): s = 'for (' if n.init: s += self.visit(n.init) s += ';' if n.cond: s += ' ' + self.visit(n.cond) s += ';' if n.next: s += ' ' + self.visit(n.next) s += ')\n' s += self._generate_stmt(n.stmt, add_indent=True) return s def visit_While(self, n): s = 'while (' if n.cond: s += self.visit(n.cond) s += ')\n' s += self._generate_stmt(n.stmt, add_indent=True) return s def visit_DoWhile(self, n): s = 'do\n' s += self._generate_stmt(n.stmt, add_indent=True) s += self._make_indent() + 'while (' if n.cond: s += self.visit(n.cond) s += ');' return s def visit_Switch(self, n): s = 'switch (' + self.visit(n.cond) + ')\n' s += self._generate_stmt(n.stmt, add_indent=True) return s def visit_Case(self, n): s = 'case ' + self.visit(n.expr) + ':\n' for stmt in n.stmts: s += self._generate_stmt(stmt, add_indent=True) return s def visit_Default(self, n): s = 'default:\n' for stmt in n.stmts: s += self._generate_stmt(stmt, add_indent=True) return s def visit_Label(self, n): return n.name + ':\n' + self._generate_stmt(n.stmt) def visit_Goto(self, n): return 'goto ' + n.name + ';' def visit_EllipsisParam(self, n): return '...' def visit_Struct(self, n): return self._generate_struct_union(n, 'struct') def visit_Typename(self, n): return self._generate_type(n.type) def visit_Union(self, n): return self._generate_struct_union(n, 'union') def visit_NamedInitializer(self, n): s = '' for name in n.name: if isinstance(name, c_ast.ID): s += '.' + name.name elif isinstance(name, c_ast.Constant): s += '[' + name.value + ']' s += ' = ' + self.visit(n.expr) return s def _generate_struct_union(self, n, name): """ Generates code for structs and unions. name should be either 'struct' or union. """ s = name + ' ' + (n.name or '') if n.decls: s += '\n' s += self._make_indent() self.indent_level += 2 s += '{\n' for decl in n.decls: s += self._generate_stmt(decl) self.indent_level -= 2 s += self._make_indent() + '}' return s def _generate_stmt(self, n, add_indent=False): """ Generation from a statement node. This method exists as a wrapper for individual visit_* methods to handle different treatment of some statements in this context. """ typ = type(n) if add_indent: self.indent_level += 2 indent = self._make_indent() if add_indent: self.indent_level -= 2 if typ in ( c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp, c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef, c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef): # These can also appear in an expression context so no semicolon # is added to them automatically # return indent + self.visit(n) + ';\n' elif typ in (c_ast.Compound,): # No extra indentation required before the opening brace of a # compound - because it consists of multiple lines it has to # compute its own indentation. # return self.visit(n) else: return indent + self.visit(n) + '\n' def _generate_decl(self, n): """ Generation from a Decl node. """ s = '' if n.funcspec: s = ' '.join(n.funcspec) + ' ' if n.storage: s += ' '.join(n.storage) + ' ' s += self._generate_type(n.type) return s def _generate_type(self, n, modifiers=[]): """ Recursive generation from a type node. n is the type node. 
modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers encountered on the way down to a TypeDecl, to allow proper generation from it. """ typ = type(n) #~ print(n, modifiers) if typ == c_ast.TypeDecl: s = '' if n.quals: s += ' '.join(n.quals) + ' ' s += self.visit(n.type) nstr = n.declname if n.declname else '' # Resolve modifiers. # Wrap in parens to distinguish pointer to array and pointer to # function syntax. # for i, modifier in enumerate(modifiers): if isinstance(modifier, c_ast.ArrayDecl): if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)): nstr = '(' + nstr + ')' nstr += '[' + self.visit(modifier.dim) + ']' elif isinstance(modifier, c_ast.FuncDecl): if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)): nstr = '(' + nstr + ')' nstr += '(' + self.visit(modifier.args) + ')' elif isinstance(modifier, c_ast.PtrDecl): if modifier.quals: nstr = '* %s %s' % (' '.join(modifier.quals), nstr) else: nstr = '*' + nstr if nstr: s += ' ' + nstr return s elif typ == c_ast.Decl: return self._generate_decl(n.type) elif typ == c_ast.Typename: return self._generate_type(n.type) elif typ == c_ast.IdentifierType: return ' '.join(n.names) + ' ' elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl): return self._generate_type(n.type, modifiers + [n]) else: return self.visit(n) def _parenthesize_if(self, n, condition): """ Visits 'n' and returns its string representation, parenthesized if the condition function applied to the node returns True. """ s = self.visit(n) if condition(n): return '(' + s + ')' else: return s def _parenthesize_unless_simple(self, n): """ Common use case for _parenthesize_if """ return self._parenthesize_if(n, lambda d: not self._is_simple_node(d)) def _is_simple_node(self, n): """ Returns True for nodes that are "simple" - i.e. nodes that always have higher precedence than operators. """ return isinstance(n,( c_ast.Constant, c_ast.ID, c_ast.ArrayRef, c_ast.StructRef, c_ast.FuncCall))
bussiere/pypyjs
website/demo/home/rfk/repos/pypy/lib_pypy/cffi/_pycparser/c_generator.py
Python
mit
13,798
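A minimal driving sketch for a generator like the one above, assuming the standalone pycparser package rather than the copy bundled under cffi._pycparser (whose import path differs):

# Parse a small C snippet and re-emit it from the AST.
from pycparser import c_parser
from pycparser.c_generator import CGenerator

source = "int add(int a, int b) { return a + b; }"

ast = c_parser.CParser().parse(source)   # build the AST
print(CGenerator().visit(ast))           # regenerate C text from the AST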
# postgresql/pypostgresql.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: postgresql+pypostgresql
    :name: py-postgresql
    :dbapi: pypostgresql
    :connectstring: postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]
    :url: http://python.projects.pgfoundry.org/

.. note::

    The pypostgresql dialect is **not tested as part of SQLAlchemy's continuous
    integration** and may have unresolved issues.  The recommended PostgreSQL
    driver is psycopg2.

"""  # noqa

from .base import PGDialect
from .base import PGExecutionContext
from ... import processors
from ... import types as sqltypes
from ... import util


class PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect):
        return processors.to_str

    def result_processor(self, dialect, coltype):
        if self.asdecimal:
            return None
        else:
            return processors.to_float


class PGExecutionContext_pypostgresql(PGExecutionContext):
    pass


class PGDialect_pypostgresql(PGDialect):
    driver = "pypostgresql"

    supports_unicode_statements = True
    supports_unicode_binds = True
    description_encoding = None
    default_paramstyle = "pyformat"

    # requires trunk version to support sane rowcounts
    # TODO: use dbapi version information to set this flag appropriately
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False

    execution_ctx_cls = PGExecutionContext_pypostgresql
    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric: PGNumeric,
            # prevents PGNumeric from being used
            sqltypes.Float: sqltypes.Float,
        },
    )

    @classmethod
    def dbapi(cls):
        from postgresql.driver import dbapi20

        return dbapi20

    _DBAPI_ERROR_NAMES = [
        "Error",
        "InterfaceError",
        "DatabaseError",
        "DataError",
        "OperationalError",
        "IntegrityError",
        "InternalError",
        "ProgrammingError",
        "NotSupportedError",
    ]

    @util.memoized_property
    def dbapi_exception_translation_map(self):
        if self.dbapi is None:
            return {}

        return dict(
            (getattr(self.dbapi, name).__name__, name)
            for name in self._DBAPI_ERROR_NAMES
        )

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username="user")
        if "port" in opts:
            opts["port"] = int(opts["port"])
        else:
            opts["port"] = 5432
        opts.update(url.query)
        return ([], opts)

    def is_disconnect(self, e, connection, cursor):
        return "connection is closed" in str(e)


dialect = PGDialect_pypostgresql
gltn/stdm
stdm/third_party/sqlalchemy/dialects/postgresql/pypostgresql.py
Python
gpl-2.0
2,915
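A hedged usage sketch for the dialect above; the connect string format comes from the dialect's docstring, while the host, credentials and database name are placeholders:

# Requires the py-postgresql DBAPI to be installed; details are placeholders.
from sqlalchemy import create_engine, text

engine = create_engine(
    "postgresql+pypostgresql://user:password@localhost:5432/mydb")

with engine.connect() as conn:
    # Trivial round trip to confirm the dialect/DBAPI pairing works.
    print(conn.execute(text("SELECT 1")).scalar())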
# Copyright (C) 2007, One Laptop Per Child
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
AbrahmAB/sugar
src/jarabe/journal/__init__.py
Python
gpl-3.0
679
#!/usr/bin/python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Certificate chain with 1 intermediate and a non-self-signed trust anchor.
Verification should succeed, it doesn't matter that the root was not
self-signed if it is designated as the trust anchor."""

import common

uber_root = common.create_self_signed_root_certificate('UberRoot')

# Non-self-signed root certificate (used as trust anchor)
root = common.create_intermediate_certificate('Root', uber_root)

# Intermediate certificate.
intermediate = common.create_intermediate_certificate('Intermediate', root)

# Target certificate.
target = common.create_end_entity_certificate('Target', intermediate)

chain = [target, intermediate]
trusted = common.TrustAnchor(root, constrained=False)
time = common.DEFAULT_TIME
verify_result = True
errors = None

common.write_test_file(__doc__, chain, trusted, time, verify_result, errors)
geminy/aidear
oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/net/data/verify_certificate_chain_unittest/generate-unconstrained-non-self-signed-root.py
Python
gpl-3.0
1,019
""" WSGI config for OIPA project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "OIPA.settings" os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings") # This application object is used by any WSGI server configured to use this # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)
schlos/OIPA-V2.1
OIPA/OIPA/wsgi.py
Python
agpl-3.0
1,413
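A sketch of the middleware hook the commented-out lines above point at; the HeaderMiddleware class and the header it adds are purely illustrative assumptions:

import os

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "OIPA.settings")

from django.core.wsgi import get_wsgi_application


class HeaderMiddleware(object):
    """Illustrative-only WSGI wrapper that adds one response header."""

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        def _start_response(status, headers, exc_info=None):
            return start_response(
                status, list(headers) + [("X-Served-By", "OIPA")], exc_info)
        return self.app(environ, _start_response)


# Wrap the Django application the same way the commented example suggests.
application = HeaderMiddleware(get_wsgi_application())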
############################################################################## # Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/spack/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * import os class Psi4(CMakePackage): """Psi4 is an open-source suite of ab initio quantum chemistry programs designed for efficient, high-accuracy simulations of a variety of molecular properties.""" homepage = "http://www.psicode.org/" url = "https://github.com/psi4/psi4/archive/0.5.tar.gz" version('0.5', '53041b8a9be3958384171d0d22f9fdd0') variant('build_type', default='Release', description='The build type to build', values=('Debug', 'Release')) # Required dependencies depends_on('blas') depends_on('lapack') depends_on('boost+chrono+filesystem+python+regex+serialization+system+timer+thread') depends_on('python') depends_on('[email protected]:', type='build') depends_on('py-numpy', type=('build', 'run')) # Optional dependencies # TODO: add packages for these # depends_on('perl') # depends_on('erd') # depends_on('pcm-solver') # depends_on('chemps2') def cmake_args(self): spec = self.spec return [ '-DBLAS_TYPE={0}'.format(spec['blas'].name.upper()), '-DBLAS_LIBRARIES={0}'.format(spec['blas'].libs.joined()), '-DLAPACK_TYPE={0}'.format(spec['lapack'].name.upper()), '-DLAPACK_LIBRARIES={0}'.format( spec['lapack'].libs.joined()), '-DBOOST_INCLUDEDIR={0}'.format(spec['boost'].prefix.include), '-DBOOST_LIBRARYDIR={0}'.format(spec['boost'].prefix.lib), '-DENABLE_CHEMPS2=OFF' ] @run_after('install') def filter_compilers(self, spec, prefix): """Run after install to tell the configuration files to use the compilers that Spack built the package with. If this isn't done, they'll have PLUGIN_CXX set to Spack's generic cxx. 
We want it to be bound to whatever compiler it was built with.""" kwargs = {'ignore_absent': True, 'backup': False, 'string': True} cc_files = ['bin/psi4-config'] cxx_files = ['bin/psi4-config', 'include/psi4/psiconfig.h'] template = 'share/psi4/plugin/Makefile.template' for filename in cc_files: filter_file(os.environ['CC'], self.compiler.cc, os.path.join(prefix, filename), **kwargs) for filename in cxx_files: filter_file(os.environ['CXX'], self.compiler.cxx, os.path.join(prefix, filename), **kwargs) # The binary still keeps track of the compiler used to install Psi4 # and uses it when creating a plugin template filter_file('@PLUGIN_CXX@', self.compiler.cxx, os.path.join(prefix, template), **kwargs) # The binary links to the build include directory instead of the # installation include directory: # https://github.com/psi4/psi4/issues/410 filter_file('@PLUGIN_INCLUDES@', '-I{0}'.format( ' -I'.join([ os.path.join(spec['psi4'].prefix.include, 'psi4'), os.path.join(spec['boost'].prefix.include, 'boost'), os.path.join(spec['python'].headers.directories[0]), spec['lapack'].prefix.include, spec['blas'].prefix.include, '/usr/include' ]) ), os.path.join(prefix, template), **kwargs)
EmreAtes/spack
var/spack/repos/builtin/packages/psi4/package.py
Python
lgpl-2.1
4,596
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import print_function

# $example on$
from pyspark.ml.feature import IndexToString, StringIndexer
# $example off$
from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("IndexToStringExample")\
        .getOrCreate()

    # $example on$
    df = spark.createDataFrame(
        [(0, "a"), (1, "b"), (2, "c"), (3, "a"), (4, "a"), (5, "c")],
        ["id", "category"])

    stringIndexer = StringIndexer(inputCol="category", outputCol="categoryIndex")
    model = stringIndexer.fit(df)
    indexed = model.transform(df)

    converter = IndexToString(inputCol="categoryIndex", outputCol="originalCategory")
    converted = converter.transform(indexed)

    converted.select("id", "originalCategory").show()
    # $example off$

    spark.stop()
mrchristine/spark-examples-dbc
src/main/python/ml/index_to_string_example.py
Python
apache-2.0
1,615
# coding: utf-8 from __future__ import print_function, unicode_literals import os from boxsdk import Client from boxsdk.exception import BoxAPIException from boxsdk.object.collaboration import CollaborationRole from auth import authenticate def run_user_example(client): # 'me' is a handy value to get info on the current authenticated user. me = client.user(user_id='me').get(fields=['login']) print('The email of the user is: {0}'.format(me['login'])) def run_folder_examples(client): root_folder = client.folder(folder_id='0').get() print('The root folder is owned by: {0}'.format(root_folder.owned_by['login'])) items = root_folder.get_items(limit=100, offset=0) print('This is the first 100 items in the root folder:') for item in items: print(" " + item.name) def run_collab_examples(client): root_folder = client.folder(folder_id='0') collab_folder = root_folder.create_subfolder('collab folder') try: print('Folder {0} created'.format(collab_folder.get()['name'])) collaboration = collab_folder.add_collaborator('[email protected]', CollaborationRole.VIEWER) print('Created a collaboration') try: modified_collaboration = collaboration.update_info(role=CollaborationRole.EDITOR) print('Modified a collaboration: {0}'.format(modified_collaboration.role)) finally: collaboration.delete() print('Deleted a collaboration') finally: # Clean up print('Delete folder collab folder succeeded: {0}'.format(collab_folder.delete())) def rename_folder(client): root_folder = client.folder(folder_id='0') foo = root_folder.create_subfolder('foo') try: print('Folder {0} created'.format(foo.get()['name'])) bar = foo.rename('bar') print('Renamed to {0}'.format(bar.get()['name'])) finally: print('Delete folder bar succeeded: {0}'.format(foo.delete())) def get_folder_shared_link(client): root_folder = client.folder(folder_id='0') collab_folder = root_folder.create_subfolder('shared link folder') try: print('Folder {0} created'.format(collab_folder.get().name)) shared_link = collab_folder.get_shared_link() print('Got shared link:' + shared_link) finally: print('Delete folder collab folder succeeded: {0}'.format(collab_folder.delete())) def upload_file(client): root_folder = client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt') try: print('{0} uploaded: '.format(a_file.get()['name'])) finally: print('Delete i-am-a-file.txt succeeded: {0}'.format(a_file.delete())) def upload_accelerator(client): root_folder = client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') a_file = root_folder.upload(file_path, file_name='i-am-a-file.txt', upload_using_accelerator=True) try: print('{0} uploaded via Accelerator: '.format(a_file.get()['name'])) file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt') a_file = a_file.update_contents(file_v2_path, upload_using_accelerator=True) print('{0} updated via Accelerator: '.format(a_file.get()['name'])) finally: print('Delete i-am-a-file.txt succeeded: {0}'.format(a_file.delete())) def rename_file(client): root_folder = client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') foo = root_folder.upload(file_path, file_name='foo.txt') try: print('{0} uploaded '.format(foo.get()['name'])) bar = foo.rename('bar.txt') print('Rename succeeded: {0}'.format(bool(bar))) finally: foo.delete() def update_file(client): root_folder = 
client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') file_v1 = root_folder.upload(file_path, file_name='file_v1.txt') try: # print 'File content after upload: {}'.format(file_v1.content()) file_v2_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file_v2.txt') file_v2 = file_v1.update_contents(file_v2_path) # print 'File content after update: {}'.format(file_v2.content()) finally: file_v1.delete() def search_files(client): search_results = client.search( 'i-am-a-file.txt', limit=2, offset=0, ancestor_folders=[client.folder(folder_id='0')], file_extensions=['txt'], ) for item in search_results: item_with_name = item.get(fields=['name']) print('matching item: ' + item_with_name.id) else: print('no matching items') def copy_item(client): root_folder = client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') a_file = root_folder.upload(file_path, file_name='a file.txt') try: subfolder1 = root_folder.create_subfolder('copy_sub') try: a_file.copy(subfolder1) print(subfolder1.get_items(limit=10, offset=0)) subfolder2 = root_folder.create_subfolder('copy_sub2') try: subfolder1.copy(subfolder2) print(subfolder2.get_items(limit=10, offset=0)) finally: subfolder2.delete() finally: subfolder1.delete() finally: a_file.delete() def move_item(client): root_folder = client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') a_file = root_folder.upload(file_path, file_name='a file.txt') try: subfolder1 = root_folder.create_subfolder('move_sub') try: a_file.move(subfolder1) print(subfolder1.get_items(limit=10, offset=0)) subfolder2 = root_folder.create_subfolder('move_sub2') try: subfolder1.move(subfolder2) print(subfolder2.get_items(limit=10, offset=0)) finally: subfolder2.delete() finally: try: subfolder1.delete() except BoxAPIException: pass finally: try: a_file.delete() except BoxAPIException: pass def get_events(client): print(client.events().get_events(limit=100, stream_position='now')) def get_latest_stream_position(client): print(client.events().get_latest_stream_position()) def long_poll(client): print(client.events().long_poll()) def _delete_leftover_group(existing_groups, group_name): """ delete group if it already exists """ existing_group = next((g for g in existing_groups if g.name == group_name), None) if existing_group: existing_group.delete() def run_groups_example(client): """ Shows how to interact with 'Groups' in the Box API. How to: - Get info about all the Groups to which the current user belongs - Create a Group - Rename a Group - Add a member to the group - Remove a member from a group - Delete a Group """ try: # First delete group if it already exists original_groups = client.groups() _delete_leftover_group(original_groups, 'box_sdk_demo_group') _delete_leftover_group(original_groups, 'renamed_box_sdk_demo_group') new_group = client.create_group('box_sdk_demo_group') except BoxAPIException as ex: if ex.status != 403: raise print('The authenticated user does not have permissions to manage groups. 
Skipping the test of this demo.') return print('New group:', new_group.name, new_group.id) new_group = new_group.update_info({'name': 'renamed_box_sdk_demo_group'}) print("Group's new name:", new_group.name) me_dict = client.user().get(fields=['login']) me = client.user(user_id=me_dict['id']) group_membership = new_group.add_member(me, 'member') members = list(new_group.membership()) print('The group has a membership of: ', len(members)) print('The id of that membership: ', group_membership.object_id) group_membership.delete() print('After deleting that membership, the group has a membership of: ', len(list(new_group.membership()))) new_group.delete() groups_after_deleting_demo = client.groups() has_been_deleted = not any(g.name == 'renamed_box_sdk_demo_group' for g in groups_after_deleting_demo) print('The new group has been deleted: ', has_been_deleted) def run_metadata_example(client): root_folder = client.folder(folder_id='0') file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'file.txt') foo = root_folder.upload(file_path, file_name='foo.txt') print('{0} uploaded '.format(foo.get()['name'])) try: metadata = foo.metadata() metadata.create({'foo': 'bar'}) print('Created metadata: {0}'.format(metadata.get())) update = metadata.start_update() update.update('/foo', 'baz', 'bar') print('Updated metadata: {0}'.format(metadata.update(update))) finally: foo.delete() def run_examples(oauth): client = Client(oauth) run_user_example(client) run_folder_examples(client) run_collab_examples(client) rename_folder(client) get_folder_shared_link(client) upload_file(client) rename_file(client) update_file(client) search_files(client) copy_item(client) move_item(client) get_events(client) get_latest_stream_position(client) # long_poll(client) # Enterprise accounts only run_groups_example(client) run_metadata_example(client) # Premium Apps only upload_accelerator(client) def main(): # Please notice that you need to put in your client id and client secret in demo/auth.py in order to make this work. oauth = authenticate() run_examples(oauth) os._exit(0) if __name__ == '__main__': main()
lkabongoVC/box-python-sdk
demo/example.py
Python
apache-2.0
10,222
""" GeoJSON example using addItem Python 2/3 ArcREST version 3.5.0 """ from __future__ import print_function import arcrest if __name__ == "__main__": username = "" password = "" geojsonFile = r"" sh = arcrest.AGOLTokenSecurityHandler(username, password) admin = arcrest.manageorg.Administration(securityHandler=sh) user = admin.content.users.user() ip = arcrest.manageorg.ItemParameter() ip.title = "MyGeoJSONTestFile" ip.type = "GeoJson" ip.tags = "Geo1,Geo2" ip.description = "Publishing a geojson file" addedItem = user.addItem(itemParameters=ip, filePath=geojsonFile) itemId = addedItem.id pp = arcrest.manageorg.PublishGeoJSONParameter() pp.name = "Geojsonrocks" pp.hasStaticData = True print( user.publishItem(fileType="geojson", publishParameters=pp, itemId=itemId, wait=True))
Esri/ArcREST
samples/publishingGeoJSON.py
Python
apache-2.0
864
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('event_mapper', '0005_user_is_confirmed'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='date_time',
            field=models.DateTimeField(help_text=b'Date and time when the event happened.', verbose_name=b'Date and Time'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='event',
            name='victim',
            field=models.ForeignKey(default=0, verbose_name=b'Victim', to='event_mapper.Victim', help_text=b'The victim of the event.'),
            preserve_default=True,
        ),
    ]
MariaSolovyeva/watchkeeper
django_project/event_mapper/migrations/0006_auto_20150505_0922.py
Python
bsd-2-clause
789
import datetime from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.db import models from django.http import Http404 from django.utils import timezone from django.utils.functional import cached_property from django.utils.translation import gettext as _ from django.views.generic.base import View from django.views.generic.detail import ( BaseDetailView, SingleObjectTemplateResponseMixin, ) from django.views.generic.list import ( MultipleObjectMixin, MultipleObjectTemplateResponseMixin, ) class YearMixin: """Mixin for views manipulating year-based data.""" year_format = '%Y' year = None def get_year_format(self): """ Get a year format string in strptime syntax to be used to parse the year from url variables. """ return self.year_format def get_year(self): """Return the year for which this view should display data.""" year = self.year if year is None: try: year = self.kwargs['year'] except KeyError: try: year = self.request.GET['year'] except KeyError: raise Http404(_("No year specified")) return year def get_next_year(self, date): """Get the next valid year.""" return _get_next_prev(self, date, is_previous=False, period='year') def get_previous_year(self, date): """Get the previous valid year.""" return _get_next_prev(self, date, is_previous=True, period='year') def _get_next_year(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ try: return date.replace(year=date.year + 1, month=1, day=1) except ValueError: raise Http404(_("Date out of range")) def _get_current_year(self, date): """Return the start date of the current interval.""" return date.replace(month=1, day=1) class MonthMixin: """Mixin for views manipulating month-based data.""" month_format = '%b' month = None def get_month_format(self): """ Get a month format string in strptime syntax to be used to parse the month from url variables. """ return self.month_format def get_month(self): """Return the month for which this view should display data.""" month = self.month if month is None: try: month = self.kwargs['month'] except KeyError: try: month = self.request.GET['month'] except KeyError: raise Http404(_("No month specified")) return month def get_next_month(self, date): """Get the next valid month.""" return _get_next_prev(self, date, is_previous=False, period='month') def get_previous_month(self, date): """Get the previous valid month.""" return _get_next_prev(self, date, is_previous=True, period='month') def _get_next_month(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ if date.month == 12: try: return date.replace(year=date.year + 1, month=1, day=1) except ValueError: raise Http404(_("Date out of range")) else: return date.replace(month=date.month + 1, day=1) def _get_current_month(self, date): """Return the start date of the previous interval.""" return date.replace(day=1) class DayMixin: """Mixin for views manipulating day-based data.""" day_format = '%d' day = None def get_day_format(self): """ Get a day format string in strptime syntax to be used to parse the day from url variables. 
""" return self.day_format def get_day(self): """Return the day for which this view should display data.""" day = self.day if day is None: try: day = self.kwargs['day'] except KeyError: try: day = self.request.GET['day'] except KeyError: raise Http404(_("No day specified")) return day def get_next_day(self, date): """Get the next valid day.""" return _get_next_prev(self, date, is_previous=False, period='day') def get_previous_day(self, date): """Get the previous valid day.""" return _get_next_prev(self, date, is_previous=True, period='day') def _get_next_day(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ return date + datetime.timedelta(days=1) def _get_current_day(self, date): """Return the start date of the current interval.""" return date class WeekMixin: """Mixin for views manipulating week-based data.""" week_format = '%U' week = None def get_week_format(self): """ Get a week format string in strptime syntax to be used to parse the week from url variables. """ return self.week_format def get_week(self): """Return the week for which this view should display data.""" week = self.week if week is None: try: week = self.kwargs['week'] except KeyError: try: week = self.request.GET['week'] except KeyError: raise Http404(_("No week specified")) return week def get_next_week(self, date): """Get the next valid week.""" return _get_next_prev(self, date, is_previous=False, period='week') def get_previous_week(self, date): """Get the previous valid week.""" return _get_next_prev(self, date, is_previous=True, period='week') def _get_next_week(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ try: return date + datetime.timedelta(days=7 - self._get_weekday(date)) except OverflowError: raise Http404(_("Date out of range")) def _get_current_week(self, date): """Return the start date of the current interval.""" return date - datetime.timedelta(self._get_weekday(date)) def _get_weekday(self, date): """ Return the weekday for a given date. The first day according to the week format is 0 and the last day is 6. """ week_format = self.get_week_format() if week_format == '%W': # week starts on Monday return date.weekday() elif week_format == '%U': # week starts on Sunday return (date.weekday() + 1) % 7 else: raise ValueError("unknown week format: %s" % week_format) class DateMixin: """Mixin class for views manipulating date-based data.""" date_field = None allow_future = False def get_date_field(self): """Get the name of the date field to be used to filter by.""" if self.date_field is None: raise ImproperlyConfigured("%s.date_field is required." % self.__class__.__name__) return self.date_field def get_allow_future(self): """ Return `True` if the view should be allowed to display objects from the future. """ return self.allow_future # Note: the following three methods only work in subclasses that also # inherit SingleObjectMixin or MultipleObjectMixin. @cached_property def uses_datetime_field(self): """ Return `True` if the date field is a `DateTimeField` and `False` if it's a `DateField`. """ model = self.get_queryset().model if self.model is None else self.model field = model._meta.get_field(self.get_date_field()) return isinstance(field, models.DateTimeField) def _make_date_lookup_arg(self, value): """ Convert a date into a datetime when the date field is a DateTimeField. 
When time zone support is enabled, `date` is assumed to be in the current time zone, so that displayed items are consistent with the URL. """ if self.uses_datetime_field: value = datetime.datetime.combine(value, datetime.time.min) if settings.USE_TZ: value = timezone.make_aware(value, timezone.get_current_timezone()) return value def _make_single_date_lookup(self, date): """ Get the lookup kwargs for filtering on a single date. If the date field is a DateTimeField, we can't just filter on date_field=date because that doesn't take the time into account. """ date_field = self.get_date_field() if self.uses_datetime_field: since = self._make_date_lookup_arg(date) until = self._make_date_lookup_arg(date + datetime.timedelta(days=1)) return { '%s__gte' % date_field: since, '%s__lt' % date_field: until, } else: # Skip self._make_date_lookup_arg, it's a no-op in this branch. return {date_field: date} class BaseDateListView(MultipleObjectMixin, DateMixin, View): """Abstract base class for date-based views displaying a list of objects.""" allow_empty = False date_list_period = 'year' def get(self, request, *args, **kwargs): self.date_list, self.object_list, extra_context = self.get_dated_items() context = self.get_context_data( object_list=self.object_list, date_list=self.date_list, **extra_context ) return self.render_to_response(context) def get_dated_items(self): """Obtain the list of dates and items.""" raise NotImplementedError('A DateView must provide an implementation of get_dated_items()') def get_ordering(self): """ Return the field or fields to use for ordering the queryset; use the date field by default. """ return '-%s' % self.get_date_field() if self.ordering is None else self.ordering def get_dated_queryset(self, **lookup): """ Get a queryset properly filtered according to `allow_future` and any extra lookup kwargs. """ qs = self.get_queryset().filter(**lookup) date_field = self.get_date_field() allow_future = self.get_allow_future() allow_empty = self.get_allow_empty() paginate_by = self.get_paginate_by(qs) if not allow_future: now = timezone.now() if self.uses_datetime_field else timezone_today() qs = qs.filter(**{'%s__lte' % date_field: now}) if not allow_empty: # When pagination is enabled, it's better to do a cheap query # than to load the unpaginated queryset in memory. is_empty = len(qs) == 0 if paginate_by is None else not qs.exists() if is_empty: raise Http404(_("No %(verbose_name_plural)s available") % { 'verbose_name_plural': qs.model._meta.verbose_name_plural, }) return qs def get_date_list_period(self): """ Get the aggregation period for the list of dates: 'year', 'month', or 'day'. """ return self.date_list_period def get_date_list(self, queryset, date_type=None, ordering='ASC'): """ Get a date list by calling `queryset.dates/datetimes()`, checking along the way for empty lists that aren't allowed. """ date_field = self.get_date_field() allow_empty = self.get_allow_empty() if date_type is None: date_type = self.get_date_list_period() if self.uses_datetime_field: date_list = queryset.datetimes(date_field, date_type, ordering) else: date_list = queryset.dates(date_field, date_type, ordering) if date_list is not None and not date_list and not allow_empty: raise Http404( _("No %(verbose_name_plural)s available") % { 'verbose_name_plural': queryset.model._meta.verbose_name_plural, } ) return date_list class BaseArchiveIndexView(BaseDateListView): """ Base class for archives of date-based items. Requires a response mixin. 
""" context_object_name = 'latest' def get_dated_items(self): """Return (date_list, items, extra_context) for this request.""" qs = self.get_dated_queryset() date_list = self.get_date_list(qs, ordering='DESC') if not date_list: qs = qs.none() return (date_list, qs, {}) class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView): """Top-level archive of date-based items.""" template_name_suffix = '_archive' class BaseYearArchiveView(YearMixin, BaseDateListView): """List of objects published in a given year.""" date_list_period = 'month' make_object_list = False def get_dated_items(self): """Return (date_list, items, extra_context) for this request.""" year = self.get_year() date_field = self.get_date_field() date = _date_from_string(year, self.get_year_format()) since = self._make_date_lookup_arg(date) until = self._make_date_lookup_arg(self._get_next_year(date)) lookup_kwargs = { '%s__gte' % date_field: since, '%s__lt' % date_field: until, } qs = self.get_dated_queryset(**lookup_kwargs) date_list = self.get_date_list(qs) if not self.get_make_object_list(): # We need this to be a queryset since parent classes introspect it # to find information about the model. qs = qs.none() return (date_list, qs, { 'year': date, 'next_year': self.get_next_year(date), 'previous_year': self.get_previous_year(date), }) def get_make_object_list(self): """ Return `True` if this view should contain the full list of objects in the given year. """ return self.make_object_list class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView): """List of objects published in a given year.""" template_name_suffix = '_archive_year' class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView): """List of objects published in a given month.""" date_list_period = 'day' def get_dated_items(self): """Return (date_list, items, extra_context) for this request.""" year = self.get_year() month = self.get_month() date_field = self.get_date_field() date = _date_from_string(year, self.get_year_format(), month, self.get_month_format()) since = self._make_date_lookup_arg(date) until = self._make_date_lookup_arg(self._get_next_month(date)) lookup_kwargs = { '%s__gte' % date_field: since, '%s__lt' % date_field: until, } qs = self.get_dated_queryset(**lookup_kwargs) date_list = self.get_date_list(qs) return (date_list, qs, { 'month': date, 'next_month': self.get_next_month(date), 'previous_month': self.get_previous_month(date), }) class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView): """List of objects published in a given month.""" template_name_suffix = '_archive_month' class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView): """List of objects published in a given week.""" def get_dated_items(self): """Return (date_list, items, extra_context) for this request.""" year = self.get_year() week = self.get_week() date_field = self.get_date_field() week_format = self.get_week_format() week_start = { '%W': '1', '%U': '0', }[week_format] date = _date_from_string(year, self.get_year_format(), week_start, '%w', week, week_format) since = self._make_date_lookup_arg(date) until = self._make_date_lookup_arg(self._get_next_week(date)) lookup_kwargs = { '%s__gte' % date_field: since, '%s__lt' % date_field: until, } qs = self.get_dated_queryset(**lookup_kwargs) return (None, qs, { 'week': date, 'next_week': self.get_next_week(date), 'previous_week': self.get_previous_week(date), }) class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView): 
"""List of objects published in a given week.""" template_name_suffix = '_archive_week' class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView): """List of objects published on a given day.""" def get_dated_items(self): """Return (date_list, items, extra_context) for this request.""" year = self.get_year() month = self.get_month() day = self.get_day() date = _date_from_string(year, self.get_year_format(), month, self.get_month_format(), day, self.get_day_format()) return self._get_dated_items(date) def _get_dated_items(self, date): """ Do the actual heavy lifting of getting the dated items; this accepts a date object so that TodayArchiveView can be trivial. """ lookup_kwargs = self._make_single_date_lookup(date) qs = self.get_dated_queryset(**lookup_kwargs) return (None, qs, { 'day': date, 'previous_day': self.get_previous_day(date), 'next_day': self.get_next_day(date), 'previous_month': self.get_previous_month(date), 'next_month': self.get_next_month(date) }) class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView): """List of objects published on a given day.""" template_name_suffix = "_archive_day" class BaseTodayArchiveView(BaseDayArchiveView): """List of objects published today.""" def get_dated_items(self): """Return (date_list, items, extra_context) for this request.""" return self._get_dated_items(datetime.date.today()) class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView): """List of objects published today.""" template_name_suffix = "_archive_day" class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView): """ Detail view of a single object on a single date; this differs from the standard DetailView by accepting a year/month/day in the URL. """ def get_object(self, queryset=None): """Get the object this request displays.""" year = self.get_year() month = self.get_month() day = self.get_day() date = _date_from_string(year, self.get_year_format(), month, self.get_month_format(), day, self.get_day_format()) # Use a custom queryset if provided qs = self.get_queryset() if queryset is None else queryset if not self.get_allow_future() and date > datetime.date.today(): raise Http404(_( "Future %(verbose_name_plural)s not available because " "%(class_name)s.allow_future is False." ) % { 'verbose_name_plural': qs.model._meta.verbose_name_plural, 'class_name': self.__class__.__name__, }) # Filter down a queryset from self.queryset using the date from the # URL. This'll get passed as the queryset to DetailView.get_object, # which'll handle the 404 lookup_kwargs = self._make_single_date_lookup(date) qs = qs.filter(**lookup_kwargs) return super().get_object(queryset=qs) class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView): """ Detail view of a single object on a single date; this differs from the standard DetailView by accepting a year/month/day in the URL. """ template_name_suffix = '_detail' def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'): """ Get a datetime.date object given a format string and a year, month, and day (only year is mandatory). Raise a 404 for an invalid date. 
""" format = year_format + delim + month_format + delim + day_format datestr = str(year) + delim + str(month) + delim + str(day) try: return datetime.datetime.strptime(datestr, format).date() except ValueError: raise Http404(_("Invalid date string '%(datestr)s' given format '%(format)s'") % { 'datestr': datestr, 'format': format, }) def _get_next_prev(generic_view, date, is_previous, period): """ Get the next or the previous valid date. The idea is to allow links on month/day views to never be 404s by never providing a date that'll be invalid for the given view. This is a bit complicated since it handles different intervals of time, hence the coupling to generic_view. However in essence the logic comes down to: * If allow_empty and allow_future are both true, this is easy: just return the naive result (just the next/previous day/week/month, regardless of object existence.) * If allow_empty is true, allow_future is false, and the naive result isn't in the future, then return it; otherwise return None. * If allow_empty is false and allow_future is true, return the next date *that contains a valid object*, even if it's in the future. If there are no next objects, return None. * If allow_empty is false and allow_future is false, return the next date that contains a valid object. If that date is in the future, or if there are no next objects, return None. """ date_field = generic_view.get_date_field() allow_empty = generic_view.get_allow_empty() allow_future = generic_view.get_allow_future() get_current = getattr(generic_view, '_get_current_%s' % period) get_next = getattr(generic_view, '_get_next_%s' % period) # Bounds of the current interval start, end = get_current(date), get_next(date) # If allow_empty is True, the naive result will be valid if allow_empty: if is_previous: result = get_current(start - datetime.timedelta(days=1)) else: result = end if allow_future or result <= timezone_today(): return result else: return None # Otherwise, we'll need to go to the database to look for an object # whose date_field is at least (greater than/less than) the given # naive result else: # Construct a lookup and an ordering depending on whether we're doing # a previous date or a next date lookup. if is_previous: lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)} ordering = '-%s' % date_field else: lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)} ordering = date_field # Filter out objects in the future if appropriate. if not allow_future: # Fortunately, to match the implementation of allow_future, # we need __lte, which doesn't conflict with __lt above. if generic_view.uses_datetime_field: now = timezone.now() else: now = timezone_today() lookup['%s__lte' % date_field] = now qs = generic_view.get_queryset().filter(**lookup).order_by(ordering) # Snag the first object from the queryset; if it doesn't exist that # means there's no next/previous link available. try: result = getattr(qs[0], date_field) except IndexError: return None # Convert datetimes to dates in the current time zone. if generic_view.uses_datetime_field: if settings.USE_TZ: result = timezone.localtime(result) result = result.date() # Return the first day of the period. return get_current(result) def timezone_today(): """Return the current date in the current time zone.""" if settings.USE_TZ: return timezone.localdate() else: return datetime.date.today()
shacker/django
django/views/generic/dates.py
Python
bsd-3-clause
25,251
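A minimal URLconf sketch for the date-based archive views defined above, assuming a hypothetical Article model with a pub_date field:

# The Article model and its pub_date field are assumptions for illustration.
from django.urls import path
from django.views.generic.dates import ArchiveIndexView, YearArchiveView

from myapp.models import Article

urlpatterns = [
    path('articles/',
         ArchiveIndexView.as_view(model=Article, date_field='pub_date'),
         name='article_archive'),
    path('articles/<int:year>/',
         YearArchiveView.as_view(model=Article, date_field='pub_date',
                                 make_object_list=True),
         name='article_year_archive'),
]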
from __future__ import absolute_import, print_function import inspect import logging import raven import sentry from django.conf import settings from django.db.utils import DatabaseError from raven.contrib.django.client import DjangoClient from . import metrics UNSAFE_FILES = ( 'sentry/event_manager.py', 'sentry/tasks/process_buffer.py', ) def can_record_current_event(): """ Tests the current stack for unsafe locations that would likely cause recursion if an attempt to send to Sentry was made. """ for _, filename, _, _, _, _ in inspect.stack(): if filename.endswith(UNSAFE_FILES): return False return True class SentryInternalClient(DjangoClient): def is_enabled(self): if getattr(settings, 'DISABLE_RAVEN', False): return False return settings.SENTRY_PROJECT is not None def capture(self, *args, **kwargs): if not can_record_current_event(): metrics.incr('internal.uncaptured.events') self.error_logger.error('Not capturing event due to unsafe stacktrace:\n%r', kwargs) return return super(SentryInternalClient, self).capture(*args, **kwargs) def send(self, **kwargs): # TODO(dcramer): this should respect rate limits/etc and use the normal # pipeline from sentry.app import tsdb from sentry.coreapi import ClientApiHelper from sentry.event_manager import EventManager from sentry.models import Project helper = ClientApiHelper( agent='raven-python/%s (sentry %s)' % (raven.VERSION, sentry.VERSION), project_id=settings.SENTRY_PROJECT, version=self.protocol_version, ) try: project = Project.objects.get_from_cache(id=settings.SENTRY_PROJECT) except DatabaseError: self.error_logger.error('Unable to fetch internal project', exc_info=True) except Project.DoesNotExist: self.error_logger.error('Internal project (id=%s) does not exist', settings.SENTRY_PROJECT) return helper.context.bind_project(project) metrics.incr('events.total', 1) kwargs['project'] = project.id try: manager = EventManager(kwargs) data = manager.normalize() tsdb.incr_multi([ (tsdb.models.project_total_received, project.id), (tsdb.models.organization_total_received, project.organization_id), ]) helper.insert_data_to_database(data) except Exception as e: if self.raise_send_errors: raise self.error_logger.error( 'Unable to record event: %s\nEvent was: %r', e, kwargs['message'], exc_info=True) class SentryInternalFilter(logging.Filter): def filter(self, record): metrics.incr('internal.uncaptured.logs') return can_record_current_event()
hongliang5623/sentry
src/sentry/utils/raven.py
Python
bsd-3-clause
3,051
# Copyright 2012, Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can # be found in the LICENSE file. import os import shlex from subprocess import Popen, PIPE import time import unittest import utils class TestCase(unittest.TestCase): @classmethod def setenv(cls, env): cls.env = env def assertContains(self, b, a): self.assertTrue(a in b, "%r not found in %r" % (a, b)) class MultiDict(dict): def __getattr__(self, name): v = self[name] if type(v)==dict: v=MultiDict(v) return v def mget(self, mkey, default=None): keys = mkey.split(".") try: v = self for key in keys: v = v[key] except KeyError: v = default if type(v)==dict: v = MultiDict(v) return v class Tailer(object): def __init__(self, filepath, flush=None, sleep=0, timeout=10.0): self.filepath = filepath self.flush = flush self.sleep = sleep self.timeout = timeout self.f = None self.reset() def reset(self): """Call reset when you want to start using the tailer.""" if self.flush: self.flush() else: time.sleep(self.sleep) # Re-open the file if open. if self.f: self.f.close() self.f = None # Wait for file to exist. timeout = self.timeout while not os.path.exists(self.filepath): timeout = utils.wait_step('file exists: ' + self.filepath, timeout) self.f = open(self.filepath) self.f.seek(0, os.SEEK_END) self.pos = self.f.tell() def read(self): """Returns a string which may contain multiple lines.""" if self.flush: self.flush() else: time.sleep(self.sleep) self.f.seek(0, os.SEEK_END) newpos = self.f.tell() if newpos < self.pos: return "" self.f.seek(self.pos, os.SEEK_SET) size = newpos-self.pos self.pos = newpos return self.f.read(size) def readLines(self): """Returns a list of read lines.""" return self.read().splitlines() # FIXME: Hijacked from go/vt/tabletserver/test.py # Reuse when things come together def execute(cmd, trap_output=False, verbose=False, **kargs): args = shlex.split(cmd) if trap_output: kargs['stdout'] = PIPE kargs['stderr'] = PIPE if verbose: print "Execute:", cmd, ', '.join('%s=%s' % x for x in kargs.iteritems()) proc = Popen(args, **kargs) proc.args = args stdout, stderr = proc.communicate() if proc.returncode: raise Exception('FAIL: %s %s %s' % (args, stdout, stderr)) return stdout, stderr
anusornc/vitess
test/framework.py
Python
bsd-3-clause
2,552
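A small usage sketch for the Tailer helper above; the log path and the flat import of the test framework module are assumptions:

from framework import Tailer

tailer = Tailer('/tmp/vttablet.log', sleep=0.1)
tailer.reset()  # start reading from the current end of the file

# ... trigger whatever writes to the log here ...

for line in tailer.readLines():
    print(line)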
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys
import json

from command_executor import CommandExecutor

_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
_PARENT_DIR = os.path.join(_THIS_DIR, os.pardir)
sys.path.insert(1, _PARENT_DIR)
import chrome_paths
sys.path.remove(_PARENT_DIR)
sys.path.insert(0, os.path.join(chrome_paths.GetSrc(), 'third_party',
                                'catapult', 'telemetry', 'third_party',
                                'websocket-client'))
import websocket


class WebSocketCommands:
  CREATE_WEBSOCKET = \
    '/session/:sessionId'
  SEND_OVER_WEBSOCKET = \
    '/session/:sessionId/chromium/send_command_from_websocket'


class WebSocketConnection(object):
  def __init__(self, server_url, session_id):
    self._server_url = server_url.replace('http', 'ws')
    self._session_id = session_id
    self._command_id = -1
    cmd_params = {'sessionId': session_id}
    path = CommandExecutor.CreatePath(
      WebSocketCommands.CREATE_WEBSOCKET, cmd_params)
    self._websocket = websocket.create_connection(self._server_url + path)

  def SendCommand(self, cmd_params):
    cmd_params['id'] = self._command_id
    self._command_id -= 1
    self._websocket.send(json.dumps(cmd_params))

  def ReadMessage(self):
    return self._websocket.recv()

  def Close(self):
    self._websocket.close()
chromium/chromium
chrome/test/chromedriver/client/websocket_connection.py
Python
bsd-3-clause
1,471
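A usage sketch for the WebSocketConnection class above; the server URL, session id, and the command payload are placeholders that would come from a running ChromeDriver session:

from websocket_connection import WebSocketConnection

server_url = 'http://localhost:9515'            # placeholder ChromeDriver URL
session_id = 'SESSION_ID_FROM_NEW_SESSION'      # placeholder session id

conn = WebSocketConnection(server_url, session_id)
conn.SendCommand({'method': 'Target.getTargets', 'params': {}})
print(conn.ReadMessage())
conn.Close()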
from __future__ import absolute_import import numpy import chainer from chainer import _backend from chainer.backends import _cpu from chainer.configuration import config _ideep_version = None _error = None try: import ideep4py as ideep # NOQA from ideep4py import mdarray # type: ignore # NOQA _ideep_version = 2 if hasattr(ideep, '__version__') else 1 except ImportError as e: _error = e _ideep_version = None class mdarray(object): # type: ignore pass # for type testing class Intel64Device(_backend.Device): """Device for Intel64 (Intel Architecture) backend with iDeep""" xp = numpy name = '@intel64' supported_array_types = (numpy.ndarray, mdarray) __hash__ = _backend.Device.__hash__ def __init__(self): check_ideep_available() super(Intel64Device, self).__init__() @staticmethod def from_array(array): if isinstance(array, mdarray): return Intel64Device() return None def __eq__(self, other): return isinstance(other, Intel64Device) def __repr__(self): return '<{}>'.format(self.__class__.__name__) def send_array(self, array): if isinstance(array, ideep.mdarray): return array if not isinstance(array, numpy.ndarray): array = _cpu._to_cpu(array) # to numpy.ndarray if (isinstance(array, numpy.ndarray) and array.ndim in (1, 2, 4) and 0 not in array.shape): # TODO(kmaehashi): Remove ndim validation once iDeep has fixed. # Currently iDeep only supports (1, 2, 4)-dim arrays. # Note that array returned from `ideep.array` may not be an # iDeep mdarray, e.g., when the dtype is not float32. array = ideep.array(array, itype=ideep.wgt_array) return array def is_array_supported(self, array): return isinstance(array, (numpy.ndarray, mdarray)) # ------------------------------------------------------------------------------ # ideep configuration # ------------------------------------------------------------------------------ _SHOULD_USE_IDEEP = { '==always': {'always': True, 'auto': False, 'never': False}, '>=auto': {'always': True, 'auto': True, 'never': False}, } def is_ideep_available(): """Returns if iDeep is available. Returns: bool: ``True`` if the supported version of iDeep is installed. """ return _ideep_version is not None and _ideep_version == 2 def check_ideep_available(): """Checks if iDeep is available. When iDeep is correctly set up, nothing happens. Otherwise it raises ``RuntimeError``. """ if _ideep_version is None: # If the error is missing shared object, append a message to # redirect to the ideep website. msg = str(_error) if 'cannot open shared object file' in msg: msg += ('\n\nEnsure iDeep requirements are satisfied: ' 'https://github.com/intel/ideep') raise RuntimeError( 'iDeep is not available.\n' 'Reason: {}: {}'.format(type(_error).__name__, msg)) elif _ideep_version != 2: raise RuntimeError( 'iDeep is not available.\n' 'Reason: Unsupported iDeep version ({})'.format(_ideep_version)) def should_use_ideep(level): """Determines if we should use iDeep. This function checks ``chainer.config.use_ideep`` and availability of ``ideep4py`` package. Args: level (str): iDeep use level. It must be either ``'==always'`` or ``'>=auto'``. ``'==always'`` indicates that the ``use_ideep`` config must be ``'always'`` to use iDeep. Returns: bool: ``True`` if the caller should use iDeep. """ if not is_ideep_available(): return False # TODO(niboshi): # Add lowest_version argument and compare with ideep version. # Currently ideep does not provide a way to retrieve its version. 
if level not in _SHOULD_USE_IDEEP: raise ValueError('invalid iDeep use level: %s ' '(must be either of "==always" or ">=auto")' % repr(level)) flags = _SHOULD_USE_IDEEP[level] use_ideep = config.use_ideep if use_ideep not in flags: raise ValueError('invalid use_ideep configuration: %s ' '(must be either of "always", "auto", or "never")' % repr(use_ideep)) return flags[use_ideep] def inputs_all_ready(inputs, supported_ndim=(2, 4)): """Checks if input arrays are supported for an iDeep primitive. Before calling an iDeep primitive (e.g., ``ideep4py.linear.Forward``), you need to make sure that all input arrays are ready for the primitive by calling this function. Information to be checked includes array types, dimesions and data types. The function checks ``inputs`` info and ``supported_ndim``. Inputs to be tested can be any of ``Variable``, ``numpy.ndarray`` or ``ideep4py.mdarray``. However, all inputs to iDeep primitives must be ``ideep4py.mdarray``. Callers of iDeep primitives are responsible of converting all inputs to ``ideep4py.mdarray``. Args: inputs (sequence of arrays or variables): Inputs to be checked. supported_ndim (tuple of ints): Supported ndim values for the iDeep primitive. Returns: bool: ``True`` if all conditions meet. """ def _is_supported_array_type(a): return isinstance(a, ideep.mdarray) or ideep.check_type([a]) if not is_ideep_available(): return False inputs = [x.data if isinstance(x, chainer.variable.Variable) else x for x in inputs] return (ideep.check_ndim(inputs, supported_ndim) and all([_is_supported_array_type(a) for a in inputs]))
pfnet/chainer
chainer/backends/intel64.py
Python
mit
5,920
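A sketch of the guard pattern this backend module provides, using only the helpers defined above; the array shapes and the NumPy fallback are arbitrary illustrations:

import numpy
from chainer.backends import intel64

x = numpy.random.rand(8, 4).astype(numpy.float32)
W = numpy.random.rand(3, 4).astype(numpy.float32)

if (intel64.should_use_ideep('>=auto')
        and intel64.inputs_all_ready((x, W))):
    # An iDeep-accelerated primitive would be called on this branch.
    pass
else:
    y = x.dot(W.T)  # plain NumPy fallback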
""" color scheme source data """ schemes = {'classic': [(255, 237, 237), (255, 224, 224), (255, 209, 209), (255, 193, 193), (255, 176, 176), (255, 159, 159), (255, 142, 142), (255, 126, 126), (255, 110, 110), (255, 94, 94), (255, 81, 81), (255, 67, 67), (255, 56, 56), (255, 46, 46), (255, 37, 37), (255, 29, 29), (255, 23, 23), (255, 18, 18), (255, 14, 14), (255, 11, 11), (255, 8, 8), (255, 6, 6), (255, 5, 5), (255, 3, 3), (255, 2, 2), (255, 2, 2), (255, 1, 1), (255, 1, 1), (255, 0, 0), (255, 0, 0), (255, 0, 0), (255, 0, 0), (255, 0, 0), (255, 0, 0), (255, 0, 0), (255, 0, 0), (255, 1, 0), (255, 4, 0), (255, 6, 0), (255, 10, 0), (255, 14, 0), (255, 18, 0), (255, 22, 0), (255, 26, 0), (255, 31, 0), (255, 36, 0), (255, 41, 0), (255, 45, 0), (255, 51, 0), (255, 57, 0), (255, 62, 0), (255, 68, 0), (255, 74, 0), (255, 81, 0), (255, 86, 0), (255, 93, 0), (255, 99, 0), (255, 105, 0), (255, 111, 0), (255, 118, 0), (255, 124, 0), (255, 131, 0), (255, 137, 0), (255, 144, 0), (255, 150, 0), (255, 156, 0), (255, 163, 0), (255, 169, 0), (255, 175, 0), (255, 181, 0), (255, 187, 0), (255, 192, 0), (255, 198, 0), (255, 203, 0), (255, 208, 0), (255, 213, 0), (255, 218, 0), (255, 222, 0), (255, 227, 0), (255, 232, 0), (255, 235, 0), (255, 238, 0), (255, 242, 0), (255, 245, 0), (255, 247, 0), (255, 250, 0), (255, 251, 0), (253, 252, 0), (250, 252, 1), (248, 252, 2), (244, 252, 2), (241, 252, 3), (237, 252, 3), (233, 252, 3), (229, 252, 4), (225, 252, 4), (220, 252, 5), (216, 252, 5), (211, 252, 6), (206, 252, 7), (201, 252, 7), (197, 252, 8), (191, 251, 8), (185, 249, 9), (180, 247, 9), (174, 246, 10), (169, 244, 11), (164, 242, 11), (158, 240, 12), (151, 238, 13), (146, 236, 14), (140, 233, 14), (134, 231, 15), (128, 228, 16), (122, 226, 17), (116, 223, 18), (110, 221, 19), (105, 218, 20), (99, 216, 21), (93, 214, 22), (88, 211, 23), (82, 209, 24), (76, 207, 25), (71, 204, 26), (66, 202, 28), (60, 200, 30), (55, 198, 31), (50, 196, 33), (45, 194, 34), (40, 191, 35), (36, 190, 37), (31, 188, 39), (27, 187, 40), (23, 185, 43), (19, 184, 44), (15, 183, 46), (12, 182, 48), (9, 181, 51), (6, 181, 53), (3, 180, 55), (1, 180, 57), (0, 180, 60), (0, 180, 62), (0, 180, 65), (0, 181, 68), (0, 182, 70), (0, 182, 74), (0, 183, 77), (0, 184, 80), (0, 184, 84), (0, 186, 88), (0, 187, 92), (0, 188, 95), (0, 190, 99), (0, 191, 104), (0, 193, 108), (0, 194, 112), (0, 196, 116), (0, 198, 120), (0, 200, 125), (0, 201, 129), (0, 203, 134), (0, 205, 138), (0, 207, 143), (0, 209, 147), (0, 211, 151), (0, 213, 156), (0, 215, 160), (0, 216, 165), (0, 219, 171), (0, 222, 178), (0, 224, 184), (0, 227, 190), (0, 229, 197), (0, 231, 203), (0, 233, 209), (0, 234, 214), (0, 234, 220), (0, 234, 225), (0, 234, 230), (0, 234, 234), (0, 234, 238), (0, 234, 242), (0, 234, 246), (0, 234, 248), (0, 234, 251), (0, 234, 254), (0, 234, 255), (0, 232, 255), (0, 228, 255), (0, 224, 255), (0, 219, 255), (0, 214, 254), (0, 208, 252), (0, 202, 250), (0, 195, 247), (0, 188, 244), (0, 180, 240), (0, 173, 236), (0, 164, 232), (0, 156, 228), (0, 147, 222), (0, 139, 218), (0, 130, 213), (0, 122, 208), (0, 117, 205), (0, 112, 203), (0, 107, 199), (0, 99, 196), (0, 93, 193), (0, 86, 189), (0, 78, 184), (0, 71, 180), (0, 65, 175), (0, 58, 171), (0, 52, 167), (0, 46, 162), (0, 40, 157), (0, 35, 152), (0, 30, 147), (0, 26, 142), (0, 22, 136), (0, 18, 131), (0, 15, 126), (0, 12, 120), (0, 9, 115), (1, 8, 110), (1, 6, 106), (1, 5, 101), (2, 4, 97), (3, 4, 92), (4, 5, 89), (5, 5, 85), (6, 6, 82), (7, 7, 79), (8, 8, 77), (10, 10, 77), (12, 12, 77), (14, 14, 76), (16, 
16, 74), (19, 19, 73), (21, 21, 72), (24, 24, 71), (26, 26, 69), (29, 29, 70), (32, 32, 69), (35, 35, 68), (37, 37, 67), (40, 40, 67), (42, 42, 65), (44, 44, 65), (46, 46, 64), (48, 48, 63), (49, 50, 62), (51, 51, 61), (53, 52, 61)], 'fire': [(255, 255, 255), (255, 255, 253), (255, 255, 250), (255, 255, 247), (255, 255, 244), (255, 255, 241), (255, 255, 238), (255, 255, 234), (255, 255, 231), (255, 255, 227), (255, 255, 223), (255, 255, 219), (255, 255, 214), (255, 255, 211), (255, 255, 206), (255, 255, 202), (255, 255, 197), (255, 255, 192), (255, 255, 187), (255, 255, 183), (255, 255, 178), (255, 255, 172), (255, 255, 167), (255, 255, 163), (255, 255, 157), (255, 255, 152), (255, 255, 147), (255, 255, 142), (255, 255, 136), (255, 255, 132), (255, 255, 126), (255, 255, 121), (255, 255, 116), (255, 255, 111), (255, 255, 106), (255, 255, 102), (255, 255, 97), (255, 255, 91), (255, 255, 87), (255, 255, 82), (255, 255, 78), (255, 255, 74), (255, 255, 70), (255, 255, 65), (255, 255, 61), (255, 255, 57), (255, 255, 53), (255, 255, 50), (255, 255, 46), (255, 255, 43), (255, 255, 39), (255, 255, 38), (255, 255, 34), (255, 255, 31), (255, 255, 29), (255, 255, 26), (255, 255, 25), (255, 254, 23), (255, 251, 22), (255, 250, 22), (255, 247, 23), (255, 245, 23), (255, 242, 24), (255, 239, 24), (255, 236, 25), (255, 232, 25), (255, 229, 26), (255, 226, 26), (255, 222, 27), (255, 218, 27), (255, 215, 28), (255, 210, 28), (255, 207, 29), (255, 203, 29), (255, 199, 30), (255, 194, 30), (255, 190, 31), (255, 186, 31), (255, 182, 32), (255, 176, 32), (255, 172, 33), (255, 168, 34), (255, 163, 34), (255, 159, 35), (255, 154, 35), (255, 150, 36), (255, 145, 36), (255, 141, 37), (255, 136, 37), (255, 132, 38), (255, 128, 39), (255, 124, 39), (255, 119, 40), (255, 115, 40), (255, 111, 41), (255, 107, 41), (255, 103, 42), (255, 99, 42), (255, 95, 43), (255, 92, 44), (255, 89, 44), (255, 85, 45), (255, 81, 45), (255, 79, 46), (255, 76, 47), (255, 72, 47), (255, 70, 48), (255, 67, 48), (255, 65, 49), (255, 63, 50), (255, 60, 50), (255, 59, 51), (255, 57, 51), (255, 55, 52), (255, 55, 53), (255, 53, 53), (253, 54, 54), (253, 54, 54), (251, 55, 55), (250, 56, 56), (248, 56, 56), (247, 57, 57), (246, 57, 57), (244, 58, 58), (242, 59, 59), (240, 59, 59), (239, 60, 60), (238, 61, 61), (235, 61, 61), (234, 62, 62), (232, 62, 62), (229, 63, 63), (228, 64, 64), (226, 64, 64), (224, 65, 65), (222, 66, 66), (219, 66, 66), (218, 67, 67), (216, 67, 67), (213, 68, 68), (211, 69, 69), (209, 69, 69), (207, 70, 70), (205, 71, 71), (203, 71, 71), (200, 72, 72), (199, 73, 73), (196, 73, 73), (194, 74, 74), (192, 74, 74), (190, 75, 75), (188, 76, 76), (186, 76, 76), (183, 77, 77), (181, 78, 78), (179, 78, 78), (177, 79, 79), (175, 80, 80), (173, 80, 80), (170, 81, 81), (169, 82, 82), (166, 82, 82), (165, 83, 83), (162, 83, 83), (160, 84, 84), (158, 85, 85), (156, 85, 85), (154, 86, 86), (153, 87, 87), (150, 87, 87), (149, 88, 88), (147, 89, 89), (146, 90, 90), (144, 91, 91), (142, 92, 92), (142, 94, 94), (141, 95, 95), (140, 96, 96), (139, 98, 98), (138, 99, 99), (136, 100, 100), (135, 101, 101), (135, 103, 103), (134, 104, 104), (133, 105, 105), (133, 107, 107), (132, 108, 108), (131, 109, 109), (132, 111, 111), (131, 112, 112), (130, 113, 113), (130, 114, 114), (130, 116, 116), (130, 117, 117), (130, 118, 118), (129, 119, 119), (130, 121, 121), (130, 122, 122), (130, 123, 123), (130, 124, 124), (131, 126, 126), (131, 127, 127), (130, 128, 128), (131, 129, 129), (132, 131, 131), (132, 132, 132), (133, 133, 133), (134, 134, 134), 
(135, 135, 135), (136, 136, 136), (138, 138, 138), (139, 139, 139), (140, 140, 140), (141, 141, 141), (142, 142, 142), (143, 143, 143), (144, 144, 144), (145, 145, 145), (147, 147, 147), (148, 148, 148), (149, 149, 149), (150, 150, 150), (151, 151, 151), (152, 152, 152), (153, 153, 153), (154, 154, 154), (155, 155, 155), (156, 156, 156), (157, 157, 157), (158, 158, 158), (159, 159, 159), (160, 160, 160), (160, 160, 160), (161, 161, 161), (162, 162, 162), (163, 163, 163), (164, 164, 164), (165, 165, 165), (166, 166, 166), (167, 167, 167), (167, 167, 167), (168, 168, 168), (169, 169, 169), (170, 170, 170), (170, 170, 170), (171, 171, 171), (172, 172, 172), (173, 173, 173), (173, 173, 173), (174, 174, 174), (175, 175, 175), (175, 175, 175), (176, 176, 176), (176, 176, 176), (177, 177, 177), (177, 177, 177)], 'omg': [(255, 255, 255), (255, 254, 254), (255, 253, 253), (255, 251, 251), (255, 250, 250), (255, 249, 249), (255, 247, 247), (255, 246, 246), (255, 244, 244), (255, 242, 242), (255, 241, 241), (255, 239, 239), (255, 237, 237), (255, 235, 235), (255, 233, 233), (255, 231, 231), (255, 229, 229), (255, 227, 227), (255, 226, 226), (255, 224, 224), (255, 222, 222), (255, 220, 220), (255, 217, 217), (255, 215, 215), (255, 213, 213), (255, 210, 210), (255, 208, 208), (255, 206, 206), (255, 204, 204), (255, 202, 202), (255, 199, 199), (255, 197, 197), (255, 194, 194), (255, 192, 192), (255, 189, 189), (255, 188, 188), (255, 185, 185), (255, 183, 183), (255, 180, 180), (255, 178, 178), (255, 176, 176), (255, 173, 173), (255, 171, 171), (255, 169, 169), (255, 167, 167), (255, 164, 164), (255, 162, 162), (255, 160, 160), (255, 158, 158), (255, 155, 155), (255, 153, 153), (255, 151, 151), (255, 149, 149), (255, 147, 147), (255, 145, 145), (255, 143, 143), (255, 141, 141), (255, 139, 139), (255, 137, 137), (255, 136, 136), (255, 134, 134), (255, 132, 132), (255, 131, 131), (255, 129, 129), (255, 128, 128), (255, 127, 127), (255, 127, 127), (255, 126, 126), (255, 125, 125), (255, 125, 125), (255, 124, 124), (255, 123, 122), (255, 123, 122), (255, 122, 121), (255, 122, 121), (255, 121, 120), (255, 120, 119), (255, 119, 118), (255, 119, 118), (255, 118, 116), (255, 117, 116), (255, 117, 115), (255, 115, 114), (255, 115, 114), (255, 114, 113), (255, 114, 112), (255, 113, 111), (255, 113, 111), (255, 112, 110), (255, 111, 108), (255, 111, 108), (255, 110, 107), (255, 110, 107), (255, 109, 105), (255, 109, 105), (255, 108, 104), (255, 107, 104), (255, 107, 102), (255, 106, 102), (255, 106, 101), (255, 105, 101), (255, 104, 99), (255, 104, 99), (255, 103, 98), (255, 103, 98), (255, 102, 97), (255, 102, 96), (255, 101, 96), (255, 101, 96), (255, 100, 94), (255, 100, 94), (255, 99, 93), (255, 99, 92), (255, 98, 91), (255, 98, 91), (255, 97, 90), (255, 97, 89), (255, 96, 89), (255, 96, 89), (255, 95, 88), (255, 95, 88), (255, 94, 86), (255, 93, 86), (255, 93, 85), (255, 93, 85), (255, 92, 85), (255, 92, 84), (255, 91, 83), (255, 91, 83), (255, 90, 82), (255, 90, 82), (255, 89, 81), (255, 89, 82), (255, 89, 80), (255, 89, 80), (255, 89, 79), (255, 89, 79), (255, 88, 79), (255, 88, 79), (255, 87, 78), (255, 87, 78), (255, 87, 78), (255, 87, 77), (255, 87, 77), (255, 86, 77), (255, 86, 77), (255, 85, 76), (255, 85, 76), (255, 85, 75), (255, 85, 76), (255, 85, 75), (255, 85, 76), (255, 84, 75), (255, 84, 75), (255, 84, 75), (255, 84, 75), (255, 85, 75), (255, 84, 75), (255, 84, 75), (255, 83, 74), (255, 83, 75), (255, 83, 75), (255, 84, 75), (255, 83, 75), (255, 83, 75), (255, 83, 75), (255, 83, 75), (255, 83, 
76), (255, 83, 76), (255, 83, 76), (255, 83, 76), (255, 83, 76), (255, 83, 76), (255, 83, 76), (255, 83, 76), (255, 83, 77), (255, 84, 78), (255, 83, 78), (255, 84, 79), (255, 84, 78), (255, 84, 79), (255, 83, 79), (255, 84, 80), (255, 83, 80), (255, 84, 81), (255, 85, 82), (255, 85, 82), (255, 85, 83), (255, 85, 83), (255, 85, 84), (255, 85, 84), (255, 86, 85), (255, 86, 85), (255, 87, 87), (254, 89, 89), (254, 91, 92), (253, 92, 93), (252, 94, 96), (251, 96, 98), (251, 97, 100), (249, 99, 103), (249, 100, 105), (248, 102, 108), (247, 104, 111), (246, 105, 113), (245, 107, 116), (244, 109, 119), (243, 110, 122), (242, 112, 125), (241, 113, 127), (240, 115, 130), (239, 117, 134), (238, 118, 136), (237, 120, 140), (236, 121, 142), (235, 123, 145), (234, 124, 148), (233, 126, 151), (232, 127, 154), (232, 129, 157), (230, 130, 159), (230, 132, 162), (229, 133, 165), (228, 135, 168), (227, 136, 170), (227, 138, 173), (226, 139, 176), (225, 140, 178), (224, 142, 181), (223, 143, 183), (223, 144, 185), (223, 146, 188), (222, 147, 190), (221, 148, 192), (221, 150, 195), (220, 151, 197), (219, 152, 199), (219, 153, 201), (219, 154, 202), (219, 156, 205), (218, 157, 207), (217, 158, 208), (217, 159, 210), (217, 160, 211), (217, 161, 213), (216, 162, 214), (216, 163, 216), (216, 164, 217), (215, 165, 218), (216, 166, 219), (215, 166, 220), (215, 167, 222), (215, 168, 223), (215, 169, 223), (215, 170, 224), (215, 170, 225)], 'pbj': [(41, 10, 89), (41, 10, 89), (42, 10, 89), (42, 10, 89), (42, 10, 88), (43, 10, 88), (43, 9, 88), (43, 9, 88), (44, 9, 88), (44, 9, 88), (45, 10, 89), (46, 10, 88), (46, 9, 88), (47, 9, 88), (47, 9, 88), (47, 9, 88), (48, 8, 88), (48, 8, 87), (49, 8, 87), (49, 8, 87), (49, 7, 87), (50, 7, 87), (50, 7, 87), (51, 7, 86), (51, 6, 86), (53, 7, 86), (53, 7, 86), (54, 7, 86), (54, 6, 85), (55, 6, 85), (55, 6, 85), (56, 5, 85), (56, 5, 85), (57, 5, 84), (57, 5, 84), (58, 4, 84), (59, 4, 84), (59, 5, 84), (60, 4, 84), (60, 4, 84), (61, 4, 84), (61, 4, 83), (62, 3, 83), (63, 3, 83), (63, 3, 83), (64, 3, 82), (64, 3, 82), (65, 3, 82), (66, 3, 82), (67, 4, 82), (68, 4, 82), (69, 4, 82), (69, 4, 81), (70, 4, 81), (71, 4, 81), (71, 4, 80), (72, 4, 80), (73, 4, 80), (73, 4, 79), (75, 5, 80), (76, 5, 80), (77, 5, 79), (77, 5, 79), (78, 5, 79), (79, 5, 78), (80, 5, 78), (80, 5, 78), (80, 5, 77), (81, 5, 77), (83, 6, 76), (83, 6, 76), (84, 6, 76), (85, 6, 75), (86, 6, 75), (87, 6, 74), (88, 6, 74), (88, 6, 73), (89, 6, 73), (91, 7, 73), (92, 7, 73), (93, 7, 72), (94, 7, 72), (94, 7, 71), (95, 7, 71), (96, 7, 70), (96, 7, 70), (97, 7, 69), (99, 9, 70), (100, 9, 69), (101, 10, 69), (102, 10, 68), (103, 11, 67), (104, 11, 67), (105, 12, 66), (106, 13, 66), (107, 14, 66), (108, 15, 65), (109, 16, 64), (110, 16, 64), (111, 17, 63), (112, 18, 62), (113, 18, 61), (114, 19, 61), (115, 20, 60), (118, 22, 60), (119, 22, 59), (120, 22, 58), (120, 23, 58), (121, 24, 57), (122, 25, 56), (124, 26, 55), (125, 27, 54), (127, 29, 54), (128, 30, 54), (130, 31, 53), (131, 32, 52), (132, 33, 51), (133, 34, 50), (134, 35, 49), (135, 36, 48), (137, 38, 48), (138, 39, 47), (140, 40, 46), (141, 41, 46), (142, 42, 45), (143, 42, 44), (144, 43, 43), (145, 44, 42), (146, 45, 42), (149, 47, 41), (150, 48, 41), (151, 49, 40), (152, 50, 39), (153, 51, 38), (154, 52, 38), (155, 53, 37), (157, 55, 36), (159, 57, 36), (160, 57, 35), (160, 58, 34), (162, 59, 33), (163, 60, 33), (164, 61, 32), (165, 62, 31), (167, 63, 30), (168, 65, 30), (169, 66, 29), (170, 67, 29), (172, 68, 28), (173, 69, 27), (174, 70, 26), (175, 71, 
26), (176, 71, 25), (178, 73, 25), (179, 74, 24), (180, 75, 24), (181, 76, 23), (182, 77, 23), (183, 78, 23), (184, 79, 22), (186, 80, 22), (187, 81, 21), (188, 82, 21), (189, 83, 21), (190, 83, 20), (191, 84, 20), (192, 85, 19), (192, 86, 19), (193, 87, 18), (194, 87, 18), (196, 89, 18), (196, 90, 18), (197, 90, 18), (198, 90, 18), (199, 91, 18), (200, 92, 18), (201, 93, 18), (202, 93, 18), (203, 94, 18), (204, 96, 19), (204, 96, 19), (205, 97, 19), (206, 98, 19), (207, 99, 19), (208, 99, 19), (209, 100, 19), (210, 100, 19), (211, 100, 19), (212, 102, 20), (213, 103, 20), (214, 103, 20), (214, 104, 20), (215, 105, 20), (215, 105, 20), (216, 106, 20), (217, 107, 20), (218, 107, 20), (219, 108, 20), (220, 109, 21), (221, 109, 21), (222, 110, 21), (222, 111, 21), (223, 111, 21), (224, 112, 21), (225, 113, 21), (226, 113, 21), (227, 114, 21), (227, 114, 21), (228, 115, 22), (229, 116, 22), (229, 116, 22), (230, 117, 22), (231, 117, 22), (231, 118, 22), (232, 119, 22), (233, 119, 22), (234, 120, 22), (234, 120, 22), (235, 121, 22), (236, 121, 22), (237, 122, 23), (237, 122, 23), (238, 123, 23), (239, 124, 23), (239, 124, 23), (240, 125, 23), (240, 125, 23), (241, 126, 23), (241, 126, 23), (242, 127, 23), (243, 127, 23), (243, 128, 23), (244, 128, 24), (244, 128, 24), (245, 129, 24), (246, 129, 24), (246, 130, 24), (247, 130, 24), (247, 131, 24), (248, 131, 24), (249, 131, 24), (249, 132, 24), (250, 132, 24), (250, 133, 24), (250, 133, 24), (250, 133, 24), (251, 134, 24), (251, 134, 25), (252, 135, 25), (252, 135, 25), (253, 135, 25), (253, 136, 25), (253, 136, 25), (254, 136, 25), (254, 136, 25), (255, 137, 25)], 'pgaitch': [(255, 254, 165), (255, 254, 164), (255, 253, 163), (255, 253, 162), (255, 253, 161), (255, 252, 160), (255, 252, 159), (255, 252, 157), (255, 251, 156), (255, 251, 155), (255, 251, 153), (255, 250, 152), (255, 250, 150), (255, 250, 149), (255, 249, 148), (255, 249, 146), (255, 249, 145), (255, 248, 143), (255, 248, 141), (255, 248, 139), (255, 247, 138), (255, 247, 136), (255, 246, 134), (255, 246, 132), (255, 246, 130), (255, 245, 129), (255, 245, 127), (255, 245, 125), (255, 244, 123), (255, 244, 121), (255, 243, 119), (255, 243, 117), (255, 242, 114), (255, 242, 112), (255, 241, 111), (255, 241, 109), (255, 240, 107), (255, 240, 105), (255, 239, 102), (255, 239, 100), (255, 238, 99), (255, 238, 97), (255, 237, 95), (255, 237, 92), (255, 236, 90), (255, 237, 89), (255, 236, 87), (255, 235, 84), (255, 235, 82), (255, 234, 80), (255, 233, 79), (255, 233, 77), (255, 232, 74), (255, 231, 72), (255, 230, 70), (255, 230, 69), (255, 229, 67), (255, 228, 65), (255, 227, 63), (255, 226, 61), (255, 225, 60), (255, 225, 58), (255, 224, 56), (255, 223, 54), (255, 222, 52), (255, 222, 51), (255, 221, 49), (255, 220, 47), (255, 219, 46), (255, 218, 44), (255, 216, 43), (255, 215, 42), (255, 214, 41), (255, 213, 39), (255, 212, 39), (255, 211, 37), (255, 209, 36), (255, 208, 34), (255, 208, 33), (255, 206, 33), (255, 205, 32), (255, 204, 30), (255, 202, 29), (255, 201, 29), (255, 199, 28), (254, 199, 28), (254, 199, 27), (253, 198, 27), (252, 197, 27), (251, 196, 27), (250, 195, 26), (249, 195, 26), (248, 194, 26), (248, 193, 26), (247, 192, 26), (246, 192, 25), (245, 191, 26), (244, 190, 26), (243, 189, 25), (241, 188, 25), (240, 187, 25), (239, 187, 25), (238, 186, 25), (236, 185, 25), (236, 184, 26), (235, 183, 26), (233, 182, 25), (232, 181, 25), (230, 181, 26), (229, 180, 26), (228, 179, 25), (227, 178, 25), (226, 177, 26), (224, 176, 26), (222, 176, 25), (221, 175, 25), (220, 
173, 26), (219, 172, 26), (217, 171, 25), (215, 170, 25), (214, 170, 26), (212, 169, 26), (211, 167, 25), (209, 166, 25), (208, 166, 26), (206, 165, 26), (204, 163, 26), (203, 162, 26), (202, 161, 25), (200, 161, 26), (198, 159, 26), (197, 158, 26), (195, 157, 26), (193, 157, 27), (192, 155, 27), (190, 154, 27), (189, 153, 27), (187, 152, 28), (186, 151, 28), (184, 150, 28), (182, 149, 28), (181, 148, 29), (179, 147, 29), (177, 146, 29), (175, 144, 29), (174, 144, 30), (172, 142, 30), (170, 141, 30), (169, 140, 30), (167, 139, 31), (165, 138, 31), (164, 137, 31), (162, 136, 31), (161, 135, 32), (159, 134, 32), (157, 133, 32), (154, 132, 32), (153, 131, 33), (151, 130, 33), (150, 129, 33), (148, 127, 33), (147, 127, 34), (145, 126, 34), (143, 124, 34), (141, 123, 34), (140, 122, 35), (139, 121, 35), (137, 120, 35), (135, 119, 35), (134, 118, 36), (132, 117, 36), (130, 116, 36), (129, 115, 36), (127, 113, 36), (126, 113, 37), (124, 112, 37), (122, 111, 37), (121, 110, 37), (120, 109, 38), (118, 108, 38), (116, 107, 38), (115, 105, 38), (113, 104, 38), (112, 104, 39), (110, 103, 39), (108, 102, 39), (107, 101, 39), (106, 100, 40), (104, 99, 40), (102, 98, 40), (101, 96, 40), (99, 96, 40), (99, 96, 41), (97, 94, 41), (96, 93, 41), (94, 92, 41), (92, 91, 41), (92, 90, 42), (90, 90, 42), (89, 89, 42), (87, 87, 42), (86, 86, 42), (85, 86, 43), (84, 85, 43), (83, 84, 43), (81, 83, 43), (80, 82, 43), (80, 82, 44), (78, 80, 44), (77, 80, 44), (75, 79, 44), (75, 78, 44), (74, 78, 45), (73, 76, 45), (71, 75, 45), (71, 75, 45), (70, 74, 45), (69, 74, 46), (68, 73, 46), (67, 72, 46), (66, 71, 46), (65, 71, 46), (64, 69, 46), (64, 69, 47), (63, 68, 47), (62, 67, 47), (61, 67, 47), (60, 66, 47), (59, 65, 47), (59, 65, 48), (59, 64, 48), (58, 63, 48), (57, 63, 48), (56, 62, 48), (56, 62, 48), (55, 61, 48), (55, 61, 49), (55, 60, 49), (55, 60, 49), (54, 59, 49), (53, 58, 49), (53, 57, 49), (52, 57, 49), (52, 57, 50), (52, 56, 50), (52, 56, 50), (52, 56, 50), (52, 55, 50), (51, 54, 50), (51, 53, 50), (51, 53, 50), (51, 52, 50), (51, 53, 51), (51, 53, 51), (51, 52, 51), (51, 52, 51)]} def valid_schemes(): return schemes.keys()
ashmastaflash/IDCOAS
integration/heatmaps/heatmap/colorschemes.py
Python
gpl-2.0
33,688
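The `schemes` mapping above associates each scheme name with 256 `(r, g, b)` tuples, so a heatmap renderer can pick a color by indexing a palette with a normalized intensity. Below is a minimal sketch of that lookup; the helper name `color_for` is illustrative and not part of colorschemes.py.

# Illustrative lookup (not part of colorschemes.py): map a normalized
# intensity in [0.0, 1.0] onto one of the 256 (r, g, b) tuples of a palette.
def color_for(palette, intensity):
    intensity = min(max(intensity, 0.0), 1.0)                  # clamp to [0, 1]
    index = min(int(intensity * len(palette)), len(palette) - 1)
    return palette[index]

# Usage sketch, assuming the module above is importable as `colorschemes`:
#   from colorschemes import schemes
#   color_for(schemes['fire'], 0.5)   # mid-range color of the 'fire' scheme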
#!/usr/bin/python # # GEOMTERY.OUT to OpenDX # # Created: April 2009 (AVK) # Modified: February 2012 (AVK) # import math import sys def r3minv(a, b): t1 = a[0][2] * (a[1][0] * a[2][1] - a[1][1] * a[2][0]) + \ a[0][1] * (a[1][2] * a[2][0] - a[1][0] * a[2][2]) + \ a[0][0] * (a[1][1] * a[2][2] - a[1][2] * a[2][1]) if math.fabs(t1) < 1e-40: print "r3mv: singular matrix" sys.exit(0) t1 = 1.0/t1 b[0][0] = t1 * (a[1][1] * a[2][2] - a[1][2] * a[2][1]) b[0][1] = t1 * (a[0][2] * a[2][1] - a[0][1] * a[2][2]) b[0][2] = t1 * (a[0][1] * a[1][2] - a[0][2] * a[1][1]) b[1][0] = t1 * (a[1][2] * a[2][0] - a[1][0] * a[2][2]) b[1][1] = t1 * (a[0][0] * a[2][2] - a[0][2] * a[2][0]) b[1][2] = t1 * (a[0][2] * a[1][0] - a[0][0] * a[1][2]) b[2][0] = t1 * (a[1][0] * a[2][1] - a[1][1] * a[2][0]) b[2][1] = t1 * (a[0][1] * a[2][0] - a[0][0] * a[2][1]) b[2][2] = t1 * (a[0][0] * a[1][1] - a[0][1] * a[1][0]) return # # angular part of site-centered orbital # current implementation is for d-orbitals only # class Orbital: def __init__(self,coefs): self.pos=[] self.tri=[] self.coefs=coefs self.make() # real spherical harmonics def Rlm(self,l,m,theta,phi): if l==2 and m==-2: return -math.sqrt(15.0/(16*math.pi))*math.sin(2*phi)*math.sin(theta)**2 if l==2 and m==-1: return -math.sqrt(15.0/(16*math.pi))*math.sin(phi)*math.sin(2*theta) if l==2 and m==0: return math.sqrt(5/(64*math.pi))*(1+3*math.cos(2*theta)) if l==2 and m==1: return -math.sqrt(15.0/(16*math.pi))*math.cos(phi)*math.sin(2*theta) if l==2 and m==2: return math.sqrt(15.0/(16*math.pi))*math.cos(2*phi)*math.sin(theta)**2 def val(self,theta,phi): v=0 for m in range(5): v+=self.coefs[m]*self.Rlm(2,m-2,theta,phi) return v def make(self): raw_pos=[] raw_con=[] n=30 for t in range(n): theta=math.pi*t/(n-1) for p in range(n): phi=2*math.pi*p/(n-1) v=5.5*self.val(theta,phi) x=v*math.sin(theta)*math.cos(phi) y=v*math.sin(theta)*math.sin(phi) z=v*math.cos(theta) raw_pos.append([x,y,z]) for t in range(n): for p in range(n): i1=t i2=(t+1)%n j1=p j2=(p+1)%n n1=i1*n+j1 n2=i1*n+j2 n3=i2*n+j2 n4=i2*n+j1 raw_con.append([n1,n2,n3]) raw_con.append([n1,n3,n4]) # find equal positions eq_pos=[-1 for i in range(n*n)] l=0 for i in range(n*n): if eq_pos[i]==-1: eq_pos[i]=l self.pos.append(raw_pos[i]) for j in range(i+1,n*n): if abs(raw_pos[i][0]-raw_pos[j][0])<1e-10 and \ abs(raw_pos[i][1]-raw_pos[j][1])<1e-10 and \ abs(raw_pos[i][2]-raw_pos[j][2])<1e-10: eq_pos[j]=l l+=1 npos=l # substitute positions in triangles by non-equal positions for i in range(2*n*n): raw_con[i][0]=eq_pos[raw_con[i][0]] raw_con[i][1]=eq_pos[raw_con[i][1]] raw_con[i][2]=eq_pos[raw_con[i][2]] eq_con=[-1 for i in range(2*n*n)] # mark degenerate triangles for i in range(2*n*n): if raw_con[i][0]==raw_con[i][1] or raw_con[i][0]==raw_con[i][2] or \ raw_con[i][1]==raw_con[i][2]: eq_con[i]=-2 # find equal triangles l=0 for i in range(2*n*n): if eq_con[i]==-1: eq_con[i]=l self.tri.append(raw_con[i]) for j in range(i+1,2*n*n): if raw_con[i][0]==raw_con[j][0] and raw_con[i][1]==raw_con[j][1] and \ raw_con[i][2]==raw_con[j][2]: eq_con[j]=l l+=1 # # species-specific variables # class Species: def __init__(self, label): self.label = label self.R = 1.0 self.color = [0.5, 0.5, 0.5] self.visible = True # # atom-specific variables # class Atom: def __init__(self, species, posc, posl): self.species = species self.posc = posc self.posl = posl self.nghbr = [] self.orbital = 0 # # geometry-specific variables # class Geometry: def __init__(self): self.avec = [] self.speciesList = {} self.atomList = [] # read 'GEOMETRY.OUT' self.readGeometry() 
# make a list of nearest neighbours for each atom self.findNeighbours() # print basic info self.printGeometry() def readGeometry(self): fin = open("GEOMETRY.OUT","r") while True : line = fin.readline() if not line: break line = line.strip(" \n") if line == "avec": for i in range(3): s1 = fin.readline().strip(" \n").split() self.avec.append([float(s1[0]), float(s1[1]), float(s1[2])]) if line == "atoms": # get number of species s1 = fin.readline().strip(" \n").split() nspecies = int(s1[0]) # go over species for i in range(nspecies): # construct label from species file name s1 = fin.readline().strip(" \n").split() label = s1[0][1:s1[0].find(".in")] # crate new species sp = Species(label) # put species to the list self.speciesList[label] = sp # get number of atoms for current species s1 = fin.readline().strip(" \n").split() natoms = int(s1[0]) # go over atoms for j in range(natoms): s1 = fin.readline().strip(" \n").split() posl = [float(s1[0]), float(s1[1]), float(s1[2])] posc = [0, 0, 0] for l in range(3): for x in range(3): posc[x] += posl[l] * self.avec[l][x] # create new atom self.atomList.append(Atom(sp, posc, posl)) fin.close() def printGeometry(self): print "lattice vectors" print " a1 : %12.6f %12.6f %12.6f"%(self.avec[0][0], self.avec[0][1], self.avec[0][2]) print " a2 : %12.6f %12.6f %12.6f"%(self.avec[1][0], self.avec[1][1], self.avec[1][2]) print " a3 : %12.6f %12.6f %12.6f"%(self.avec[2][0], self.avec[2][1], self.avec[2][2]) print "atoms" for i in range(len(self.atomList)): print "%4i (%2s) at position %12.6f %12.6f %12.6f"%\ (i, self.atomList[i].species.label, self.atomList[i].posc[0],\ self.atomList[i].posc[1], self.atomList[i].posc[2]) def findNeighbours(self): for iat in range(len(self.atomList)): xi = self.atomList[iat].posc nn = [] # add nearest neigbours for jat in range(len(self.atomList)): xj = self.atomList[jat].posc for i1 in range(-4,5): for i2 in range(-4,5): for i3 in range(-4,5): t = [0, 0, 0] for x in range(3): t[x] = i1 * self.avec[0][x] + i2 * self.avec[1][x] + i3 * self.avec[2][x] r = [0, 0, 0] for x in range(3): r[x] = xj[x] + t[x] - xi[x] d = math.sqrt(r[0]**2 + r[1]**2 + r[2]**2) if (d <= 10.0): nn.append([jat, r, d]) # sort by distance for i in range(len(nn) - 1): for j in range(i+1, len(nn)): if nn[j][2] < nn[i][2]: nn[i], nn[j] = nn[j], nn[i] self.atomList[iat].nghbr = nn[:] # # cell (not necessarily primitive) with atoms and bonds # class Cell: def __init__(self, geometry, box): self.geometry = geometry self.box = box self.bonds = [] self.atoms = [] self.bondList = [] return def hide(self, label): print " " print "hiding", label self.geometry.speciesList[label].visible = False return def atomSphere(self, label, color, R): self.geometry.speciesList[label].color = color self.geometry.speciesList[label].R = R return def bond(self, label1, label2, length, extend): self.bondList.append([label1, label2, length, extend]) return def atomOrbital(self, ias, fname, iorb): fin = open(fname, "r") f1 = [] for i in range(5): s1 = fin.readline().strip(" \n").split() if i == (iorb - 1): for j in range(5): f1.append(float(s1[j])) self.geometry.atomList[ias].orbital = Orbital(f1) return def write(self): self.fillBox() self.makeBonds() self.writeAtoms() self.writeBonds() #self.writeOrbitals() # def inBox(self, p, box): # n=[0,0,0] # a=box[0] # b=box[1] # c=box[2] # i=0 # # n[0]=a[1]*b[2]-a[2]*b[1] # n[1]=a[2]*b[0]-a[0]*b[2] # n[2]=a[0]*b[1]-a[1]*b[0] # d1=n[0]*p[0]+n[1]*p[1]+n[2]*p[2] # d2=n[0]*(p[0]-c[0])+n[1]*(p[1]-c[1])+n[2]*(p[2]-c[2]) # if cmp(d1,0)*cmp(d2,0)==-1 or 
abs(d1) < 1e-4 or abs(d2) < 1e-4: # i+=1; # # n[0]=a[1]*c[2]-a[2]*c[1] # n[1]=a[2]*c[0]-a[0]*c[2] # n[2]=a[0]*c[1]-a[1]*c[0] # d1=n[0]*p[0]+n[1]*p[1]+n[2]*p[2] # d2=n[0]*(p[0]-b[0])+n[1]*(p[1]-b[1])+n[2]*(p[2]-b[2]) # if cmp(d1,0)*cmp(d2,0)==-1 or abs(d1) < 1e-4 or abs(d2) < 1e-4: # i+=1; # # n[0]=b[1]*c[2]-b[2]*c[1] # n[1]=b[2]*c[0]-b[0]*c[2] # n[2]=b[0]*c[1]-b[1]*c[0] # d1=n[0]*p[0]+n[1]*p[1]+n[2]*p[2] # d2=n[0]*(p[0]-a[0])+n[1]*(p[1]-a[1])+n[2]*(p[2]-a[2]) # if cmp(d1,0)*cmp(d2,0)==-1 or abs(d1) < 1e-4 or abs(d2) < 1e-4: # i+=1; # # if i==3: return True # else: return False def inBox(self, r, imv): # coorinates in the inits of box vectors rb = [0, 0, 0] for i in range(3): for j in range(3): rb[i] += imv[i][j] * r[j] if (rb[0] >= -0.5 and rb[0] <= 0.5) and \ (rb[1] >= -0.5 and rb[1] <= 0.5) and \ (rb[2] >= -0.5 and rb[2] <= 0.5): return True else: return False def fillBox(self): print " " print "populating the box" print " box parameters" print " center : %12.6f %12.6f %12.6f"%(self.box[0][0], self.box[0][1], self.box[0][2]) print " v1 : %12.6f %12.6f %12.6f"%(self.box[1][0], self.box[1][1], self.box[1][2]) print " v2 : %12.6f %12.6f %12.6f"%(self.box[2][0], self.box[2][1], self.box[2][2]) print " v3 : %12.6f %12.6f %12.6f"%(self.box[3][0], self.box[3][1], self.box[3][2]) mv = [[0, 0, 0], [0, 0, 0], [0, 0, 0]] for i in range(3): for x in range(3): mv[x][i] = self.box[1+i][x] imv = [[0, 0, 0], [0, 0, 0], [0, 0, 0]] r3minv(mv, imv) for ias in range(len(self.geometry.atomList)): if self.geometry.atomList[ias].species.visible: for i1 in range(-4, 5): for i2 in range(-4, 5): for i3 in range(-4, 5): # absolute position (position in the unit cell + translation) r = [0, 0, 0] for x in range(3): r[x] = self.geometry.atomList[ias].posc[x] + \ i1 * self.geometry.avec[0][x] + \ i2 * self.geometry.avec[1][x] + \ i3 * self.geometry.avec[2][x] # position with respect to the center of the box r0 = [0, 0, 0] for x in range(3): r0[x] = r[x] - self.box[0][x] if self.inBox(r0, imv): self.atoms.append([ias, r]) return def writeAtoms(self): print " " print "writing ATOMS.dx" fout = open("ATOMS.dx", "w+") fout.write("object 1 class array type float rank 0 items %i data follows\n"%len(self.atoms)) for i in range(len(self.atoms)): ias = self.atoms[i][0] fout.write("%f\n"%self.geometry.atomList[ias].species.R) fout.write("attribute \"dep\" string \"positions\"\n") fout.write("#\n") fout.write("object 2 class array type float rank 1 shape 3 items %i data follows\n"%len(self.atoms)) for i in range(len(self.atoms)): ias = self.atoms[i][0] color = self.geometry.atomList[ias].species.color fout.write("%f %f %f\n"%(color[0], color[1], color[2])) fout.write("attribute \"dep\" string \"positions\"\n") fout.write("#\n") fout.write("object 3 class array type float rank 1 shape 3 items %i data follows\n"%len(self.atoms)) for i in range(len(self.atoms)): ias = self.atoms[i][0] pos = self.atoms[i][1] fout.write("%f %f %f # %s\n"%(pos[0], pos[1], pos[2], self.geometry.atomList[ias].species.label)) fout.write("attribute \"dep\" string \"positions\"\n") fout.write("#\n") fout.write("object \"atoms\" class field\n") fout.write("component \"data\" value 1\n") fout.write("component \"colors\" value 2\n") fout.write("component \"positions\" value 3\n") fout.write("attribute \"name\" string \"cell\"") fout.close() return def index_in_atoms(self, r): for i in range(len(self.atoms)): if math.fabs(r[0] - self.atoms[i][1][0]) < 1e-10 and \ math.fabs(r[1] - self.atoms[i][1][1]) < 1e-10 and \ math.fabs(r[2] - self.atoms[i][1][2]) < 1e-10: 
return i return -1 def makeBonds(self): for ibond in range(len(self.bondList)): lbl1 = self.bondList[ibond][0] lbl2 = self.bondList[ibond][1] length = self.bondList[ibond][2] extend = self.bondList[ibond][3] # go over all atoms in the box for i in range(len(self.atoms)): ias = self.atoms[i][0] if self.geometry.atomList[ias].species.label == lbl1: # go over nearest neigbours of atom ias for j in range(len(self.geometry.atomList[ias].nghbr)): jas = self.geometry.atomList[ias].nghbr[j][0] if (self.geometry.atomList[jas].species.label == lbl2) and \ (self.geometry.atomList[ias].nghbr[j][2] <= length): # absolute position of neigbour: position of central atom + connecting vector rj = [0, 0, 0] for x in range(3): rj[x] = self.atoms[i][1][x] + self.geometry.atomList[ias].nghbr[j][1][x] # index of this neigbour in the list of atoms in the box idx = self.index_in_atoms(rj) if idx!=-1: self.bonds.append([i, idx]) elif extend: self.atoms.append([jas, rj]) self.bonds.append([i, len(self.atoms)-1]) return def writeBonds(self): print " " print "writing BONDS.dx" fout = open("BONDS.dx","w+") fout.write("object 1 class array type float rank 1 shape 3 items %i data follows\n"%len(self.atoms)) for i in range(len(self.atoms)): pos = self.atoms[i][1] fout.write("%f %f %f\n"%(pos[0], pos[1], pos[2])) fout.write("#\n") fout.write("object 2 class array type int rank 1 shape 2 items %i data follows\n"%len(self.bonds)) for i in range(len(self.bonds)): fout.write("%i %i\n"%(self.bonds[i][0], self.bonds[i][1])) fout.write("attribute \"element type\" string \"lines\"\n") fout.write("attribute \"ref\" string \"positions\"\n") fout.write("#\n") fout.write("object \"atom_connect\" class field\n") fout.write("component \"positions\" value 1\n") fout.write("component \"connections\" value 2\n") fout.write("end\n") fout.close() return def writeOrbitals(self): print " " print "writing ORBITALS.dx" fout=open("ORBITALS.dx","w+") iorb=0 for iat in range(len(self.atomList)): print self.atomList[iat].orbital if self.atomList[iat].orbital != 0: iorb+=1 r0=self.atomList[iat].posc fout.write("object %i class array type float rank 1 shape 3 items %i data follows\n"%\ ((iorb-1)*2+1,len(self.atomList[iat].orbital.pos))) for i in range(len(self.atomList[iat].orbital.pos)): r=[0,0,0] for x in range(3): r[x]=r0[x]+self.atomList[iat].orbital.pos[i][x] fout.write("%f %f %f\n"%(r[0],r[1],r[2])) fout.write("#\n") fout.write("object %i class array type int rank 1 shape 3 items %i data follows\n"%\ ((iorb-1)*2+2,len(self.atomList[iat].orbital.tri))) for i in range(len(self.atomList[iat].orbital.tri)): fout.write("%i %i %i\n"%(self.atomList[iat].orbital.tri[i][0],\ self.atomList[iat].orbital.tri[i][1],\ self.atomList[iat].orbital.tri[i][2])) fout.write("attribute \"ref\" string \"positions\"\n") fout.write("attribute \"element type\" string \"triangles\"\n") fout.write("attribute \"dep\" string \"connections\"\n") fout.write("#\n") fout.write("object \"orbital%i\" class field\n"%iorb) fout.write("component \"positions\" value %i\n"%((iorb-1)*2+1)) fout.write("component \"connections\" value %i\n"%((iorb-1)*2+2)) fout.write("#\n") norb=iorb fout.write("object \"orbital\" class group\n") for iorb in range(norb): fout.write(" member %i value \"orbital%i\"\n"%(iorb,iorb+1)) fout.write("end\n") fout.close() # # # print " " print "GEOMTERY.OUT to OpenDX" print " " # # get the geometry # geometry = Geometry() # # 3D box (center point + 3 non-collinear vectors) # example: # box=[[0,0,0],[10,0,0],[0,10,0],[0,0,10]] box = [[0, 0, 0], [1, 0, 0], [0, 1, 
0], [0, 0, 1]] #geometry.avec[0],geometry.avec[1],geometry.avec[2]] # # cell with user-defined shape # cell = Cell(geometry, box) # # cell.hide(label) # hides species with a given label # example: # cell.hide("Ca") #cell.hide("Y") # # cell.atomSphere(label,color,radius) # sets [r,g,b] color and radius of species with a given label # example: red Mn sphere with radius 1.0 # cell.atomSphere("Mn",[1.0,0.0,0.0],1.0) cell.atomSphere("La", [0.0, 1.0, 0.0], 1.0) cell.atomSphere("Cu", [1.0, 0.0, 0.0], 1.0) cell.atomSphere("O", [0.0, 0.0, 1.0], 1.0) # # cell.bond(label1,label2,d,extend) # defines a bond with a maximum length 'd' from species 'label1' to species 'label2' # if extend is True, the bond can go outside the box # example: Mn-O bond # cell.bond("Mn","O",5,True) cell.bond("Cu", "O", 5, True) # # cell.atomOrbital(j, file_name, i) # defines angular part of the site-centered orbital i for atom j; # the orbital coefficients are taken from file file_name # example: #cell.atomOrbital(4, "Cu1_mtrx.txt", 1) # # write to .dx files # cell.write()
rgvanwesep/exciting-plus-rgvw-mod
utilities/geometry2dx__mtrx.py
Python
gpl-3.0
20,319
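`r3minv` above inverts a 3x3 matrix via cofactors before `Cell.fillBox` converts Cartesian positions into box coordinates. The standalone sanity-check sketch below assumes `r3minv` has been copied into scope, since importing the script directly would also run its GEOMETRY.OUT parsing and file output.

# Hedged sanity check for a cofactor-based 3x3 inverse such as r3minv above.
def mat3_mul(a, b):
    return [[sum(a[i][k] * b[k][j] for k in range(3)) for j in range(3)]
            for i in range(3)]

a = [[2.0, 0.0, 0.0],
     [0.0, 3.0, 0.0],
     [1.0, 0.0, 4.0]]
b = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
# r3minv(a, b)            # fills b with the inverse of a (assumed in scope)
# prod = mat3_mul(a, b)   # should be numerically close to the identity matrix
# assert all(abs(prod[i][j] - (1.0 if i == j else 0.0)) < 1e-12
#            for i in range(3) for j in range(3))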
#Copyright (C) 2014 Marc Herndon # #This program is free software; you can redistribute it and/or #modify it under the terms of the GNU General Public License, #version 2, as published by the Free Software Foundation. # #This program is distributed in the hope that it will be useful, #but WITHOUT ANY WARRANTY; without even the implied warranty of #MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #GNU General Public License for more details. # #You should have received a copy of the GNU General Public License #along with this program; if not, write to the Free Software #Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ This module contains the definition of the `Attribute` class, used to represent individual SMART attributes associated with a `Device`. """ class Attribute(object): """ Contains all of the information associated with a single SMART attribute in a `Device`'s SMART table. This data is intended to exactly mirror that obtained through smartctl. """ def __init__(self, num, name, flags, value, worst, thresh, attr_type, updated, when_failed, raw): self.num = num """**(str):** Attribute's ID as a decimal value (1-255).""" self.name = name """ **(str):** Attribute's name, as reported by smartmontools' drive.db. """ self.flags = flags """**(str):** Attribute flags as a hexadecimal value (ie: 0x0032).""" self.value = value """**(str):** Attribute's current normalized value.""" self.worst = worst """**(str):** Worst recorded normalized value for this attribute.""" self.thresh = thresh """**(str):** Attribute's failure threshold.""" self.type = attr_type """**(str):** Attribute's type, generally 'pre-fail' or 'old-age'.""" self.updated = updated """ **(str):** When is this attribute updated? Generally 'Always' or 'Offline' """ self.when_failed = when_failed """ **(str):** When did this attribute cross below `pySMART.attribute.Attribute.thresh`? Reads '-' when not failed. Generally either 'FAILING_NOW' or 'In_the_Past' otherwise. """ self.raw = raw """**(str):** Attribute's current raw (non-normalized) value.""" def __repr__(self): """Define a basic representation of the class object.""" return "<SMART Attribute %r %s/%s raw:%s>" % ( self.name, self.value, self.thresh, self.raw) def __str__(self): """ Define a formatted string representation of the object's content. In the interest of not overflowing 80-character lines this does not print the value of `pySMART.attribute.Attribute.flags_hex`. """ return "{0:>3} {1:24}{2:4}{3:4}{4:4}{5:9}{6:8}{7:12}{8}".format( self.num, self.name, self.value, self.worst, self.thresh, self.type, self.updated, self.when_failed, self.raw) __all__ = ['Attribute']
scith/htpc-manager_ynh
sources/libs/pySMART/attribute.py
Python
gpl-3.0
3,070
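A short usage sketch for the `Attribute` class above: the constructor arguments mirror one row of a smartctl attribute table, and the values shown here are invented for illustration, assuming `Attribute` is imported from the module above (e.g. `from pySMART.attribute import Attribute`).

# Hedged example: constructing an Attribute from one (made-up) smartctl row.
attr = Attribute('9', 'Power_On_Hours', '0x0032', '095', '095', '000',
                 'old-age', 'Always', '-', '20175')
print(repr(attr))  # <SMART Attribute 'Power_On_Hours' 095/000 raw:20175>
print(str(attr))   # one fixed-width line mirroring the smartctl table layout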
from .main import Sabnzbd


def start():
    return Sabnzbd()

config = [{
    'name': 'sabnzbd',
    'groups': [
        {
            'tab': 'downloaders',
            'list': 'download_providers',
            'name': 'sabnzbd',
            'label': 'Sabnzbd',
            'description': 'Use <a href="http://sabnzbd.org/" target="_blank">SABnzbd</a> (0.7+) to download NZBs.',
            'wizard': True,
            'options': [
                {
                    'name': 'enabled',
                    'default': 0,
                    'type': 'enabler',
                    'radio_group': 'nzb',
                },
                {
                    'name': 'host',
                    'default': 'localhost:8080',
                },
                {
                    'name': 'api_key',
                    'label': 'Api Key',
                    'description': 'Used for all calls to Sabnzbd.',
                },
                {
                    'name': 'category',
                    'label': 'Category',
                    'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
                },
                {
                    'name': 'priority',
                    'label': 'Priority',
                    'type': 'dropdown',
                    'default': '0',
                    'advanced': True,
                    'values': [('Paused', -2), ('Low', -1), ('Normal', 0), ('High', 1), ('Forced', 2)],
                    'description': 'Add to the queue with this priority.',
                },
                {
                    'name': 'manual',
                    'default': False,
                    'type': 'bool',
                    'advanced': True,
                    'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
                },
                {
                    'name': 'remove_complete',
                    'advanced': True,
                    'label': 'Remove NZB',
                    'default': False,
                    'type': 'bool',
                    'description': 'Remove the NZB from history after it completed.',
                },
                {
                    'name': 'delete_failed',
                    'default': True,
                    'advanced': True,
                    'type': 'bool',
                    'description': 'Delete a release after the download has failed.',
                },
            ],
        }
    ],
}]
jerbob92/CouchPotatoServer
couchpotato/core/downloaders/sabnzbd/__init__.py
Python
gpl-3.0
2,548
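The `config` list above follows CouchPotato's downloader-plugin layout: groups of options, each with an optional default. A small, illustrative traversal that collects those defaults is shown below; `option_defaults` is not part of the plugin.

# Illustrative traversal of the config structure above: collect each
# option's name and declared default (None when no default is given).
def option_defaults(config):
    defaults = {}
    for block in config:
        for group in block.get('groups', []):
            for option in group.get('options', []):
                defaults[option['name']] = option.get('default')
    return defaults

# option_defaults(config)
# -> {'enabled': 0, 'host': 'localhost:8080', 'api_key': None, 'priority': '0', ...}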
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2012 OpenERP S.A (<http://www.openerp.com>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name' : 'Portal',
    'version': '1.0',
    'depends': [
        'base',
        'share',
        'auth_signup',
    ],
    'author': 'OpenERP SA',
    'category': 'Portal',
    'description': """
Customize access to your OpenERP database to external users by creating portals.
================================================================================
A portal defines a specific user menu and access rights for its members. This
menu can be seen by portal members, public users and any other user that has
access to technical features (e.g. the administrator).
Also, each portal member is linked to a specific partner.

The module also associates user groups to the portal users (adding a group in
the portal automatically adds it to the portal users, etc). That feature is
very handy when used in combination with the module 'share'.
""",
    'website': 'http://www.openerp.com',
    'data': [
        'portal_data.xml',
        'portal_view.xml',
        'wizard/portal_wizard_view.xml',
        'wizard/share_wizard_view.xml',
        'security/ir.model.access.csv',
    ],
    'demo': ['portal_demo.xml'],
    'css': ['static/src/css/portal.css'],
    'auto_install': True,
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
poiesisconsulting/openerp-restaurant
portal/__openerp__.py
Python
agpl-3.0
2,288
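Since an `__openerp__.py` manifest like the one above is a single dict literal plus comments, it can be inspected outside the server with `ast.literal_eval`. The sketch below is not OpenERP's own loader, and the path is illustrative.

# Hedged sketch (not OpenERP's loader): read a manifest dict for inspection.
import ast

def read_manifest(path='portal/__openerp__.py'):
    with open(path) as handle:
        return ast.literal_eval(handle.read())

# manifest = read_manifest()
# manifest['depends']       -> ['base', 'share', 'auth_signup']
# manifest['auto_install']  -> True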
""" Unit tests for the asset upload endpoint. """ from datetime import datetime from io import BytesIO from pytz import UTC import json from django.conf import settings from contentstore.tests.utils import CourseTestCase from contentstore.views import assets from contentstore.utils import reverse_course_url from xmodule.assetstore.assetmgr import AssetMetadataFoundTemporary from xmodule.assetstore import AssetMetadata from xmodule.contentstore.content import StaticContent from xmodule.contentstore.django import contentstore from xmodule.modulestore.django import modulestore from xmodule.modulestore.xml_importer import import_course_from_xml from django.test.utils import override_settings from opaque_keys.edx.locations import SlashSeparatedCourseKey, AssetLocation import mock from ddt import ddt from ddt import data TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT MAX_FILE_SIZE = settings.MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB * 1000 ** 2 class AssetsTestCase(CourseTestCase): """ Parent class for all asset tests. """ def setUp(self): super(AssetsTestCase, self).setUp() self.url = reverse_course_url('assets_handler', self.course.id) def upload_asset(self, name="asset-1", extension=".txt"): """ Post to the asset upload url """ f = self.get_sample_asset(name, extension) return self.client.post(self.url, {"name": name, "file": f}) def get_sample_asset(self, name, extension=".txt"): """Returns an in-memory file with the given name for testing""" f = BytesIO(name) f.name = name + extension return f class BasicAssetsTestCase(AssetsTestCase): """ Test getting assets via html w/o additional args """ def test_basic(self): resp = self.client.get(self.url, HTTP_ACCEPT='text/html') self.assertEquals(resp.status_code, 200) def test_static_url_generation(self): course_key = SlashSeparatedCourseKey('org', 'class', 'run') location = course_key.make_asset_key('asset', 'my_file_name.jpg') path = StaticContent.get_static_path_from_location(location) self.assertEquals(path, '/static/my_file_name.jpg') def test_pdf_asset(self): module_store = modulestore() course_items = import_course_from_xml( module_store, self.user.id, TEST_DATA_DIR, ['toy'], static_content_store=contentstore(), verbose=True ) course = course_items[0] url = reverse_course_url('assets_handler', course.id) # Test valid contentType for pdf asset (textbook.pdf) resp = self.client.get(url, HTTP_ACCEPT='application/json') self.assertContains(resp, "/c4x/edX/toy/asset/textbook.pdf") asset_location = AssetLocation.from_deprecated_string('/c4x/edX/toy/asset/textbook.pdf') content = contentstore().find(asset_location) # Check after import textbook.pdf has valid contentType ('application/pdf') # Note: Actual contentType for textbook.pdf in asset.json is 'text/pdf' self.assertEqual(content.content_type, 'application/pdf') class PaginationTestCase(AssetsTestCase): """ Tests the pagination of assets returned from the REST API. 
""" def test_json_responses(self): """ Test the ajax asset interfaces """ self.upload_asset("asset-1") self.upload_asset("asset-2") self.upload_asset("asset-3") self.upload_asset("asset-4", ".odt") # Verify valid page requests self.assert_correct_asset_response(self.url, 0, 4, 4) self.assert_correct_asset_response(self.url + "?page_size=2", 0, 2, 4) self.assert_correct_asset_response( self.url + "?page_size=2&page=1", 2, 2, 4) self.assert_correct_sort_response(self.url, 'date_added', 'asc') self.assert_correct_sort_response(self.url, 'date_added', 'desc') self.assert_correct_sort_response(self.url, 'display_name', 'asc') self.assert_correct_sort_response(self.url, 'display_name', 'desc') self.assert_correct_filter_response(self.url, 'asset_type', '') self.assert_correct_filter_response(self.url, 'asset_type', 'OTHER') self.assert_correct_filter_response( self.url, 'asset_type', 'Documents') # Verify querying outside the range of valid pages self.assert_correct_asset_response( self.url + "?page_size=2&page=-1", 0, 2, 4) self.assert_correct_asset_response( self.url + "?page_size=2&page=2", 2, 2, 4) self.assert_correct_asset_response( self.url + "?page_size=3&page=1", 3, 1, 4) @mock.patch('xmodule.contentstore.mongo.MongoContentStore.get_all_content_for_course') def test_mocked_filtered_response(self, mock_get_all_content_for_course): """ Test the ajax asset interfaces """ asset_key = self.course.id.make_asset_key( AssetMetadata.GENERAL_ASSET_TYPE, 'test.jpg') upload_date = datetime(2015, 1, 12, 10, 30, tzinfo=UTC) thumbnail_location = [ 'c4x', 'edX', 'toy', 'thumbnail', 'test_thumb.jpg', None] mock_get_all_content_for_course.return_value = [ [ { "asset_key": asset_key, "displayname": "test.jpg", "contentType": "image/jpg", "url": "/c4x/A/CS102/asset/test.jpg", "uploadDate": upload_date, "id": "/c4x/A/CS102/asset/test.jpg", "portable_url": "/static/test.jpg", "thumbnail": None, "thumbnail_location": thumbnail_location, "locked": None } ], 1 ] # Verify valid page requests self.assert_correct_filter_response(self.url, 'asset_type', 'OTHER') def assert_correct_asset_response(self, url, expected_start, expected_length, expected_total): """ Get from the url and ensure it contains the expected number of responses """ resp = self.client.get(url, HTTP_ACCEPT='application/json') json_response = json.loads(resp.content) assets_response = json_response['assets'] self.assertEquals(json_response['start'], expected_start) self.assertEquals(len(assets_response), expected_length) self.assertEquals(json_response['totalCount'], expected_total) def assert_correct_sort_response(self, url, sort, direction): """ Get from the url w/ a sort option and ensure items honor that sort """ resp = self.client.get( url + '?sort=' + sort + '&direction=' + direction, HTTP_ACCEPT='application/json') json_response = json.loads(resp.content) assets_response = json_response['assets'] name1 = assets_response[0][sort] name2 = assets_response[1][sort] name3 = assets_response[2][sort] if direction == 'asc': self.assertLessEqual(name1, name2) self.assertLessEqual(name2, name3) else: self.assertGreaterEqual(name1, name2) self.assertGreaterEqual(name2, name3) def assert_correct_filter_response(self, url, filter_type, filter_value): """ Get from the url w/ a filter option and ensure items honor that filter """ requested_file_types = settings.FILES_AND_UPLOAD_TYPE_FILTERS.get( filter_value, None) resp = self.client.get( url + '?' 
+ filter_type + '=' + filter_value, HTTP_ACCEPT='application/json') json_response = json.loads(resp.content) assets_response = json_response['assets'] if filter_value is not '': content_types = [asset['content_type'].lower() for asset in assets_response] if filter_value is 'OTHER': all_file_type_extensions = [] for file_type in settings.FILES_AND_UPLOAD_TYPE_FILTERS: all_file_type_extensions.extend(file_type) for content_type in content_types: self.assertNotIn(content_type, all_file_type_extensions) else: for content_type in content_types: self.assertIn(content_type, requested_file_types) @ddt class UploadTestCase(AssetsTestCase): """ Unit tests for uploading a file """ def setUp(self): super(UploadTestCase, self).setUp() self.url = reverse_course_url('assets_handler', self.course.id) def test_happy_path(self): resp = self.upload_asset() self.assertEquals(resp.status_code, 200) def test_no_file(self): resp = self.client.post(self.url, {"name": "file.txt"}, "application/json") self.assertEquals(resp.status_code, 400) @data( (int(MAX_FILE_SIZE / 2.0), "small.file.test", 200), (MAX_FILE_SIZE, "justequals.file.test", 200), (MAX_FILE_SIZE + 90, "large.file.test", 413), ) @mock.patch('contentstore.views.assets.get_file_size') def test_file_size(self, case, get_file_size): max_file_size, name, status_code = case get_file_size.return_value = max_file_size f = self.get_sample_asset(name=name) resp = self.client.post(self.url, { "name": name, "file": f }) self.assertEquals(resp.status_code, status_code) class DownloadTestCase(AssetsTestCase): """ Unit tests for downloading a file. """ def setUp(self): super(DownloadTestCase, self).setUp() self.url = reverse_course_url('assets_handler', self.course.id) # First, upload something. self.asset_name = 'download_test' resp = self.upload_asset(self.asset_name) self.assertEquals(resp.status_code, 200) self.uploaded_url = json.loads(resp.content)['asset']['url'] def test_download(self): # Now, download it. resp = self.client.get(self.uploaded_url, HTTP_ACCEPT='text/html') self.assertEquals(resp.status_code, 200) self.assertEquals(resp.content, self.asset_name) def test_download_not_found_throw(self): url = self.uploaded_url.replace(self.asset_name, 'not_the_asset_name') resp = self.client.get(url, HTTP_ACCEPT='text/html') self.assertEquals(resp.status_code, 404) def test_metadata_found_in_modulestore(self): # Insert asset metadata into the modulestore (with no accompanying asset). asset_key = self.course.id.make_asset_key(AssetMetadata.GENERAL_ASSET_TYPE, 'pic1.jpg') asset_md = AssetMetadata(asset_key, { 'internal_name': 'EKMND332DDBK', 'basename': 'pix/archive', 'locked': False, 'curr_version': '14', 'prev_version': '13' }) modulestore().save_asset_metadata(asset_md, 15) # Get the asset metadata and have it be found in the modulestore. # Currently, no asset metadata should be found in the modulestore. The code is not yet storing it there. # If asset metadata *is* found there, an exception is raised. This test ensures the exception is indeed raised. # THIS IS TEMPORARY. Soon, asset metadata *will* be stored in the modulestore. with self.assertRaises((AssetMetadataFoundTemporary, NameError)): self.client.get(unicode(asset_key), HTTP_ACCEPT='text/html') class AssetToJsonTestCase(AssetsTestCase): """ Unit test for transforming asset information into something we can send out to the client via JSON. 
""" @override_settings(LMS_BASE="lms_base_url") def test_basic(self): upload_date = datetime(2013, 6, 1, 10, 30, tzinfo=UTC) content_type = 'image/jpg' course_key = SlashSeparatedCourseKey('org', 'class', 'run') location = course_key.make_asset_key('asset', 'my_file_name.jpg') thumbnail_location = course_key.make_asset_key('thumbnail', 'my_file_name_thumb.jpg') # pylint: disable=protected-access output = assets._get_asset_json("my_file", content_type, upload_date, location, thumbnail_location, True) self.assertEquals(output["display_name"], "my_file") self.assertEquals(output["date_added"], "Jun 01, 2013 at 10:30 UTC") self.assertEquals(output["url"], "/c4x/org/class/asset/my_file_name.jpg") self.assertEquals(output["external_url"], "lms_base_url/c4x/org/class/asset/my_file_name.jpg") self.assertEquals(output["portable_url"], "/static/my_file_name.jpg") self.assertEquals(output["thumbnail"], "/c4x/org/class/thumbnail/my_file_name_thumb.jpg") self.assertEquals(output["id"], unicode(location)) self.assertEquals(output['locked'], True) output = assets._get_asset_json("name", content_type, upload_date, location, None, False) self.assertIsNone(output["thumbnail"]) class LockAssetTestCase(AssetsTestCase): """ Unit test for locking and unlocking an asset. """ def test_locking(self): """ Tests a simple locking and unlocking of an asset in the toy course. """ def verify_asset_locked_state(locked): """ Helper method to verify lock state in the contentstore """ asset_location = StaticContent.get_location_from_path('/c4x/edX/toy/asset/sample_static.txt') content = contentstore().find(asset_location) self.assertEqual(content.locked, locked) def post_asset_update(lock, course): """ Helper method for posting asset update. """ content_type = 'application/txt' upload_date = datetime(2013, 6, 1, 10, 30, tzinfo=UTC) asset_location = course.id.make_asset_key('asset', 'sample_static.txt') url = reverse_course_url('assets_handler', course.id, kwargs={'asset_key_string': unicode(asset_location)}) resp = self.client.post( url, # pylint: disable=protected-access json.dumps(assets._get_asset_json( "sample_static.txt", content_type, upload_date, asset_location, None, lock)), "application/json" ) self.assertEqual(resp.status_code, 201) return json.loads(resp.content) # Load the toy course. module_store = modulestore() course_items = import_course_from_xml( module_store, self.user.id, TEST_DATA_DIR, ['toy'], static_content_store=contentstore(), verbose=True ) course = course_items[0] verify_asset_locked_state(False) # Lock the asset resp_asset = post_asset_update(True, course) self.assertTrue(resp_asset['locked']) verify_asset_locked_state(True) # Unlock the asset resp_asset = post_asset_update(False, course) self.assertFalse(resp_asset['locked']) verify_asset_locked_state(False)
beni55/edx-platform
cms/djangoapps/contentstore/views/tests/test_assets.py
Python
agpl-3.0
15,204
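The pagination tests above drive the assets handler with `page`, `page_size`, `sort`, `direction`, and `asset_type` query parameters. The helper below is an illustrative way to build such query strings; it is not part of edx-platform.

# Illustrative query-string builder for the asset listing parameters
# exercised by PaginationTestCase (not part of edx-platform).
try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode         # Python 2

def assets_query(url, page=0, page_size=50, sort=None, direction='asc',
                 asset_type=None):
    params = [('page', page), ('page_size', page_size)]
    if sort:
        params += [('sort', sort), ('direction', direction)]
    if asset_type is not None:
        params.append(('asset_type', asset_type))
    return url + '?' + urlencode(params)

# assets_query('/assets/org/class/run', page=1, page_size=2, sort='display_name')
# -> '/assets/org/class/run?page=1&page_size=2&sort=display_name&direction=asc'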
# Copyright 2013 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import socket import ssl import urllib2 import mock from oslo_config import cfg import testtools import webob import webob.exc from neutron.common import exceptions as exception from neutron.tests import base from neutron import wsgi CONF = cfg.CONF TEST_VAR_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'var')) def open_no_proxy(*args, **kwargs): # NOTE(jamespage): # Deal with more secure certification chain verficiation # introduced in python 2.7.9 under PEP-0476 # https://github.com/python/peps/blob/master/pep-0476.txt if hasattr(ssl, "_create_unverified_context"): opener = urllib2.build_opener( urllib2.ProxyHandler({}), urllib2.HTTPSHandler(context=ssl._create_unverified_context()) ) else: opener = urllib2.build_opener(urllib2.ProxyHandler({})) return opener.open(*args, **kwargs) class TestWorkerService(base.BaseTestCase): """WorkerService tests.""" @mock.patch('neutron.db.api') def test_start_withoutdb_call(self, apimock): _service = mock.Mock() _service.pool = mock.Mock() _service.pool.spawn = mock.Mock() _service.pool.spawn.return_value = None _app = mock.Mock() cfg.CONF.set_override("connection", "", "database") workerservice = wsgi.WorkerService(_service, _app) workerservice.start() self.assertFalse(apimock.get_engine.called) class TestWSGIServer(base.BaseTestCase): """WSGI server tests.""" def test_start_random_port(self): server = wsgi.Server("test_random_port") server.start(None, 0, host="127.0.0.1") self.assertNotEqual(0, server.port) server.stop() server.wait() @mock.patch('neutron.openstack.common.service.ProcessLauncher') def test_start_multiple_workers(self, ProcessLauncher): launcher = ProcessLauncher.return_value server = wsgi.Server("test_multiple_processes") server.start(None, 0, host="127.0.0.1", workers=2) launcher.launch_service.assert_called_once_with(mock.ANY, workers=2) server.stop() launcher.stop.assert_called_once_with() server.wait() launcher.wait.assert_called_once_with() def test_start_random_port_with_ipv6(self): server = wsgi.Server("test_random_port") server.start(None, 0, host="::1") self.assertEqual("::1", server.host) self.assertNotEqual(0, server.port) server.stop() server.wait() def test_ipv6_listen_called_with_scope(self): server = wsgi.Server("test_app") with mock.patch.object(wsgi.eventlet, 'listen') as mock_listen: with mock.patch.object(socket, 'getaddrinfo') as mock_get_addr: mock_get_addr.return_value = [ (socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, '', ('fe80::204:acff:fe96:da87%eth0', 1234, 0, 2)) ] with mock.patch.object(server, 'pool') as mock_pool: server.start(None, 1234, host="fe80::204:acff:fe96:da87%eth0") mock_get_addr.assert_called_once_with( "fe80::204:acff:fe96:da87%eth0", 1234, socket.AF_UNSPEC, socket.SOCK_STREAM ) mock_listen.assert_called_once_with( ('fe80::204:acff:fe96:da87%eth0', 1234, 0, 2), family=socket.AF_INET6, backlog=cfg.CONF.backlog ) mock_pool.spawn.assert_has_calls([ mock.call( 
server._run, None, mock_listen.return_value) ]) def test_app(self): greetings = 'Hello, World!!!' def hello_world(env, start_response): if env['PATH_INFO'] != '/': start_response('404 Not Found', [('Content-Type', 'text/plain')]) return ['Not Found\r\n'] start_response('200 OK', [('Content-Type', 'text/plain')]) return [greetings] server = wsgi.Server("test_app") server.start(hello_world, 0, host="127.0.0.1") response = open_no_proxy('http://127.0.0.1:%d/' % server.port) self.assertEqual(greetings, response.read()) server.stop() @mock.patch.object(wsgi, 'eventlet') @mock.patch.object(wsgi, 'loggers') def test__run(self, logging_mock, eventlet_mock): server = wsgi.Server('test') server._run("app", "socket") eventlet_mock.wsgi.server.assert_called_once_with( 'socket', 'app', max_size=server.num_threads, log=mock.ANY, keepalive=CONF.wsgi_keep_alive, socket_timeout=server.client_socket_timeout ) self.assertTrue(len(logging_mock.mock_calls)) class SerializerTest(base.BaseTestCase): def test_serialize_unknown_content_type(self): """Verify that exception InvalidContentType is raised.""" input_dict = {'servers': {'test': 'pass'}} content_type = 'application/unknown' serializer = wsgi.Serializer() self.assertRaises( exception.InvalidContentType, serializer.serialize, input_dict, content_type) def test_get_deserialize_handler_unknown_content_type(self): """Verify that exception InvalidContentType is raised.""" content_type = 'application/unknown' serializer = wsgi.Serializer() self.assertRaises( exception.InvalidContentType, serializer.get_deserialize_handler, content_type) def test_serialize_content_type_json(self): """Test serialize with content type json.""" input_data = {'servers': ['test=pass']} content_type = 'application/json' serializer = wsgi.Serializer() result = serializer.serialize(input_data, content_type) self.assertEqual('{"servers": ["test=pass"]}', result) def test_deserialize_raise_bad_request(self): """Test serialize verifies that exception is raises.""" content_type = 'application/unknown' data_string = 'test' serializer = wsgi.Serializer() self.assertRaises( webob.exc.HTTPBadRequest, serializer.deserialize, data_string, content_type) def test_deserialize_json_content_type(self): """Test Serializer.deserialize with content type json.""" content_type = 'application/json' data_string = '{"servers": ["test=pass"]}' serializer = wsgi.Serializer() result = serializer.deserialize(data_string, content_type) self.assertEqual({'body': {u'servers': [u'test=pass']}}, result) class RequestDeserializerTest(testtools.TestCase): def setUp(self): super(RequestDeserializerTest, self).setUp() class JSONDeserializer(object): def deserialize(self, data, action='default'): return 'pew_json' self.body_deserializers = {'application/json': JSONDeserializer()} self.deserializer = wsgi.RequestDeserializer(self.body_deserializers) def test_get_deserializer(self): """Test RequestDeserializer.get_body_deserializer.""" expected_json_serializer = self.deserializer.get_body_deserializer( 'application/json') self.assertEqual( expected_json_serializer, self.body_deserializers['application/json']) def test_get_expected_content_type(self): """Test RequestDeserializer.get_expected_content_type.""" request = wsgi.Request.blank('/') request.headers['Accept'] = 'application/json' self.assertEqual('application/json', self.deserializer.get_expected_content_type(request)) def test_get_action_args(self): """Test RequestDeserializer.get_action_args.""" env = { 'wsgiorg.routing_args': [None, { 'controller': None, 'format': 
None, 'action': 'update', 'id': 12}]} expected = {'action': 'update', 'id': 12} self.assertEqual(expected, self.deserializer.get_action_args(env)) def test_deserialize(self): """Test RequestDeserializer.deserialize.""" with mock.patch.object( self.deserializer, 'get_action_args') as mock_method: mock_method.return_value = {'action': 'create'} request = wsgi.Request.blank('/') request.headers['Accept'] = 'application/json' deserialized = self.deserializer.deserialize(request) expected = ('create', {}, 'application/json') self.assertEqual(expected, deserialized) def test_get_body_deserializer_unknown_content_type(self): """Verify that exception InvalidContentType is raised.""" content_type = 'application/unknown' deserializer = wsgi.RequestDeserializer() self.assertRaises( exception.InvalidContentType, deserializer.get_body_deserializer, content_type) class ResponseSerializerTest(testtools.TestCase): def setUp(self): super(ResponseSerializerTest, self).setUp() class JSONSerializer(object): def serialize(self, data, action='default'): return 'pew_json' class HeadersSerializer(object): def serialize(self, response, data, action): response.status_int = 404 self.body_serializers = {'application/json': JSONSerializer()} self.serializer = wsgi.ResponseSerializer( self.body_serializers, HeadersSerializer()) def test_serialize_unknown_content_type(self): """Verify that exception InvalidContentType is raised.""" self.assertRaises( exception.InvalidContentType, self.serializer.serialize, {}, 'application/unknown') def test_get_body_serializer(self): """Verify that exception InvalidContentType is raised.""" self.assertRaises( exception.InvalidContentType, self.serializer.get_body_serializer, 'application/unknown') def test_get_serializer(self): """Test ResponseSerializer.get_body_serializer.""" content_type = 'application/json' self.assertEqual(self.body_serializers[content_type], self.serializer.get_body_serializer(content_type)) def test_serialize_json_response(self): response = self.serializer.serialize({}, 'application/json') self.assertEqual('application/json', response.headers['Content-Type']) self.assertEqual('pew_json', response.body) self.assertEqual(404, response.status_int) def test_serialize_response_None(self): response = self.serializer.serialize( None, 'application/json') self.assertEqual('application/json', response.headers['Content-Type']) self.assertEqual('', response.body) self.assertEqual(404, response.status_int) class RequestTest(base.BaseTestCase): def test_content_type_missing(self): request = wsgi.Request.blank('/tests/123', method='POST') request.body = "<body />" self.assertIsNone(request.get_content_type()) def test_content_type_unsupported(self): request = wsgi.Request.blank('/tests/123', method='POST') request.headers["Content-Type"] = "text/html" request.body = "fake<br />" self.assertIsNone(request.get_content_type()) def test_content_type_with_charset(self): request = wsgi.Request.blank('/tests/123') request.headers["Content-Type"] = "application/json; charset=UTF-8" result = request.get_content_type() self.assertEqual("application/json", result) def test_content_type_with_given_content_types(self): request = wsgi.Request.blank('/tests/123') request.headers["Content-Type"] = "application/new-type;" self.assertIsNone(request.get_content_type()) def test_content_type_from_accept(self): request = wsgi.Request.blank('/tests/123') request.headers["Accept"] = "application/json" result = request.best_match_content_type() self.assertEqual("application/json", result) request = 
wsgi.Request.blank('/tests/123') request.headers["Accept"] = ("application/json; q=0.3") result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_from_query_extension(self): request = wsgi.Request.blank('/tests/123.json') result = request.best_match_content_type() self.assertEqual("application/json", result) request = wsgi.Request.blank('/tests/123.invalid') result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_accept_and_query_extension(self): request = wsgi.Request.blank('/tests/123.json') request.headers["Accept"] = "application/json" result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_accept_default(self): request = wsgi.Request.blank('/tests/123.unsupported') request.headers["Accept"] = "application/unsupported1" result = request.best_match_content_type() self.assertEqual("application/json", result) def test_content_type_accept_with_given_content_types(self): request = wsgi.Request.blank('/tests/123') request.headers["Accept"] = "application/new_type" result = request.best_match_content_type() self.assertEqual("application/json", result) class ActionDispatcherTest(base.BaseTestCase): def test_dispatch(self): """Test ActionDispatcher.dispatch.""" serializer = wsgi.ActionDispatcher() serializer.create = lambda x: x self.assertEqual('pants', serializer.dispatch('pants', action='create')) def test_dispatch_action_None(self): """Test ActionDispatcher.dispatch with none action.""" serializer = wsgi.ActionDispatcher() serializer.create = lambda x: x + ' pants' serializer.default = lambda x: x + ' trousers' self.assertEqual('Two trousers', serializer.dispatch('Two', action=None)) def test_dispatch_default(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: x + ' pants' serializer.default = lambda x: x + ' trousers' self.assertEqual('Two trousers', serializer.dispatch('Two', action='update')) class ResponseHeadersSerializerTest(base.BaseTestCase): def test_default(self): serializer = wsgi.ResponseHeaderSerializer() response = webob.Response() serializer.serialize(response, {'v': '123'}, 'fake') self.assertEqual(200, response.status_int) def test_custom(self): class Serializer(wsgi.ResponseHeaderSerializer): def update(self, response, data): response.status_int = 404 response.headers['X-Custom-Header'] = data['v'] serializer = Serializer() response = webob.Response() serializer.serialize(response, {'v': '123'}, 'update') self.assertEqual(404, response.status_int) self.assertEqual('123', response.headers['X-Custom-Header']) class DictSerializerTest(base.BaseTestCase): def test_dispatch_default(self): serializer = wsgi.DictSerializer() self.assertEqual('', serializer.serialize({}, 'NonExistentAction')) class JSONDictSerializerTest(base.BaseTestCase): def test_json(self): input_dict = dict(servers=dict(a=(2, 3))) expected_json = '{"servers":{"a":[2,3]}}' serializer = wsgi.JSONDictSerializer() result = serializer.serialize(input_dict) result = result.replace('\n', '').replace(' ', '') self.assertEqual(expected_json, result) def test_json_with_utf8(self): input_dict = dict(servers=dict(a=(2, '\xe7\xbd\x91\xe7\xbb\x9c'))) expected_json = '{"servers":{"a":[2,"\\u7f51\\u7edc"]}}' serializer = wsgi.JSONDictSerializer() result = serializer.serialize(input_dict) result = result.replace('\n', '').replace(' ', '') self.assertEqual(expected_json, result) def test_json_with_unicode(self): input_dict = 
dict(servers=dict(a=(2, u'\u7f51\u7edc'))) expected_json = '{"servers":{"a":[2,"\\u7f51\\u7edc"]}}' serializer = wsgi.JSONDictSerializer() result = serializer.serialize(input_dict) result = result.replace('\n', '').replace(' ', '') self.assertEqual(expected_json, result) class TextDeserializerTest(base.BaseTestCase): def test_dispatch_default(self): deserializer = wsgi.TextDeserializer() self.assertEqual({}, deserializer.deserialize({}, 'update')) class JSONDeserializerTest(base.BaseTestCase): def test_json(self): data = """{"a": { "a1": "1", "a2": "2", "bs": ["1", "2", "3", {"c": {"c1": "1"}}], "d": {"e": "1"}, "f": "1"}}""" as_dict = { 'body': { 'a': { 'a1': '1', 'a2': '2', 'bs': ['1', '2', '3', {'c': {'c1': '1'}}], 'd': {'e': '1'}, 'f': '1'}}} deserializer = wsgi.JSONDeserializer() self.assertEqual(as_dict, deserializer.deserialize(data)) def test_default_raise_Malformed_Exception(self): """Test JsonDeserializer.default. Test verifies JsonDeserializer.default raises exception MalformedRequestBody correctly. """ data_string = "" deserializer = wsgi.JSONDeserializer() self.assertRaises( exception.MalformedRequestBody, deserializer.default, data_string) def test_json_with_utf8(self): data = '{"a": "\xe7\xbd\x91\xe7\xbb\x9c"}' as_dict = {'body': {'a': u'\u7f51\u7edc'}} deserializer = wsgi.JSONDeserializer() self.assertEqual(as_dict, deserializer.deserialize(data)) def test_json_with_unicode(self): data = '{"a": "\u7f51\u7edc"}' as_dict = {'body': {'a': u'\u7f51\u7edc'}} deserializer = wsgi.JSONDeserializer() self.assertEqual(as_dict, deserializer.deserialize(data)) class RequestHeadersDeserializerTest(base.BaseTestCase): def test_default(self): deserializer = wsgi.RequestHeadersDeserializer() req = wsgi.Request.blank('/') self.assertEqual({}, deserializer.deserialize(req, 'nonExistent')) def test_custom(self): class Deserializer(wsgi.RequestHeadersDeserializer): def update(self, request): return {'a': request.headers['X-Custom-Header']} deserializer = Deserializer() req = wsgi.Request.blank('/') req.headers['X-Custom-Header'] = 'b' self.assertEqual({'a': 'b'}, deserializer.deserialize(req, 'update')) class ResourceTest(base.BaseTestCase): @staticmethod def my_fault_body_function(): return 'off' class Controller(object): def index(self, request, index=None): return index def test_dispatch(self): resource = wsgi.Resource(self.Controller(), self.my_fault_body_function) actual = resource.dispatch( resource.controller, 'index', action_args={'index': 'off'}) expected = 'off' self.assertEqual(expected, actual) def test_dispatch_unknown_controller_action(self): resource = wsgi.Resource(self.Controller(), self.my_fault_body_function) self.assertRaises( AttributeError, resource.dispatch, resource.controller, 'create', {}) def test_malformed_request_body_throws_bad_request(self): resource = wsgi.Resource(None, self.my_fault_body_function) request = wsgi.Request.blank( "/", body="{mal:formed", method='POST', headers={'Content-Type': "application/json"}) response = resource(request) self.assertEqual(400, response.status_int) def test_wrong_content_type_throws_unsupported_media_type_error(self): resource = wsgi.Resource(None, self.my_fault_body_function) request = wsgi.Request.blank( "/", body="{some:json}", method='POST', headers={'Content-Type': "xxx"}) response = resource(request) self.assertEqual(400, response.status_int) def test_wrong_content_type_server_error(self): resource = wsgi.Resource(None, self.my_fault_body_function) request = wsgi.Request.blank( "/", method='POST', 
headers={'Content-Type': "unknow"}) response = resource(request) self.assertEqual(500, response.status_int) def test_call_resource_class_bad_request(self): class FakeRequest(object): def __init__(self): self.url = 'http://where.no' self.environ = 'environ' self.body = 'body' def method(self): pass def best_match_content_type(self): return 'best_match_content_type' resource = wsgi.Resource(self.Controller(), self.my_fault_body_function) request = FakeRequest() result = resource(request) self.assertEqual(400, result.status_int) def test_type_error(self): resource = wsgi.Resource(self.Controller(), self.my_fault_body_function) request = wsgi.Request.blank( "/", method='POST', headers={'Content-Type': "json"}) response = resource.dispatch( request, action='index', action_args='test') self.assertEqual(400, response.status_int) def test_call_resource_class_internal_error(self): class FakeRequest(object): def __init__(self): self.url = 'http://where.no' self.environ = 'environ' self.body = '{"Content-Type": "json"}' def method(self): pass def best_match_content_type(self): return 'application/json' resource = wsgi.Resource(self.Controller(), self.my_fault_body_function) request = FakeRequest() result = resource(request) self.assertEqual(500, result.status_int) class MiddlewareTest(base.BaseTestCase): def test_process_response(self): def application(environ, start_response): response = 'Success' return response response = application('test', 'fake') result = wsgi.Middleware(application).process_response(response) self.assertEqual('Success', result) class FaultTest(base.BaseTestCase): def test_call_fault(self): class MyException(object): status_int = 415 explanation = 'test' my_exceptions = MyException() my_fault = wsgi.Fault(exception=my_exceptions) request = wsgi.Request.blank( "/", method='POST', headers={'Content-Type': "unknow"}) response = my_fault(request) self.assertEqual(415, response.status_int) class TestWSGIServerWithSSL(base.BaseTestCase): """WSGI server tests.""" def test_app_using_ssl(self): CONF.set_default('use_ssl', True) CONF.set_default("ssl_cert_file", os.path.join(TEST_VAR_DIR, 'certificate.crt')) CONF.set_default("ssl_key_file", os.path.join(TEST_VAR_DIR, 'privatekey.key')) greetings = 'Hello, World!!!' @webob.dec.wsgify def hello_world(req): return greetings server = wsgi.Server("test_app") server.start(hello_world, 0, host="127.0.0.1") response = open_no_proxy('https://127.0.0.1:%d/' % server.port) self.assertEqual(greetings, response.read()) server.stop() def test_app_using_ssl_combined_cert_and_key(self): CONF.set_default('use_ssl', True) CONF.set_default("ssl_cert_file", os.path.join(TEST_VAR_DIR, 'certandkey.pem')) greetings = 'Hello, World!!!' @webob.dec.wsgify def hello_world(req): return greetings server = wsgi.Server("test_app") server.start(hello_world, 0, host="127.0.0.1") response = open_no_proxy('https://127.0.0.1:%d/' % server.port) self.assertEqual(greetings, response.read()) server.stop() def test_app_using_ipv6_and_ssl(self): CONF.set_default('use_ssl', True) CONF.set_default("ssl_cert_file", os.path.join(TEST_VAR_DIR, 'certificate.crt')) CONF.set_default("ssl_key_file", os.path.join(TEST_VAR_DIR, 'privatekey.key')) greetings = 'Hello, World!!!' @webob.dec.wsgify def hello_world(req): return greetings server = wsgi.Server("test_app") server.start(hello_world, 0, host="::1") response = open_no_proxy('https://[::1]:%d/' % server.port) self.assertEqual(greetings, response.read()) server.stop()
pnavarro/neutron
neutron/tests/unit/test_wsgi.py
Python
apache-2.0
26,842
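The open_no_proxy() helper in the test module above targets Python 2's urllib2. A minimal sketch of the same idea on Python 3, assuming only the standard library and keeping the intent of bypassing proxies and certificate verification for self-signed test certs (not part of the original test module):

import ssl
import urllib.request


def open_no_proxy(*args, **kwargs):
    # ssl._create_unverified_context() is private API; it is used here only
    # so tests can talk to a local server with a self-signed certificate.
    opener = urllib.request.build_opener(
        urllib.request.ProxyHandler({}),
        urllib.request.HTTPSHandler(context=ssl._create_unverified_context()),
    )
    return opener.open(*args, **kwargs)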
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.conf import urls

urlpatterns = []

# To register the URLs for your API endpoints, decorate the view class with
# @register below, and then import the endpoint module in the
# rest_api/__init__.py module.


def register(view):
    """Register API views to respond to a regex pattern.

    ``url_regex`` on a wrapped view class is used as the regex pattern.

    The view should be a standard Django class-based view implementing an
    as_view() method. The url_regex attribute of the view should be a
    standard Django URL regex pattern.
    """
    p = urls.url(view.url_regex, view.as_view())
    urlpatterns.append(p)
    return view
BiznetGIO/horizon
openstack_dashboard/api/rest/urls.py
Python
apache-2.0
1,231
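The register() decorator above expects a class-based view that exposes url_regex and as_view(). A hypothetical usage sketch follows; the VersionView class, its regex, and its payload are illustrative, not part of Horizon:

# Hypothetical endpoint module; assumes the register() helper above is
# importable as openstack_dashboard.api.rest.urls.register.
from django.http import JsonResponse
from django.views import generic

from openstack_dashboard.api.rest import urls


@urls.register
class VersionView(generic.View):
    """Illustrative endpoint; the regex and payload are made up."""
    url_regex = r'example/version/$'

    def get(self, request):
        return JsonResponse({'version': '1.0'})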
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functional tests for 3d convolutional operations.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections import math import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import test_util from tensorflow.python.ops import gradient_checker from tensorflow.python.ops import gradients_impl from tensorflow.python.ops import nn_ops import tensorflow.python.ops.nn_grad # pylint: disable=unused-import from tensorflow.python.platform import test def GetTestConfigs(): """Get all the valid tests configs to run. Returns: all the valid test configs as tuples of data_format and use_gpu. """ test_configs = [("NDHWC", False), ("NDHWC", True)] if test.is_gpu_available(cuda_only=True): # "NCDHW" format is only supported on CUDA. test_configs += [("NCDHW", True)] return test_configs class Conv3DTest(test.TestCase): def _DtypesToTest(self, use_gpu): if use_gpu: if not test_util.CudaSupportsHalfMatMulAndConv(): return [dtypes.float64, dtypes.float32] else: # It is important that float32 comes before float16 here, # as we will be using its gradients as reference for fp16 gradients. return [dtypes.float64, dtypes.float32, dtypes.float16] else: return [dtypes.float64, dtypes.float32, dtypes.float16] def _SetupValuesForDevice(self, tensor_in_sizes, filter_in_sizes, stride, padding, data_format, dtype, use_gpu): total_size_tensor = 1 total_size_filter = 1 for s in tensor_in_sizes: total_size_tensor *= s for s in filter_in_sizes: total_size_filter *= s # Initializes the input tensor with array containing numbers from 0 to 1. # We keep the input tensor values fairly small to avoid overflowing float16 # during the conv3d. 
x1 = [f * 1.0 / total_size_tensor for f in range(1, total_size_tensor + 1)] x2 = [f * 1.0 / total_size_filter for f in range(1, total_size_filter + 1)] with self.cached_session(use_gpu=use_gpu): t1 = constant_op.constant(x1, shape=tensor_in_sizes, dtype=dtype) t2 = constant_op.constant(x2, shape=filter_in_sizes, dtype=dtype) if isinstance(stride, collections.Iterable): strides = [1] + list(stride) + [1] else: strides = [1, stride, stride, stride, 1] if data_format == "NCDHW": t1 = test_util.NHWCToNCHW(t1) strides = test_util.NHWCToNCHW(strides) conv = nn_ops.conv3d(t1, t2, strides, padding=padding, data_format=data_format) if data_format == "NCDHW": conv = test_util.NCHWToNHWC(conv) return conv def _VerifyValues(self, tensor_in_sizes, filter_in_sizes, stride, padding, expected): results = [] for data_format, use_gpu in GetTestConfigs(): for dtype in self._DtypesToTest(use_gpu): result = self._SetupValuesForDevice( tensor_in_sizes, filter_in_sizes, stride, padding, data_format, dtype, use_gpu=use_gpu) results.append(result) with self.cached_session() as sess: values = self.evaluate(results) for value in values: print("expected = ", expected) print("actual = ", value) tol = 1e-6 if value.dtype == np.float16: tol = 1e-3 self.assertAllClose(expected, value.flatten(), atol=tol, rtol=tol) def _ComputeReferenceDilatedConv(self, tensor_in_sizes, filter_in_sizes, stride, dilation, padding, data_format, use_gpu): total_size_tensor = 1 total_size_filter = 1 for s in tensor_in_sizes: total_size_tensor *= s for s in filter_in_sizes: total_size_filter *= s # Initializes the input tensor with array containing incrementing # numbers from 1. x1 = [f * 1.0 for f in range(1, total_size_tensor + 1)] x2 = [f * 1.0 for f in range(1, total_size_filter + 1)] with self.cached_session(use_gpu=use_gpu): t1 = constant_op.constant(x1, shape=tensor_in_sizes) t2 = constant_op.constant(x2, shape=filter_in_sizes) if isinstance(stride, collections.Iterable): strides = list(stride) else: strides = [stride, stride, stride] if data_format == "NCDHW": t1 = test_util.NHWCToNCHW(t1) full_strides = [1, 1] + strides full_dilation = [1, 1] + dilation else: full_strides = [1] + strides + [1] full_dilation = [1] + dilation + [1] expected = nn_ops.convolution( t1, t2, padding=padding, strides=strides, dilation_rate=dilation, data_format=data_format) computed = nn_ops.conv3d( t1, t2, strides=full_strides, dilations=full_dilation, padding=padding, data_format=data_format) if data_format == "NCDHW": expected = test_util.NCHWToNHWC(expected) computed = test_util.NCHWToNHWC(computed) return expected, computed def _VerifyDilatedConvValues(self, tensor_in_sizes, filter_in_sizes, stride, padding, dilations): expected_results = [] computed_results = [] default_dilations = ( dilations[0] == 1 and dilations[1] == 1 and dilations[2] == 1) for data_format, use_gpu in GetTestConfigs(): # If any dilation rate is larger than 1, only do test on the GPU # because we currently do not have a CPU implementation for arbitrary # dilation rates. 
if default_dilations or use_gpu: expected, computed = self._ComputeReferenceDilatedConv( tensor_in_sizes, filter_in_sizes, stride, dilations, padding, data_format, use_gpu) expected_results.append(expected) computed_results.append(computed) tolerance = 1e-2 if use_gpu else 1e-5 with self.cached_session() as sess: expected_values = self.evaluate(expected_results) computed_values = self.evaluate(computed_results) for e_value, c_value in zip(expected_values, computed_values): print("expected = ", e_value) print("actual = ", c_value) self.assertAllClose( e_value.flatten(), c_value.flatten(), atol=tolerance, rtol=1e-6) def testConv3D1x1x1Filter(self): expected_output = [ 0.18518519, 0.22222222, 0.25925926, 0.40740741, 0.5, 0.59259259, 0.62962963, 0.77777778, 0.92592593, 0.85185185, 1.05555556, 1.25925926, 1.07407407, 1.33333333, 1.59259259, 1.2962963, 1.61111111, 1.92592593 ] # These are equivalent to the Conv2D1x1 case. self._VerifyValues( tensor_in_sizes=[1, 2, 3, 1, 3], filter_in_sizes=[1, 1, 1, 3, 3], stride=1, padding="VALID", expected=expected_output) self._VerifyValues( tensor_in_sizes=[1, 2, 1, 3, 3], filter_in_sizes=[1, 1, 1, 3, 3], stride=1, padding="VALID", expected=expected_output) self._VerifyValues( tensor_in_sizes=[1, 1, 2, 3, 3], filter_in_sizes=[1, 1, 1, 3, 3], stride=1, padding="VALID", expected=expected_output) def testConv3D1x1x1Filter2x1x1Dilation(self): if test.is_gpu_available(cuda_only=True): self._VerifyDilatedConvValues( tensor_in_sizes=[1, 3, 6, 1, 1], filter_in_sizes=[1, 1, 1, 1, 1], stride=1, padding="VALID", dilations=[2, 1, 1]) # Expected values computed using scipy's correlate function. def testConv3D2x2x2Filter(self): expected_output = [ 3.77199074, 3.85069444, 3.92939815, 4.2650463, 4.35763889, 4.45023148, 6.73032407, 6.89236111, 7.05439815, 7.22337963, 7.39930556, 7.57523148, 9.68865741, 9.93402778, 10.17939815, 10.18171296, 10.44097222, 10.70023148 ] # expected_shape = [1, 3, 1, 2, 5] self._VerifyValues( tensor_in_sizes=[1, 4, 2, 3, 3], # b, z, y, x, fin filter_in_sizes=[2, 2, 2, 3, 3], # z, y, x, fin, fout stride=1, padding="VALID", expected=expected_output) def testConv3D2x2x2Filter1x2x1Dilation(self): if test.is_gpu_available(cuda_only=True): self._VerifyDilatedConvValues( tensor_in_sizes=[1, 4, 6, 3, 1], filter_in_sizes=[2, 2, 2, 1, 1], stride=1, padding="VALID", dilations=[1, 2, 1]) def testConv3DStrides(self): expected_output = [ 0.06071429, 0.08988095, 0.10238095, 0.11488095, 0.12738095, 0.13988095, 0.08452381, 0.26071429, 0.35238095, 0.36488095, 0.37738095, 0.38988095, 0.40238095, 0.23452381, 0.46071429, 0.61488095, 0.62738095, 0.63988095, 0.65238095, 0.66488095, 0.38452381, 1.12738095, 1.48988095, 1.50238095, 1.51488095, 1.52738095, 1.53988095, 0.88452381, 1.32738095, 1.75238095, 1.76488095, 1.77738095, 1.78988095, 1.80238095, 1.03452381, 1.52738095, 2.01488095, 2.02738095, 2.03988095, 2.05238095, 2.06488095, 1.18452381, 2.19404762, 2.88988095, 2.90238095, 2.91488095, 2.92738095, 2.93988095, 1.68452381, 2.39404762, 3.15238095, 3.16488095, 3.17738095, 3.18988095, 3.20238095, 1.83452381, 2.59404762, 3.41488095, 3.42738095, 3.43988095, 3.45238095, 3.46488095, 1.98452381 ] self._VerifyValues( tensor_in_sizes=[1, 5, 8, 7, 1], filter_in_sizes=[1, 2, 3, 1, 1], stride=[2, 3, 1], # different stride for each spatial dimension padding="SAME", expected=expected_output) def testConv3D2x2x2FilterStride2(self): expected_output = [ 3.77199074, 3.85069444, 3.92939815, 9.68865741, 9.93402778, 10.17939815 ] self._VerifyValues( tensor_in_sizes=[1, 4, 2, 3, 3], 
filter_in_sizes=[2, 2, 2, 3, 3], stride=2, padding="VALID", expected=expected_output) def testConv3DStride3(self): expected_output = [ 1.51140873, 1.57167659, 1.63194444, 1.56349206, 1.62673611, 1.68998016, 1.6155754, 1.68179563, 1.74801587, 1.9280754, 2.01215278, 2.09623016, 1.98015873, 2.0672123, 2.15426587, 2.03224206, 2.12227183, 2.21230159, 4.4280754, 4.65500992, 4.88194444, 4.48015873, 4.71006944, 4.93998016, 4.53224206, 4.76512897, 4.99801587, 4.84474206, 5.09548611, 5.34623016, 4.8968254, 5.15054563, 5.40426587, 4.94890873, 5.20560516, 5.46230159 ] self._VerifyValues( tensor_in_sizes=[1, 6, 7, 8, 2], filter_in_sizes=[3, 2, 1, 2, 3], stride=3, padding="VALID", expected=expected_output) def testConv3D2x2x2FilterStride2Same(self): expected_output = [ 3.77199074, 3.85069444, 3.92939815, 2.0162037, 2.06597222, 2.11574074, 9.68865741, 9.93402778, 10.17939815, 4.59953704, 4.73263889, 4.86574074 ] self._VerifyValues( tensor_in_sizes=[1, 4, 2, 3, 3], filter_in_sizes=[2, 2, 2, 3, 3], stride=2, padding="SAME", expected=expected_output) def testKernelSmallerThanStride(self): expected_output = [ 0.03703704, 0.11111111, 0.25925926, 0.33333333, 0.7037037, 0.77777778, 0.92592593, 1. ] self._VerifyValues( tensor_in_sizes=[1, 3, 3, 3, 1], filter_in_sizes=[1, 1, 1, 1, 1], stride=2, padding="SAME", expected=expected_output) self._VerifyValues( tensor_in_sizes=[1, 3, 3, 3, 1], filter_in_sizes=[1, 1, 1, 1, 1], stride=2, padding="VALID", expected=expected_output) expected_output = [ 0.54081633, 0.58017493, 0.28061224, 0.81632653, 0.85568513, 0.40306122, 0.41873178, 0.4340379, 0.19642857, 2.46938776, 2.50874636, 1.1377551, 2.74489796, 2.78425656, 1.26020408, 1.16873178, 1.1840379, 0.51785714, 1.09511662, 1.10604956, 0.44642857, 1.17164723, 1.18258017, 0.47704082, 0.3691691, 0.37244898, 0.125 ] self._VerifyValues( tensor_in_sizes=[1, 7, 7, 7, 1], filter_in_sizes=[2, 2, 2, 1, 1], stride=3, padding="SAME", expected=expected_output) expected_output = [ 0.540816, 0.580175, 0.816327, 0.855685, 2.469388, 2.508746, 2.744898, 2.784257 ] self._VerifyValues( tensor_in_sizes=[1, 7, 7, 7, 1], filter_in_sizes=[2, 2, 2, 1, 1], stride=3, padding="VALID", expected=expected_output) def testKernelSizeMatchesInputSize(self): self._VerifyValues( tensor_in_sizes=[1, 2, 1, 2, 1], filter_in_sizes=[2, 1, 2, 1, 2], stride=1, padding="VALID", expected=[1.5625, 1.875]) def _ConstructAndTestGradientForConfig( self, batch, input_shape, filter_shape, in_depth, out_depth, stride, padding, test_input, data_format, use_gpu): input_planes, input_rows, input_cols = input_shape filter_planes, filter_rows, filter_cols = filter_shape input_shape = [batch, input_planes, input_rows, input_cols, in_depth] filter_shape = [ filter_planes, filter_rows, filter_cols, in_depth, out_depth ] if isinstance(stride, collections.Iterable): strides = [1] + list(stride) + [1] else: strides = [1, stride, stride, stride, 1] if padding == "VALID": output_planes = int( math.ceil((input_planes - filter_planes + 1.0) / strides[1])) output_rows = int( math.ceil((input_rows - filter_rows + 1.0) / strides[2])) output_cols = int( math.ceil((input_cols - filter_cols + 1.0) / strides[3])) else: output_planes = int(math.ceil(float(input_planes) / strides[1])) output_rows = int(math.ceil(float(input_rows) / strides[2])) output_cols = int(math.ceil(float(input_cols) / strides[3])) output_shape = [batch, output_planes, output_rows, output_cols, out_depth] input_size = 1 for x in input_shape: input_size *= x filter_size = 1 for x in filter_shape: filter_size *= x input_data = 
[x * 1.0 / input_size for x in range(0, input_size)] filter_data = [x * 1.0 / filter_size for x in range(0, filter_size)] for data_type in self._DtypesToTest(use_gpu=use_gpu): # TODO(mjanusz): Modify gradient_checker to also provide max relative # error and synchronize the tolerance levels between the tests for forward # and backward computations. if data_type == dtypes.float64: tolerance = 1e-8 elif data_type == dtypes.float32: tolerance = 5e-3 elif data_type == dtypes.float16: tolerance = 1e-3 with self.cached_session(use_gpu=use_gpu): orig_input_tensor = constant_op.constant( input_data, shape=input_shape, dtype=data_type, name="input") filter_tensor = constant_op.constant( filter_data, shape=filter_shape, dtype=data_type, name="filter") if data_format == "NCDHW": input_tensor = test_util.NHWCToNCHW(orig_input_tensor) new_strides = test_util.NHWCToNCHW(strides) else: input_tensor = orig_input_tensor new_strides = strides conv = nn_ops.conv3d( input_tensor, filter_tensor, new_strides, padding, data_format=data_format, name="conv") if data_format == "NCDHW": conv = test_util.NCHWToNHWC(conv) self.assertEqual(conv.shape, tensor_shape.TensorShape(output_shape)) if test_input: jacob_t, jacob_n = gradient_checker.compute_gradient( orig_input_tensor, input_shape, conv, output_shape) else: jacob_t, jacob_n = gradient_checker.compute_gradient( filter_tensor, filter_shape, conv, output_shape) if data_type != dtypes.float16: reference_jacob_t = jacob_t err = np.fabs(jacob_t - jacob_n).max() else: # Compare fp16 theoretical gradients to fp32 theoretical gradients, # since fp16 numerical gradients are too imprecise. err = np.fabs(jacob_t - reference_jacob_t).max() print("conv3d gradient error = ", err) self.assertLess(err, tolerance) def ConstructAndTestGradient(self, **kwargs): for data_format, use_gpu in GetTestConfigs(): self._ConstructAndTestGradientForConfig(data_format=data_format, use_gpu=use_gpu, **kwargs) @test_util.run_deprecated_v1 def testInputGradientValidPaddingStrideOne(self): self.ConstructAndTestGradient( batch=2, input_shape=(3, 5, 4), filter_shape=(3, 3, 3), in_depth=2, out_depth=3, stride=1, padding="VALID", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientValidPaddingStrideOne(self): self.ConstructAndTestGradient( batch=4, input_shape=(4, 6, 5), filter_shape=(2, 2, 2), in_depth=2, out_depth=3, stride=1, padding="VALID", test_input=False) @test_util.run_deprecated_v1 def testInputGradientValidPaddingStrideTwo(self): self.ConstructAndTestGradient( batch=2, input_shape=(6, 3, 5), filter_shape=(3, 3, 3), in_depth=2, out_depth=3, stride=2, padding="VALID", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientValidPaddingStrideTwo(self): self.ConstructAndTestGradient( batch=2, input_shape=(7, 6, 5), filter_shape=(2, 2, 2), in_depth=2, out_depth=3, stride=2, padding="VALID", test_input=False) @test_util.run_deprecated_v1 def testInputGradientValidPaddingStrideThree(self): self.ConstructAndTestGradient( batch=2, input_shape=(3, 7, 6), filter_shape=(3, 3, 3), in_depth=2, out_depth=3, stride=3, padding="VALID", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientValidPaddingStrideThree(self): self.ConstructAndTestGradient( batch=2, input_shape=(4, 4, 7), filter_shape=(4, 4, 4), in_depth=2, out_depth=3, stride=3, padding="VALID", test_input=False) @test_util.run_deprecated_v1 def testInputGradientSamePaddingStrideOne(self): self.ConstructAndTestGradient( batch=2, input_shape=(3, 2, 2), filter_shape=(3, 2, 1), in_depth=2, out_depth=1, 
stride=1, padding="SAME", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientSamePaddingStrideOne(self): self.ConstructAndTestGradient( batch=2, input_shape=(3, 6, 5), filter_shape=(2, 2, 2), in_depth=2, out_depth=3, stride=1, padding="SAME", test_input=False) @test_util.run_deprecated_v1 def testInputGradientSamePaddingStrideTwo(self): self.ConstructAndTestGradient( batch=2, input_shape=(6, 3, 4), filter_shape=(3, 3, 3), in_depth=2, out_depth=3, stride=2, padding="SAME", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientSamePaddingStrideTwo(self): self.ConstructAndTestGradient( batch=4, input_shape=(7, 3, 5), filter_shape=(2, 2, 2), in_depth=2, out_depth=3, stride=2, padding="SAME", test_input=False) @test_util.run_deprecated_v1 def testInputGradientSamePaddingStrideThree(self): self.ConstructAndTestGradient( batch=2, input_shape=(9, 3, 6), filter_shape=(3, 3, 3), in_depth=2, out_depth=3, stride=3, padding="SAME", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientSamePaddingStrideThree(self): self.ConstructAndTestGradient( batch=2, input_shape=(9, 4, 7), filter_shape=(4, 4, 4), in_depth=2, out_depth=3, stride=3, padding="SAME", test_input=False) @test_util.run_deprecated_v1 def testInputGradientSamePaddingDifferentStrides(self): self.ConstructAndTestGradient( batch=1, input_shape=(5, 8, 7), filter_shape=(1, 2, 3), in_depth=2, out_depth=3, stride=[2, 3, 1], padding="SAME", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientKernelSizeMatchesInputSize(self): self.ConstructAndTestGradient( batch=2, input_shape=(5, 4, 3), filter_shape=(5, 4, 3), in_depth=2, out_depth=3, stride=1, padding="VALID", test_input=False) @test_util.run_deprecated_v1 def testInputGradientKernelSizeMatchesInputSize(self): self.ConstructAndTestGradient( batch=2, input_shape=(5, 4, 3), filter_shape=(5, 4, 3), in_depth=2, out_depth=3, stride=1, padding="VALID", test_input=True) def disabledtestFilterGradientSamePaddingDifferentStrides(self): self.ConstructAndTestGradient( batch=1, input_shape=(5, 8, 7), filter_shape=(1, 2, 3), in_depth=2, out_depth=3, stride=[2, 3, 1], padding="SAME", test_input=False) # Test the fast path in gemm_pack_rhs/mkldnn_gemm_pack, when channel # dimension is a multiple of packet size. @test_util.run_deprecated_v1 def testInputGradientValidPaddingStrideOneFastPath(self): self.ConstructAndTestGradient( batch=2, input_shape=(3, 5, 4), filter_shape=(2, 2, 2), in_depth=8, out_depth=2, stride=1, padding="VALID", test_input=True) @test_util.run_deprecated_v1 def testFilterGradientValidPaddingStrideOneFastPath(self): self.ConstructAndTestGradient( batch=2, input_shape=(4, 6, 5), filter_shape=(2, 2, 2), in_depth=8, out_depth=2, stride=1, padding="VALID", test_input=False) # Testing for backprops def _RunAndVerifyBackprop(self, input_sizes, filter_sizes, output_sizes, strides, dilations, padding, data_format, use_gpu, err, mode): total_input_size = 1 total_filter_size = 1 for s in input_sizes: total_input_size *= s for s in filter_sizes: total_filter_size *= s # Initializes the input tensor with array containing incrementing # numbers from 1. x1 = [f * 1.0 for f in range(1, total_input_size + 1)] x2 = [f * 1.0 for f in range(1, total_filter_size + 1)] default_dilations = ( dilations[0] == 1 and dilations[1] == 1 and dilations[2] == 1) # If any dilation rate is larger than 1, only do test on the GPU # because we currently do not have a CPU implementation for arbitrary # dilation rates. 
if default_dilations or use_gpu: with self.cached_session(use_gpu=use_gpu) as sess: if data_format == "NCDHW": input_sizes = test_util.NHWCToNCHW(input_sizes) t1 = constant_op.constant(x1, shape=input_sizes) t2 = constant_op.constant(x2, shape=filter_sizes) full_strides = [1] + strides + [1] full_dilations = [1] + dilations + [1] if data_format == "NCDHW": full_strides = test_util.NHWCToNCHW(full_strides) full_dilations = test_util.NHWCToNCHW(full_dilations) actual = nn_ops.conv3d( t1, t2, strides=full_strides, dilations=full_dilations, padding=padding, data_format=data_format) expected = nn_ops.convolution( t1, t2, padding=padding, strides=strides, dilation_rate=dilations, data_format=data_format) if data_format == "NCDHW": actual = test_util.NCHWToNHWC(actual) expected = test_util.NCHWToNHWC(expected) actual_grad = gradients_impl.gradients(actual, t1 if mode == "input" else t2)[0] expected_grad = gradients_impl.gradients(expected, t1 if mode == "input" else t2)[0] # "values" consists of two tensors for two backprops actual_value = self.evaluate(actual_grad) expected_value = self.evaluate(expected_grad) self.assertShapeEqual(actual_value, actual_grad) self.assertShapeEqual(expected_value, expected_grad) print("expected = ", expected_value) print("actual = ", actual_value) self.assertArrayNear(expected_value.flatten(), actual_value.flatten(), err) def testConv3D2x2Depth3ValidBackpropFilterStride1x1Dilation2x1(self): if test.is_gpu_available(cuda_only=True): for (data_format, use_gpu) in GetTestConfigs(): self._RunAndVerifyBackprop( input_sizes=[1, 3, 6, 1, 1], filter_sizes=[2, 2, 1, 1, 1], output_sizes=[1, 1, 5, 1, 1], strides=[1, 1, 1], dilations=[2, 1, 1], padding="VALID", data_format=data_format, use_gpu=use_gpu, err=1e-5, mode="filter") def testConv3D2x2Depth3ValidBackpropInputStride1x1Dilation2x1(self): if test.is_gpu_available(cuda_only=True): for (data_format, use_gpu) in GetTestConfigs(): self._RunAndVerifyBackprop( input_sizes=[1, 3, 6, 1, 1], filter_sizes=[2, 2, 1, 1, 1], output_sizes=[1, 1, 5, 1, 1], strides=[1, 1, 1], dilations=[2, 1, 1], padding="VALID", data_format=data_format, use_gpu=use_gpu, err=1e-5, mode="input") if __name__ == "__main__": test.main()
hfp/tensorflow-xsmm
tensorflow/python/kernel_tests/conv_ops_3d_test.py
Python
apache-2.0
27,016
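The gradient tests above derive the expected conv3d output shape from the VALID and SAME padding formulas. A small framework-free sketch of that arithmetic, assuming the same ceil-based formulas used in _ConstructAndTestGradientForConfig:

import math


def conv3d_output_dims(input_dims, filter_dims, strides, padding):
    # Mirrors the output-shape formulas in _ConstructAndTestGradientForConfig.
    if padding == "VALID":
        return [int(math.ceil((i - f + 1.0) / s))
                for i, f, s in zip(input_dims, filter_dims, strides)]
    # "SAME" padding keeps ceil(input / stride) in every spatial dimension.
    return [int(math.ceil(float(i) / s)) for i, s in zip(input_dims, strides)]


# Example: a 3x5x4 volume with a 3x3x3 filter at stride 1.
print(conv3d_output_dims([3, 5, 4], [3, 3, 3], [1, 1, 1], "VALID"))  # [1, 3, 2]
print(conv3d_output_dims([3, 5, 4], [3, 3, 3], [1, 1, 1], "SAME"))   # [3, 5, 4]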
# Copyright 2014-2015 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Bootstrap charm-helpers, installing its dependencies if necessary using
# only standard libraries.
import subprocess
import sys

try:
    import six  # flake8: noqa
except ImportError:
    if sys.version_info.major == 2:
        subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
    else:
        subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
    import six  # flake8: noqa

try:
    import yaml  # flake8: noqa
except ImportError:
    if sys.version_info.major == 2:
        subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
    else:
        subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
    import yaml  # flake8: noqa
CanonicalBootStack/charm-hacluster
tests/charmhelpers/__init__.py
Python
apache-2.0
1,285
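The bootstrap above repeats an import-or-install block per dependency. A hedged generalization of that pattern; the helper name and the package mapping are illustrative and not part of charm-helpers:

import importlib
import subprocess
import sys


def ensure_module(module_name, py2_pkg, py3_pkg):
    # Import module_name, installing the matching apt package first if needed.
    try:
        return importlib.import_module(module_name)
    except ImportError:
        pkg = py2_pkg if sys.version_info.major == 2 else py3_pkg
        subprocess.check_call(['apt-get', 'install', '-y', pkg])
        return importlib.import_module(module_name)


six = ensure_module('six', 'python-six', 'python3-six')
yaml = ensure_module('yaml', 'python-yaml', 'python3-yaml')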
# Copyright 2020 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Utilities to process compresssed files.""" import contextlib import logging import os import pathlib import re import shutil import struct import tempfile import zipfile class _ApkFileManager: def __init__(self, temp_dir): self._temp_dir = pathlib.Path(temp_dir) self._subdir_by_apks_path = {} self._infolist_by_path = {} def _MapPath(self, path): # Use numbered subdirectories for uniqueness. # Suffix with basename(path) for readability. default = '-'.join( [str(len(self._subdir_by_apks_path)), os.path.basename(path)]) return self._temp_dir / self._subdir_by_apks_path.setdefault(path, default) def InfoList(self, path): """Returns zipfile.ZipFile(path).infolist().""" ret = self._infolist_by_path.get(path) if ret is None: with zipfile.ZipFile(path) as z: ret = z.infolist() self._infolist_by_path[path] = ret return ret def SplitPath(self, minimal_apks_path, split_name): """Returns the path to the apk split extracted by ExtractSplits. Args: minimal_apks_path: The .apks file that was passed to ExtractSplits(). split_name: Then name of the split. Returns: Path to the extracted .apk file. """ subdir = self._subdir_by_apks_path[minimal_apks_path] return self._temp_dir / subdir / 'splits' / f'{split_name}-master.apk' def ExtractSplits(self, minimal_apks_path): """Extracts the master splits in the given .apks file. Returns: List of split names, with "base" always appearing first. """ dest = self._MapPath(minimal_apks_path) split_names = [] logging.debug('Extracting %s', minimal_apks_path) with zipfile.ZipFile(minimal_apks_path) as z: for filename in z.namelist(): # E.g.: # splits/base-master.apk # splits/base-en.apk # splits/vr-master.apk # splits/vr-en.apk m = re.match(r'splits/(.*)-master\.apk', filename) if m: split_names.append(m.group(1)) z.extract(filename, dest) logging.debug('Extracting %s (done)', minimal_apks_path) # Make "base" comes first since that's the main chunk of work. # Also so that --abi-filter detection looks at it first. return sorted(split_names, key=lambda x: (x != 'base', x)) @contextlib.contextmanager def ApkFileManager(): """Context manager that extracts apk splits to a temp dir.""" # Cannot use tempfile.TemporaryDirectory() here because our use of # multiprocessing results in __del__ methods being called in forked processes. temp_dir = tempfile.mkdtemp(suffix='-supersize') zip_files = _ApkFileManager(temp_dir) yield zip_files shutil.rmtree(temp_dir) @contextlib.contextmanager def UnzipToTemp(zip_path, inner_path): """Extract a |inner_path| from a |zip_path| file to an auto-deleted temp file. Args: zip_path: Path to the zip file. inner_path: Path to the file within |zip_path| to extract. Yields: The path of the temp created (and auto-deleted when context exits). """ try: logging.debug('Extracting %s', inner_path) _, suffix = os.path.splitext(inner_path) # Can't use NamedTemporaryFile() because it deletes via __del__, which will # trigger in both this and the fork()'ed processes. fd, temp_file = tempfile.mkstemp(suffix=suffix) with zipfile.ZipFile(zip_path) as z: os.write(fd, z.read(inner_path)) os.close(fd) logging.debug('Extracting %s (done)', inner_path) yield temp_file finally: os.unlink(temp_file) def ReadZipInfoExtraFieldLength(zip_file, zip_info): """Reads the value of |extraLength| from |zip_info|'s local file header. |zip_info| has an |extra| field, but it's read from the central directory. 
Android's zipalign tool sets the extra field only in local file headers. """ # Refer to https://en.wikipedia.org/wiki/Zip_(file_format)#File_headers zip_file.fp.seek(zip_info.header_offset + 28) return struct.unpack('<H', zip_file.fp.read(2))[0] def MeasureApkSignatureBlock(zip_file): """Measures the size of the v2 / v3 signing block. Refer to: https://source.android.com/security/apksigning/v2 """ # Seek to "end of central directory" struct. eocd_offset_from_end = -22 - len(zip_file.comment) zip_file.fp.seek(eocd_offset_from_end, os.SEEK_END) assert zip_file.fp.read(4) == b'PK\005\006', ( 'failed to find end-of-central-directory') # Read out the "start of central directory" offset. zip_file.fp.seek(eocd_offset_from_end + 16, os.SEEK_END) start_of_central_directory = struct.unpack('<I', zip_file.fp.read(4))[0] # Compute the offset after the last zip entry. last_info = max(zip_file.infolist(), key=lambda i: i.header_offset) last_header_size = (30 + len(last_info.filename) + ReadZipInfoExtraFieldLength(zip_file, last_info)) end_of_last_file = (last_info.header_offset + last_header_size + last_info.compress_size) return start_of_central_directory - end_of_last_file
chromium/chromium
tools/binary_size/libsupersize/zip_util.py
Python
bsd-3-clause
5,188
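ReadZipInfoExtraFieldLength() above reads the extra-field length from the local file header rather than the central directory, because zipalign only writes it there. A standalone sketch of the same offset arithmetic; the path and member name in the usage comment are illustrative:

import struct
import zipfile


def local_extra_field_length(zip_path, member_name):
    # The extra-field length is two bytes at offset 28 of the local file
    # header; zipfile's ZipInfo.extra comes from the central directory instead.
    with zipfile.ZipFile(zip_path) as z:
        info = z.getinfo(member_name)
        z.fp.seek(info.header_offset + 28)
        return struct.unpack('<H', z.fp.read(2))[0]


# Usage (illustrative path and member):
# print(local_extra_field_length('base-master.apk', 'classes.dex'))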
"""Individual test""" import datetime from django.test import TransactionTestCase from django.contrib.auth.models import User from apps.managers.player_mgr.models import Profile from apps.utils import test_utils from apps.managers.challenge_mgr.models import RoundSetting class OverallPrizeTest(TransactionTestCase): """ Tests awarding a prize to the individual overall points winner. """ def setUp(self): """ Sets up a test individual prize for the rest of the tests. This prize is not saved, as the round field is not yet set. """ self.prize = test_utils.setup_prize(award_to="individual_overall", competition_type="points") self.current_round = "Round 1" test_utils.set_competition_round() # Create test users. self.users = [User.objects.create_user("test%d" % i, "[email protected]") for i in range(0, 3)] def testNumAwarded(self): """ Simple test to check that the number of prizes to be awarded is one. """ self.prize.round = RoundSetting.objects.get(name="Round 1") self.prize.save() self.assertEqual(self.prize.num_awarded(), 1, "This prize should not be awarded to more than one user.") def testRoundLeader(self): """ Tests that we can retrieve the overall individual points leader for a round prize. """ self.prize.round = RoundSetting.objects.get(name="Round 1") self.prize.save() # Test one user profile = self.users[0].get_profile() top_points = Profile.objects.all()[0].points() profile.add_points(top_points + 1, datetime.datetime.today() - datetime.timedelta(minutes=1), "test") profile.save() self.assertEqual(self.prize.leader(), profile, "Current prize leader is not the leading user.") # Have another user move ahead in points profile2 = self.users[1].get_profile() profile2.add_points(profile.points() + 1, datetime.datetime.today(), "test") profile2.save() self.assertEqual(self.prize.leader(), profile2, "User 2 should be the leading profile.") # Have this user get the same amount of points, but an earlier award date. profile3 = self.users[2].get_profile() profile3.add_points(profile2.points(), datetime.datetime.today() - datetime.timedelta(minutes=1), "test") profile3.save() self.assertEqual(self.prize.leader(), profile2, "User 2 should still be the leading profile.") def tearDown(self): """ Deletes the created image file in prizes. """ self.prize.image.delete() self.prize.delete() class TeamPrizeTest(TransactionTestCase): """ Tests awarding a prize to the individual on each team with the most points. """ def setUp(self): """ Sets up a test individual prize for the rest of the tests. This prize is not saved, as the round field is not yet set. """ self.prize = test_utils.setup_prize(award_to="individual_team", competition_type="points") self.current_round = "Round 1" test_utils.set_competition_round() test_utils.create_teams(self) def testNumAwarded(self): """ Tests that the number of prizes awarded corresponds to the number of teams. """ self.prize.round = RoundSetting.objects.get(name="Round 1") self.prize.save() self.assertEqual(self.prize.num_awarded(), len(self.teams), "This should correspond to the number of teams.") def testRoundLeader(self): """ Tests that we can retrieve the overall individual points leader for a round prize. 
""" self.prize.round = RoundSetting.objects.get(name="Round 1") self.prize.save() # Test one user profile = self.users[0].get_profile() profile.add_points(10, datetime.datetime.today(), "test") profile.save() self.assertEqual(self.prize.leader(team=profile.team), profile, "Current prize leader is not the leading user.") # Have a user on the same team move ahead in points. profile3 = self.users[2].get_profile() profile3.add_points(11, datetime.datetime.today(), "test") profile3.save() self.assertEqual(self.prize.leader(team=profile.team), profile3, "User 3 should be the the leader.") # Try a user on a different team. profile2 = self.users[1].get_profile() profile2.add_points(20, datetime.datetime.today(), "test") profile2.save() self.assertEqual(self.prize.leader(team=profile.team), profile3, "User 3 should be the leading profile on user 1's team.") self.assertEqual(self.prize.leader(team=profile2.team), profile2, "User 2 should be the leading profile on user 2's team.") def tearDown(self): """ Deletes the created image file in prizes. """ self.prize.image.delete() self.prize.delete()
vijayanandau/KnowledgeShare
makahiki/apps/widgets/prizes/tests/individual_prize_tests.py
Python
mit
5,225
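The prize tests above depend on a tie-break rule: when two profiles have equal points, the one whose points were awarded earlier leads. A framework-free sketch of that ordering; the tuple layout is illustrative, not the Makahiki models:

from datetime import datetime

# (name, points, last_awarded_at) -- a stand-in for Profile rows.
profiles = [
    ('user1', 25, datetime(2013, 1, 1, 12, 0)),
    ('user2', 25, datetime(2013, 1, 1, 12, 5)),
    ('user3', 20, datetime(2013, 1, 1, 11, 0)),
]

# Highest points first; on a tie, the earlier award timestamp wins.
leader = sorted(profiles, key=lambda p: (-p[1], p[2]))[0]
print(leader[0])  # user1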
import requests

from django.utils.translation import ugettext as _

from allauth.socialaccount.providers.oauth2.views import (
    OAuth2Adapter,
    OAuth2CallbackView,
    OAuth2LoginView,
)

from ..base import ProviderException
from .provider import DoubanProvider


class DoubanOAuth2Adapter(OAuth2Adapter):
    provider_id = DoubanProvider.id

    access_token_url = 'https://www.douban.com/service/auth2/token'
    authorize_url = 'https://www.douban.com/service/auth2/auth'
    profile_url = 'https://api.douban.com/v2/user/~me'

    def complete_login(self, request, app, token, **kwargs):
        headers = {'Authorization': 'Bearer %s' % token.token}
        resp = requests.get(self.profile_url, headers=headers)
        extra_data = resp.json()
        """
        Douban may return data like this:

            {
                'code': 128,
                'request': 'GET /v2/user/~me',
                'msg': 'user_is_locked:53358092'
            }
        """
        if 'id' not in extra_data:
            msg = extra_data.get('msg', _('Invalid profile data'))
            raise ProviderException(msg)
        return self.get_provider().sociallogin_from_response(
            request, extra_data)


oauth2_login = OAuth2LoginView.adapter_view(DoubanOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DoubanOAuth2Adapter)
okwow123/djangol2
example/env/lib/python2.7/site-packages/allauth/socialaccount/providers/douban/views.py
Python
mit
1,354
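complete_login() above guards against Douban returning an error payload instead of a profile. A minimal standalone sketch of that guard; the exception class here is a stand-in for allauth's ProviderException:

class ProviderError(Exception):
    """Stand-in for allauth's ProviderException."""


def extract_profile(extra_data):
    # Error payloads carry 'code'/'msg' fields instead of a user 'id'.
    if 'id' not in extra_data:
        raise ProviderError(extra_data.get('msg', 'Invalid profile data'))
    return extra_data


print(extract_profile({'id': 42, 'name': 'tester'}))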
# Copyright (c) 2016--2017 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # import sys import cStringIO import json import zipfile import os from M2Crypto import X509 from spacewalk.satellite_tools.syncLib import log2 from spacewalk.server.rhnServer.satellite_cert import SatelliteCert import constants class Manifest(object): """Class containing relevant data from RHSM manifest.""" SIGNATURE_NAME = "signature" INNER_ZIP_NAME = "consumer_export.zip" ENTITLEMENTS_PATH = "export/entitlements" CERTIFICATE_PATH = "export/extensions" PRODUCTS_PATH = "export/products" CONSUMER_INFO = "export/consumer.json" META_INFO = "export/meta.json" UPSTREAM_CONSUMER_PATH = "export/upstream_consumer" def __init__(self, zip_path): self.all_entitlements = [] self.manifest_repos = {} self.sat5_certificate = None self.satellite_version = None self.consumer_credentials = None self.uuid = None self.name = None self.ownerid = None self.api_url = None self.web_url = None self.created = None # Signature and signed data self.signature = None self.data = None # Open manifest from path top_zip = None inner_zip = None inner_file = None # normalize path zip_path = os.path.abspath(os.path.expanduser(zip_path)) try: top_zip = zipfile.ZipFile(zip_path, 'r') # Fetch inner zip file into memory try: # inner_file = top_zip.open(zip_path.split('.zip')[0] + '/' + self.INNER_ZIP_NAME) inner_file = top_zip.open(self.INNER_ZIP_NAME) self.data = inner_file.read() inner_file_data = cStringIO.StringIO(self.data) signature_file = top_zip.open(self.SIGNATURE_NAME) self.signature = signature_file.read() # Open the inner zip file try: inner_zip = zipfile.ZipFile(inner_file_data) self._extract_consumer_info(inner_zip) self._load_entitlements(inner_zip) self._extract_certificate(inner_zip) self._extract_meta_info(inner_zip) self._extract_consumer_credentials(inner_zip) finally: if inner_zip is not None: inner_zip.close() finally: if inner_file is not None: inner_file.close() finally: if top_zip is not None: top_zip.close() def _extract_certificate(self, zip_file): files = zip_file.namelist() certificates_names = [] for f in files: if f.startswith(self.CERTIFICATE_PATH) and f.endswith(".xml"): certificates_names.append(f) if len(certificates_names) >= 1: # take only first file cert_file = zip_file.open(certificates_names[0]) # take only first file self.sat5_certificate = cert_file.read().strip() cert_file.close() # Save version too sat5_cert = SatelliteCert() sat5_cert.load(self.sat5_certificate) self.satellite_version = getattr(sat5_cert, 'satellite-version') else: raise MissingSatelliteCertificateError("Satellite Certificate was not found in manifest.") def _fill_product_repositories(self, zip_file, product): product_file = zip_file.open(self.PRODUCTS_PATH + '/' + str(product.get_id()) + '.json') product_data = json.load(product_file) product_file.close() try: for content in product_data['productContent']: content = content['content'] product.add_repository(content['label'], content['contentUrl']) except KeyError: 
log2(0, 0, "ERROR: Cannot access required field in product '%s'" % product.get_id(), stream=sys.stderr) raise def _load_entitlements(self, zip_file): files = zip_file.namelist() entitlements_files = [] for f in files: if f.startswith(self.ENTITLEMENTS_PATH) and f.endswith(".json"): entitlements_files.append(f) if len(entitlements_files) >= 1: self.all_entitlements = [] for entitlement_file in entitlements_files: entitlements = zip_file.open(entitlement_file) # try block in try block - this is hack for python 2.4 compatibility # to support finally try: try: data = json.load(entitlements) # Extract credentials certs = data['certificates'] if len(certs) != 1: raise IncorrectEntitlementsFileFormatError( "Single certificate in entitlements file '%s' is expected, found: %d" % (entitlement_file, len(certs))) cert = certs[0] credentials = Credentials(data['id'], cert['cert'], cert['key']) # Extract product IDs products = [] provided_products = data['pool']['providedProducts'] or [] derived_provided_products = data['pool']['derivedProvidedProducts'] or [] product_ids = [provided_product['productId'] for provided_product in provided_products + derived_provided_products] for product_id in set(product_ids): product = Product(product_id) self._fill_product_repositories(zip_file, product) products.append(product) # Skip entitlements not providing any products if products: entitlement = Entitlement(products, credentials) self.all_entitlements.append(entitlement) except KeyError: log2(0, 0, "ERROR: Cannot access required field in file '%s'" % entitlement_file, stream=sys.stderr) raise finally: entitlements.close() else: refer_url = "%s%s" % (self.web_url, self.uuid) if not refer_url.startswith("http"): refer_url = "https://" + refer_url raise IncorrectEntitlementsFileFormatError( "No subscriptions were found in manifest.\n\nPlease refer to %s for setting up subscriptions." 
% refer_url) def _extract_consumer_info(self, zip_file): files = zip_file.namelist() found = False for f in files: if f == self.CONSUMER_INFO: found = True break if found: consumer_info = zip_file.open(self.CONSUMER_INFO) try: try: data = json.load(consumer_info) self.uuid = data['uuid'] self.name = data['name'] self.ownerid = data['owner']['key'] self.api_url = data['urlApi'] self.web_url = data['urlWeb'] except KeyError: log2(0, 0, "ERROR: Cannot access required field in file '%s'" % self.CONSUMER_INFO, stream=sys.stderr) raise finally: consumer_info.close() else: raise MissingConsumerInfoError() def _extract_meta_info(self, zip_file): files = zip_file.namelist() found = False for f in files: if f == self.META_INFO: found = True break if found: meta_info = zip_file.open(self.META_INFO) try: try: data = json.load(meta_info) self.created = data['created'] except KeyError: log2(0, 0, "ERROR: Cannot access required field in file '%s'" % self.META_INFO, stream=sys.stderr) raise finally: meta_info.close() else: raise MissingMetaInfoError() def _extract_consumer_credentials(self, zip_file): files = zip_file.namelist() consumer_credentials = [] for f in files: if f.startswith(self.UPSTREAM_CONSUMER_PATH) and f.endswith(".json"): consumer_credentials.append(f) if len(consumer_credentials) == 1: upstream_consumer = zip_file.open(consumer_credentials[0]) try: try: data = json.load(upstream_consumer) self.consumer_credentials = Credentials(data['id'], data['cert'], data['key']) except KeyError: log2(0, 0, "ERROR: Cannot access required field in file '%s'" % consumer_credentials[0], stream=sys.stderr) raise finally: upstream_consumer.close() else: raise IncorrectCredentialsError( "ERROR: Single upstream consumer certificate expected, found %d." % len(consumer_credentials)) def get_all_entitlements(self): return self.all_entitlements def get_satellite_certificate(self): return self.sat5_certificate def get_satellite_version(self): return self.satellite_version def get_consumer_credentials(self): return self.consumer_credentials def get_name(self): return self.name def get_uuid(self): return self.uuid def get_ownerid(self): return self.ownerid def get_api_url(self): return self.api_url def get_created(self): return self.created def check_signature(self): if self.signature and self.data: certs = os.listdir(constants.CANDLEPIN_CA_CERT_DIR) # At least one certificate has to match for cert_name in certs: cert_file = None try: try: cert_file = open(constants.CANDLEPIN_CA_CERT_DIR + '/' + cert_name, 'r') cert = X509.load_cert_string(cert_file.read()) except (IOError, X509.X509Error): continue finally: if cert_file is not None: cert_file.close() pubkey = cert.get_pubkey() pubkey.reset_context(md='sha256') pubkey.verify_init() pubkey.verify_update(self.data) if pubkey.verify_final(self.signature): return True return False class Entitlement(object): def __init__(self, products, credentials): if products and credentials: self.products = products self.credentials = credentials else: raise IncorrectEntitlementError() def get_products(self): return self.products def get_credentials(self): return self.credentials class Credentials(object): def __init__(self, identifier, cert, key): if identifier: self.id = identifier else: raise IncorrectCredentialsError( "ERROR: ID of credentials has to be defined" ) if cert and key: self.cert = cert self.key = key else: raise IncorrectCredentialsError( "ERROR: Trying to create object with cert = %s and key = %s" % (cert, key) ) def get_id(self): return self.id def get_cert(self): 
return self.cert def get_key(self): return self.key class Product(object): def __init__(self, identifier): try: self.id = int(identifier) except ValueError: raise IncorrectProductError( "ERROR: Invalid product id: %s" % identifier ) self.repositories = {} def get_id(self): return self.id def get_repositories(self): return self.repositories def add_repository(self, label, url): self.repositories[label] = url class IncorrectProductError(Exception): pass class IncorrectEntitlementError(Exception): pass class IncorrectCredentialsError(Exception): pass class IncorrectEntitlementsFileFormatError(Exception): pass class MissingSatelliteCertificateError(Exception): pass class ManifestValidationError(Exception): pass class MissingConsumerInfoError(Exception): pass class MissingMetaInfoError(Exception): pass
ogajduse/spacewalk
backend/cdn_tools/manifest.py
Python
gpl-2.0
13,620
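A minimal sketch of how the data classes defined in the manifest module above fit together, using only Product, Credentials and Entitlement as shown; the identifier, repository label and URL are placeholder values, not data from a real manifest.

# Illustration only: exercising the classes defined above with placeholder values.
product = Product("69")                                   # the id must parse as an int
product.add_repository("rhel-7-server-rpms",
                       "https://cdn.example.com/content/dist/rhel/server/7")

creds = Credentials("8a85f981", "<PEM certificate>", "<PEM key>")   # all three fields are required
entitlement = Entitlement([product], creds)

for prod in entitlement.get_products():
    print(prod.get_repositories())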
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### # Filename : long_anisotropically_dense.py # Author : Stephane Grabli # Date : 04/08/2005 # Purpose : Selects the lines that are long and have a high anisotropic # a priori density and uses causal density # to draw without cluttering. Ideally, half of the # selected lines are culled using the causal density. # # ********************* WARNING ************************************* # ******** The Directional a priori density maps must ****** # ******** have been computed prior to using this style module ****** from freestyle.chainingiterators import ChainSilhouetteIterator from freestyle.functions import DensityF1D from freestyle.predicates import ( NotUP1D, QuantitativeInvisibilityUP1D, UnaryPredicate1D, pyHighDensityAnisotropyUP1D, pyHigherLengthUP1D, pyLengthBP1D, ) from freestyle.shaders import ( ConstantColorShader, ConstantThicknessShader, SamplingShader, ) from freestyle.types import IntegrationType, Operators ## custom density predicate class pyDensityUP1D(UnaryPredicate1D): def __init__(self, wsize, threshold, integration=IntegrationType.MEAN, sampling=2.0): UnaryPredicate1D.__init__(self) self._wsize = wsize self._threshold = threshold self._integration = integration self._func = DensityF1D(self._wsize, self._integration, sampling) self._func2 = DensityF1D(self._wsize, IntegrationType.MAX, sampling) def __call__(self, inter): c = self._func(inter) m = self._func2(inter) if c < self._threshold: return 1 if m > 4*c: if c < 1.5*self._threshold: return 1 return 0 Operators.select(QuantitativeInvisibilityUP1D(0)) Operators.bidirectional_chain(ChainSilhouetteIterator(),NotUP1D(QuantitativeInvisibilityUP1D(0))) Operators.select(pyHigherLengthUP1D(40)) ## selects lines having a high anisotropic a priori density Operators.select(pyHighDensityAnisotropyUP1D(0.3,4)) Operators.sort(pyLengthBP1D()) shaders_list = [ SamplingShader(2.0), ConstantThicknessShader(2), ConstantColorShader(0.2,0.2,0.25,1), ] ## uniform culling Operators.create(pyDensityUP1D(3.0,2.0e-2, IntegrationType.MEAN, 0.1), shaders_list)
Microvellum/Fluid-Designer
win64-vc/2.78/scripts/freestyle/styles/long_anisotropically_dense.py
Python
gpl-3.0
3,125
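The culling rule inside pyDensityUP1D above is easy to misread in its nested form. Below is a dependency-free restatement of the same decision, where c is the mean density and m the maximum density returned by the two DensityF1D functors; the example call at the end uses made-up numbers.

def keep_stroke(c, m, threshold):
    # Same logic as pyDensityUP1D.__call__ above, written as a pure function.
    if c < threshold:
        return True                        # sparse enough on average: keep the stroke
    if m > 4 * c and c < 1.5 * threshold:
        return True                        # a dense spot, but the mean is still near the threshold
    return False                           # otherwise cull it

keep_stroke(0.01, 0.05, 2.0e-2)            # -> True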
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = ''' --- module: nxos_install_os extends_documentation_fragment: nxos short_description: Set boot options like boot, kickstart image and issu. description: - Install an operating system by setting the boot options like boot image and kickstart image and optionally select to install using ISSU (In Server Software Upgrade). notes: - Tested against the following platforms and images - N9k 7.0(3)I4(6), 7.0(3)I5(3), 7.0(3)I6(1), 7.0(3)I7(1), 7.0(3)F2(2), 7.0(3)F3(2) - N3k 6.0(2)A8(6), 6.0(2)A8(8), 7.0(3)I6(1), 7.0(3)I7(1) - N7k 7.3(0)D1(1), 8.0(1), 8.2(1) - This module requires both the ANSIBLE_PERSISTENT_CONNECT_TIMEOUT and ANSIBLE_PERSISTENT_COMMAND_TIMEOUT timers to be set to 600 seconds or higher. The module will exit if the timers are not set properly. - Do not include full file paths, just the name of the file(s) stored on the top level flash directory. - This module attempts to install the software immediately, which may trigger a reboot. - In check mode, the module will indicate if an upgrade is needed and whether or not the upgrade is disruptive or non-disruptive(ISSU). author: - Jason Edelman (@jedelman8) - Gabriele Gerbibo (@GGabriele) version_added: 2.2 options: system_image_file: description: - Name of the system (or combined) image file on flash. required: true kickstart_image_file: description: - Name of the kickstart image file on flash. (Not required on all Nexus platforms) issu: version_added: "2.5" description: - Upgrade using In Service Software Upgrade (ISSU). (Only supported on N9k platforms) - Selecting 'required' or 'yes' means that upgrades will only proceed if the switch is capable of ISSU. - Selecting 'desired' means that upgrades will use ISSU if possible but will fall back to disruptive upgrade if needed. - Selecting 'no' means do not use ISSU. Forced disruptive. choices: ['required','desired', 'yes', 'no'] default: 'no' ''' EXAMPLES = ''' - name: Install OS on N9k check_mode: no nxos_install_os: system_image_file: nxos.7.0.3.I6.1.bin issu: desired - name: Wait for device to come back up with new image wait_for: port: 22 state: started timeout: 500 delay: 60 host: "{{ inventory_hostname }}" - name: Check installed OS for newly installed version nxos_command: commands: ['show version | json'] provider: "{{ connection }}" register: output - assert: that: - output['stdout'][0]['kickstart_ver_str'] == '7.0(3)I6(1)' ''' RETURN = ''' install_state: description: Boot and install information. 
returned: always type: dictionary sample: { "install_state": [ "Compatibility check is done:", "Module bootable Impact Install-type Reason", "------ -------- -------------- ------------ ------", " 1 yes non-disruptive reset ", "Images will be upgraded according to following table:", "Module Image Running-Version(pri:alt) New-Version Upg-Required", "------ ---------- ---------------------------------------- -------------------- ------------", " 1 nxos 7.0(3)I6(1) 7.0(3)I7(1) yes", " 1 bios v4.4.0(07/12/2017) v4.4.0(07/12/2017) no" ], } ''' import re from time import sleep from ansible.module_utils.network.nxos.nxos import load_config, run_commands from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args from ansible.module_utils.basic import AnsibleModule # Output options are 'text' or 'json' def execute_show_command(module, command, output='text'): cmds = [{ 'command': command, 'output': output, }] return run_commands(module, cmds) def get_platform(module): """Determine platform type""" data = execute_show_command(module, 'show inventory', 'json') pid = data[0]['TABLE_inv']['ROW_inv'][0]['productid'] if re.search(r'N3K', pid): type = 'N3K' elif re.search(r'N5K', pid): type = 'N5K' elif re.search(r'N6K', pid): type = 'N6K' elif re.search(r'N7K', pid): type = 'N7K' elif re.search(r'N9K', pid): type = 'N9K' else: type = 'unknown' return type def parse_show_install(data): """Helper method to parse the output of the 'show install all impact' or 'install all' commands. Sample Output: Installer will perform impact only check. Please wait. Verifying image bootflash:/nxos.7.0.3.F2.2.bin for boot variable "nxos". [####################] 100% -- SUCCESS Verifying image type. [####################] 100% -- SUCCESS Preparing "bios" version info using image bootflash:/nxos.7.0.3.F2.2.bin. [####################] 100% -- SUCCESS Preparing "nxos" version info using image bootflash:/nxos.7.0.3.F2.2.bin. [####################] 100% -- SUCCESS Performing module support checks. [####################] 100% -- SUCCESS Notifying services about system upgrade. [####################] 100% -- SUCCESS Compatibility check is done: Module bootable Impact Install-type Reason ------ -------- -------------- ------------ ------ 8 yes disruptive reset Incompatible image for ISSU 21 yes disruptive reset Incompatible image for ISSU Images will be upgraded according to following table: Module Image Running-Version(pri:alt) New-Version Upg-Required ------ ---------- ---------------------------------------- ------------ 8 lcn9k 7.0(3)F3(2) 7.0(3)F2(2) yes 8 bios v01.17 v01.17 no 21 lcn9k 7.0(3)F3(2) 7.0(3)F2(2) yes 21 bios v01.70 v01.70 no """ if len(data) > 0: data = massage_install_data(data) ud = {'raw': data} ud['processed'] = [] ud['disruptive'] = False ud['upgrade_needed'] = False ud['error'] = False ud['install_in_progress'] = False ud['server_error'] = False ud['upgrade_succeeded'] = False ud['use_impact_data'] = False # Check for server errors if isinstance(data, int): if data == -1: ud['server_error'] = True elif data >= 500: ud['server_error'] = True elif data == -32603: ud['server_error'] = True return ud else: ud['list_data'] = data.split('\n') for x in ud['list_data']: # Check for errors and exit if found. 
if re.search(r'Pre-upgrade check failed', x): ud['error'] = True break if re.search(r'[I|i]nvalid command', x): ud['error'] = True break if re.search(r'No install all data found', x): ud['error'] = True break # Check for potentially transient conditions if re.search(r'Another install procedure may be in progress', x): ud['install_in_progress'] = True break if re.search(r'Backend processing error', x): ud['server_error'] = True break if re.search(r'^(-1|5\d\d)$', x): ud['server_error'] = True break # Check for messages indicating a successful upgrade. if re.search(r'Finishing the upgrade', x): ud['upgrade_succeeded'] = True break if re.search(r'Install has been successful', x): ud['upgrade_succeeded'] = True break if re.search(r'Switching over onto standby', x): ud['upgrade_succeeded'] = True break # We get these messages when the upgrade is non-disruptive and # we loose connection with the switchover but far enough along that # we can be confident the upgrade succeeded. if re.search(r'timeout trying to send command: install', x): ud['upgrade_succeeded'] = True ud['use_impact_data'] = True break if re.search(r'[C|c]onnection failure: timed out', x): ud['upgrade_succeeded'] = True ud['use_impact_data'] = True break # Begin normal parsing. if re.search(r'----|Module|Images will|Compatibility', x): ud['processed'].append(x) continue # Check to see if upgrade will be disruptive or non-disruptive and # build dictionary of individual modules and their status. # Sample Line: # # Module bootable Impact Install-type Reason # ------ -------- ---------- ------------ ------ # 8 yes disruptive reset Incompatible image rd = r'(\d+)\s+(\S+)\s+(disruptive|non-disruptive)\s+(\S+)' mo = re.search(rd, x) if mo: ud['processed'].append(x) key = 'm%s' % mo.group(1) field = 'disruptive' if mo.group(3) == 'non-disruptive': ud[key] = {field: False} else: ud[field] = True ud[key] = {field: True} field = 'bootable' if mo.group(2) == 'yes': ud[key].update({field: True}) else: ud[key].update({field: False}) continue # Check to see if switch needs an upgrade and build a dictionary # of individual modules and their individual upgrade status. # Sample Line: # # Module Image Running-Version(pri:alt) New-Version Upg-Required # ------ ----- ---------------------------------------- ------------ # 8 lcn9k 7.0(3)F3(2) 7.0(3)F2(2) yes mo = re.search(r'(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(yes|no)', x) if mo: ud['processed'].append(x) key = 'm%s_%s' % (mo.group(1), mo.group(2)) field = 'upgrade_needed' if mo.group(5) == 'yes': ud[field] = True ud[key] = {field: True} else: ud[key] = {field: False} continue return ud def massage_install_data(data): # Transport cli returns a list containing one result item. # Transport nxapi returns a list containing two items. The second item # contains the data we are interested in. 
default_error_msg = 'No install all data found' if len(data) == 1: result_data = data[0] elif len(data) == 2: result_data = data[1] else: result_data = default_error_msg # Further processing may be needed for result_data if len(data) == 2 and isinstance(data[1], dict): if 'clierror' in data[1].keys(): result_data = data[1]['clierror'] elif 'code' in data[1].keys() and data[1]['code'] == '500': # We encountered a backend processing error for nxapi result_data = data[1]['msg'] else: result_data = default_error_msg return result_data def build_install_cmd_set(issu, image, kick, type): commands = ['terminal dont-ask'] if re.search(r'required|desired|yes', issu): issu_cmd = 'non-disruptive' else: issu_cmd = '' if type == 'impact': rootcmd = 'show install all impact' else: rootcmd = 'install all' if kick is None: commands.append( '%s nxos %s %s' % (rootcmd, image, issu_cmd)) else: commands.append( '%s system %s kickstart %s' % (rootcmd, image, kick)) return commands def parse_show_version(data): version_data = {'raw': data[0].split('\n')} version_data['version'] = '' version_data['error'] = False for x in version_data['raw']: mo = re.search(r'(kickstart|system|NXOS):\s+version\s+(\S+)', x) if mo: version_data['version'] = mo.group(2) continue if version_data['version'] == '': version_data['error'] = True return version_data def check_mode_legacy(module, issu, image, kick=None): """Some platforms/images/transports don't support the 'install all impact' command so we need to use a different method.""" current = execute_show_command(module, 'show version', 'json')[0] # Call parse_show_data on empty string to create the default upgrade # data stucture dictionary data = parse_show_install('') upgrade_msg = 'No upgrade required' # Process System Image data['error'] = False tsver = 'show version image bootflash:%s' % image target_image = parse_show_version(execute_show_command(module, tsver)) if target_image['error']: data['error'] = True data['raw'] = target_image['raw'] if current['kickstart_ver_str'] != target_image['version'] and not data['error']: data['upgrade_needed'] = True data['disruptive'] = True upgrade_msg = 'Switch upgraded: system: %s' % tsver # Process Kickstart Image if kick is not None and not data['error']: tkver = 'show version image bootflash:%s' % kick target_kick = parse_show_version(execute_show_command(module, tkver)) if target_kick['error']: data['error'] = True data['raw'] = target_kick['raw'] if current['kickstart_ver_str'] != target_kick['version'] and not data['error']: data['upgrade_needed'] = True data['disruptive'] = True upgrade_msg = upgrade_msg + ' kickstart: %s' % tkver data['processed'] = upgrade_msg return data def check_mode_nextgen(module, issu, image, kick=None): """Use the 'install all impact' command for check_mode""" opts = {'ignore_timeout': True} commands = build_install_cmd_set(issu, image, kick, 'impact') data = parse_show_install(load_config(module, commands, True, opts)) # If an error is encountered when issu is 'desired' then try again # but set issu to 'no' if data['error'] and issu == 'desired': issu = 'no' commands = build_install_cmd_set(issu, image, kick, 'impact') # The system may be busy from the previous call to check_mode so loop # until it's done. 
data = check_install_in_progress(module, commands, opts) if data['server_error']: data['error'] = True return data def check_install_in_progress(module, commands, opts): for attempt in range(20): data = parse_show_install(load_config(module, commands, True, opts)) if data['install_in_progress']: sleep(1) continue break return data def check_mode(module, issu, image, kick=None): """Check switch upgrade impact using 'show install all impact' command""" data = check_mode_nextgen(module, issu, image, kick) if data['server_error']: # We encountered an unrecoverable error in the attempt to get upgrade # impact data from the 'show install all impact' command. # Fallback to legacy method. data = check_mode_legacy(module, issu, image, kick) return data def do_install_all(module, issu, image, kick=None): """Perform the switch upgrade using the 'install all' command""" impact_data = check_mode(module, issu, image, kick) if module.check_mode: # Check mode set in the playbook so just return the impact data. msg = '*** SWITCH WAS NOT UPGRADED: IMPACT DATA ONLY ***' impact_data['processed'].append(msg) return impact_data if impact_data['error']: # Check mode discovered an error so return with this info. return impact_data elif not impact_data['upgrade_needed']: # The switch is already upgraded. Nothing more to do. return impact_data else: # If we get here, check_mode returned no errors and the switch # needs to be upgraded. if impact_data['disruptive']: # Check mode indicated that ISSU is not possible so issue the # upgrade command without the non-disruptive flag. issu = 'no' commands = build_install_cmd_set(issu, image, kick, 'install') opts = {'ignore_timeout': True} # The system may be busy from the call to check_mode so loop until # it's done. upgrade = check_install_in_progress(module, commands, opts) # Special case: If we encounter a server error at this stage # it means the command was sent and the upgrade was started but # we will need to use the impact data instead of the current install # data. if upgrade['server_error']: upgrade['upgrade_succeeded'] = True upgrade['use_impact_data'] = True if upgrade['use_impact_data']: if upgrade['upgrade_succeeded']: upgrade = impact_data upgrade['upgrade_succeeded'] = True else: upgrade = impact_data upgrade['upgrade_succeeded'] = False if not upgrade['upgrade_succeeded']: upgrade['error'] = True return upgrade def main(): argument_spec = dict( system_image_file=dict(required=True), kickstart_image_file=dict(required=False), issu=dict(choices=['required', 'desired', 'no', 'yes'], default='no'), ) argument_spec.update(nxos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) warnings = list() check_args(module, warnings) # Get system_image_file(sif), kickstart_image_file(kif) and # issu settings from module params. sif = module.params['system_image_file'] kif = module.params['kickstart_image_file'] issu = module.params['issu'] if kif == 'null' or kif == '': kif = None install_result = do_install_all(module, issu, sif, kick=kif) if install_result['error']: msg = "Failed to upgrade device using image " if kif: msg = msg + "files: kickstart: %s, system: %s" % (kif, sif) else: msg = msg + "file: system: %s" % sif module.fail_json(msg=msg, raw_data=install_result['list_data']) state = install_result['processed'] changed = install_result['upgrade_needed'] module.exit_json(changed=changed, install_state=state, warnings=warnings) if __name__ == '__main__': main()
hryamzik/ansible
lib/ansible/modules/network/nxos/nxos_install_os.py
Python
gpl-3.0
19,714
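A small illustration of the CLI strings produced by build_install_cmd_set() above for the check-mode ('impact') and install paths; the image file name is a placeholder.

image = 'nxos.7.0.3.I7.1.bin'   # hypothetical image name

build_install_cmd_set('desired', image, None, 'impact')
# -> ['terminal dont-ask',
#     'show install all impact nxos nxos.7.0.3.I7.1.bin non-disruptive']

build_install_cmd_set('no', image, None, 'install')
# -> ['terminal dont-ask',
#     'install all nxos nxos.7.0.3.I7.1.bin ']   # trailing space: no ISSU flag is appended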
# # Copyright (C) 1997-2016 JDE Developers Team # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses/. # Authors : # Aitor Martinez Fernandez <[email protected]> # import traceback import jderobot import threading import Ice from .threadSensor import ThreadSensor from jderobotTypes import NavdataData class NavData: def __init__(self, jdrc, prefix): self.lock = threading.Lock() try: ic = jdrc.getIc() proxyStr = jdrc.getConfig().getProperty(prefix+".Proxy") base = ic.stringToProxy(proxyStr) self.proxy = jderobot.NavdataPrx.checkedCast(base) self.navData = NavdataData() self.update() if not self.proxy: print ('Interface ' + prefix + ' not configured') except Ice.ConnectionRefusedException: print(prefix + ': connection refused') except: traceback.print_exc() exit(-1) def update(self): if self.hasproxy(): localNavdata = self.proxy.getNavdata() navdataData = NavdataData() navdataData.vehicle = localNavdata.vehicle navdataData.state = localNavdata.state navdataData.batteryPercent = localNavdata.batteryPercent navdataData.magX = localNavdata.magX navdataData.magY = localNavdata.magY navdataData.magZ = localNavdata.magZ navdataData.pressure = localNavdata.pressure navdataData.temp = localNavdata.temp navdataData.windSpeed = localNavdata.windSpeed navdataData.windAngle = localNavdata.windAngle navdataData.windCompAngle = localNavdata.windCompAngle navdataData.rotX = localNavdata.rotX navdataData.rotY = localNavdata.rotY navdataData.rotZ = localNavdata.rotZ navdataData.altd = localNavdata.altd navdataData.vx = localNavdata.vx navdataData.vy = localNavdata.vy navdataData.vz = localNavdata.vz navdataData.ax = localNavdata.ax navdataData.ay = localNavdata.ay navdataData.az = localNavdata.az navdataData.tagsCount = localNavdata.tagsCount navdataData.tagsType = localNavdata.tagsType navdataData.tagsXc = localNavdata.tagsXc navdataData.tagsYc = localNavdata.tagsYc navdataData.tagsWidth = localNavdata.tagsWidth navdataData.tagsHeight = localNavdata.tagsHeight navdataData.tagsOrientation = localNavdata.tagsOrientation navdataData.tagsDistance = localNavdata.tagsDistance navdataData.timeStamp = localNavdata.tm self.lock.acquire() self.navData = navdataData self.lock.release() def hasproxy (self): ''' Returns if proxy has ben created or not. @return if proxy has ben created or not (Boolean) ''' return hasattr(self,"proxy") and self.proxy def getNavdata(self): if self.hasproxy(): self.lock.acquire() navData = self.navData self.lock.release() return navData return None class NavdataIceClient: def __init__(self,ic,prefix, start = False): self.navdata = NavData(ic,prefix) self.kill_event = threading.Event() self.thread = ThreadSensor(self.navdata, self.kill_event) self.thread.daemon = True if start: self.start() def start(self): ''' Starts the client. If client is stopped you can not start again, Threading.Thread raised error ''' self.kill_event.clear() self.thread.start() def stop(self): ''' Stops the client. 
If the client is stopped it can not be started again; Threading.Thread raises an error ''' self.kill_event.set() def getNavData(self): return self.navdata.getNavdata() def hasproxy (self): ''' Returns if proxy has been created or not. @return if proxy has been created or not (Boolean) ''' return self.navdata.hasproxy()
fqez/JdeRobot
src/libs/comm_py/comm/ice/navdataIceClient.py
Python
gpl-3.0
4,847
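A hedged usage sketch for the threaded client above. It assumes an already initialised JdeRobot communicator (called jdrc here) and an Ice profile prefix such as 'ArDrone.Navdata'; both names are illustrative and not defined in the file.

client = NavdataIceClient(jdrc, 'ArDrone.Navdata', start=True)

if client.hasproxy():
    data = client.getNavData()       # latest NavdataData snapshot, or None
    if data is not None:
        print(data.batteryPercent, data.altd)

client.stop()                        # stops the update thread; it cannot be restarted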
# -*- coding: utf-8 -*- # Part of Odoo. See LICENSE file for full copyright and licensing details. from odoo import models, _ class EventRegistration(models.Model): _inherit = 'event.registration' def _get_lead_description_registration(self, prefix='', line_suffix=''): """Add the questions and answers linked to the registrations into the description of the lead.""" reg_description = super(EventRegistration, self)._get_lead_description_registration(prefix=prefix, line_suffix=line_suffix) if not self.registration_answer_ids: return reg_description answer_descriptions = [] for answer in self.registration_answer_ids: answer_value = answer.value_answer_id.name if answer.question_type == "simple_choice" else answer.value_text_box answer_value = "\n".join([" %s" % line for line in answer_value.split('\n')]) answer_descriptions.append(" - %s\n%s" % (answer.question_id.title, answer_value)) return "%s\n%s\n%s" % (reg_description, _("Questions"), '\n'.join(answer_descriptions)) def _get_lead_description_fields(self): res = super(EventRegistration, self)._get_lead_description_fields() res.append('registration_answer_ids') return res
ygol/odoo
addons/website_event_crm_questions/models/event_registration.py
Python
agpl-3.0
1,282
"""Support for monitoring the state of UpCloud servers.""" import logging import voluptuous as vol from homeassistant.components.binary_sensor import ( PLATFORM_SCHEMA, BinarySensorDevice) import homeassistant.helpers.config_validation as cv from . import CONF_SERVERS, DATA_UPCLOUD, UpCloudServerEntity _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_SERVERS): vol.All(cv.ensure_list, [cv.string]), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the UpCloud server binary sensor.""" upcloud = hass.data[DATA_UPCLOUD] servers = config.get(CONF_SERVERS) devices = [UpCloudBinarySensor(upcloud, uuid) for uuid in servers] add_entities(devices, True) class UpCloudBinarySensor(UpCloudServerEntity, BinarySensorDevice): """Representation of an UpCloud server sensor."""
molobrakos/home-assistant
homeassistant/components/upcloud/binary_sensor.py
Python
apache-2.0
896
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from airflow.contrib.hooks.aws_hook import AwsHook from airflow.contrib.operators.sagemaker_base_operator import SageMakerBaseOperator from airflow.utils.decorators import apply_defaults from airflow.exceptions import AirflowException class SageMakerTuningOperator(SageMakerBaseOperator): """ Initiate a SageMaker hyperparameter tuning job. This operator returns The ARN of the tuning job created in Amazon SageMaker. :param config: The configuration necessary to start a tuning job (templated). For details of the configuration parameter see :py:meth:`SageMaker.Client.create_hyper_parameter_tuning_job` :type config: dict :param aws_conn_id: The AWS connection ID to use. :type aws_conn_id: str :param wait_for_completion: Set to True to wait until the tuning job finishes. :type wait_for_completion: bool :param check_interval: If wait is set to True, the time interval, in seconds, that this operation waits to check the status of the tuning job. :type check_interval: int :param max_ingestion_time: If wait is set to True, the operation fails if the tuning job doesn't finish within max_ingestion_time seconds. If you set this parameter to None, the operation does not timeout. 
:type max_ingestion_time: int """ integer_fields = [ ['HyperParameterTuningJobConfig', 'ResourceLimits', 'MaxNumberOfTrainingJobs'], ['HyperParameterTuningJobConfig', 'ResourceLimits', 'MaxParallelTrainingJobs'], ['TrainingJobDefinition', 'ResourceConfig', 'InstanceCount'], ['TrainingJobDefinition', 'ResourceConfig', 'VolumeSizeInGB'], ['TrainingJobDefinition', 'StoppingCondition', 'MaxRuntimeInSeconds'] ] @apply_defaults def __init__(self, config, wait_for_completion=True, check_interval=30, max_ingestion_time=None, *args, **kwargs): super(SageMakerTuningOperator, self).__init__(config=config, *args, **kwargs) self.config = config self.wait_for_completion = wait_for_completion self.check_interval = check_interval self.max_ingestion_time = max_ingestion_time def expand_role(self): if 'TrainingJobDefinition' in self.config: config = self.config['TrainingJobDefinition'] if 'RoleArn' in config: hook = AwsHook(self.aws_conn_id) config['RoleArn'] = hook.expand_role(config['RoleArn']) def execute(self, context): self.preprocess_config() self.log.info( 'Creating SageMaker Hyper-Parameter Tuning Job %s', self.config['HyperParameterTuningJobName'] ) response = self.hook.create_tuning_job( self.config, wait_for_completion=self.wait_for_completion, check_interval=self.check_interval, max_ingestion_time=self.max_ingestion_time ) if response['ResponseMetadata']['HTTPStatusCode'] != 200: raise AirflowException('Sagemaker Tuning Job creation failed: %s' % response) else: return { 'Tuning': self.hook.describe_tuning_job( self.config['HyperParameterTuningJobName'] ) }
malmiron/incubator-airflow
airflow/contrib/operators/sagemaker_tuning_operator.py
Python
apache-2.0
4,191
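A sketch of how the operator above might be wired into a DAG. The import path follows this file's location; the tuning config is left as a placeholder because its keys are defined by SageMaker's create_hyper_parameter_tuning_job API rather than by this operator.

from datetime import datetime
from airflow import DAG
from airflow.contrib.operators.sagemaker_tuning_operator import SageMakerTuningOperator

# Placeholder config: populate HyperParameterTuningJobConfig and
# TrainingJobDefinition according to the SageMaker API documentation.
tuning_config = {
    'HyperParameterTuningJobName': 'example-tuning-job',
    'HyperParameterTuningJobConfig': {},
    'TrainingJobDefinition': {},
}

with DAG('sagemaker_tuning_example', start_date=datetime(2019, 1, 1),
         schedule_interval=None) as dag:
    tune = SageMakerTuningOperator(
        task_id='tune_model',
        config=tuning_config,
        aws_conn_id='aws_default',     # connection id documented in the operator above
        wait_for_completion=True,
        check_interval=60,
    )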
"""Support for binary sensor using the PiFace Digital I/O module on a RPi.""" import logging import voluptuous as vol from homeassistant.components.binary_sensor import ( PLATFORM_SCHEMA, BinarySensorDevice) from homeassistant.components import rpi_pfio from homeassistant.const import CONF_NAME, DEVICE_DEFAULT_NAME import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_INVERT_LOGIC = 'invert_logic' CONF_PORTS = 'ports' CONF_SETTLE_TIME = 'settle_time' DEFAULT_INVERT_LOGIC = False DEFAULT_SETTLE_TIME = 20 PORT_SCHEMA = vol.Schema({ vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_SETTLE_TIME, default=DEFAULT_SETTLE_TIME): cv.positive_int, vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean, }) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_PORTS, default={}): vol.Schema({ cv.positive_int: PORT_SCHEMA, }) }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the PiFace Digital Input devices.""" binary_sensors = [] ports = config.get(CONF_PORTS) for port, port_entity in ports.items(): name = port_entity.get(CONF_NAME) settle_time = port_entity[CONF_SETTLE_TIME] / 1000 invert_logic = port_entity[CONF_INVERT_LOGIC] binary_sensors.append(RPiPFIOBinarySensor( hass, port, name, settle_time, invert_logic)) add_entities(binary_sensors, True) rpi_pfio.activate_listener(hass) class RPiPFIOBinarySensor(BinarySensorDevice): """Represent a binary sensor that a PiFace Digital Input.""" def __init__(self, hass, port, name, settle_time, invert_logic): """Initialize the RPi binary sensor.""" self._port = port self._name = name or DEVICE_DEFAULT_NAME self._invert_logic = invert_logic self._state = None def read_pfio(port): """Read state from PFIO.""" self._state = rpi_pfio.read_input(self._port) self.schedule_update_ha_state() rpi_pfio.edge_detect(hass, self._port, read_pfio, settle_time) @property def should_poll(self): """No polling needed.""" return False @property def name(self): """Return the name of the sensor.""" return self._name @property def is_on(self): """Return the state of the entity.""" return self._state != self._invert_logic def update(self): """Update the PFIO state.""" self._state = rpi_pfio.read_input(self._port)
MartinHjelmare/home-assistant
homeassistant/components/rpi_pfio/binary_sensor.py
Python
apache-2.0
2,581
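For reference, the shape of data that PLATFORM_SCHEMA above validates, written as the plain Python dict voluptuous receives (in Home Assistant this would normally come from YAML configuration); the port numbers and names are invented.

example_config = {
    'platform': 'rpi_pfio',
    'ports': {
        0: {'name': 'Door', 'settle_time': 50, 'invert_logic': True},
        1: {},   # falls back to settle_time 20 ms, invert_logic False and the default device name
    },
}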
import sys import os from doctest import DocTestSuite, ELLIPSIS, NORMALIZE_WHITESPACE SKIP_DIRS = ( # Skip modules which import and initialize stuff that require QApplication 'Orange/widgets', 'Orange/canvas', # Skip because we don't want Orange.datasets as a module (yet) 'Orange/datasets/' ) if sys.platform == "win32": # convert to platform native path component separators SKIP_DIRS = tuple(os.path.normpath(p) for p in SKIP_DIRS) def find_modules(package): """Return a recursive list of submodules for a given package""" from os import path, walk module = path.dirname(getattr(package, '__file__', package)) parent = path.dirname(module) files = (path.join(dir, file)[len(parent) + 1:-3] for dir, dirs, files in walk(module) for file in files if file.endswith('.py')) files = (f for f in files if not f.startswith(SKIP_DIRS)) files = (f.replace(path.sep, '.') for f in files) return files class Context(dict): """ Execution context that retains the changes the tests make. Preferably use one per module to obtain nice "literate" modules that "follow along". In other words, directly the opposite of: https://docs.python.org/3/library/doctest.html#what-s-the-execution-context By popular demand: http://stackoverflow.com/questions/13106118/object-reuse-in-python-doctest/13106793#13106793 http://stackoverflow.com/questions/3286658/embedding-test-code-or-data-within-doctest-strings """ def copy(self): return self def clear(self): pass def suite(package): """Assemble test suite for doctests in path (recursively)""" from importlib import import_module for module in find_modules(package.__file__): try: module = import_module(module) yield DocTestSuite(module, globs=Context(module.__dict__.copy()), optionflags=ELLIPSIS | NORMALIZE_WHITESPACE) except ValueError: pass # No doctests in module except ImportError: import warnings warnings.warn('Unimportable module: {}'.format(module)) def load_tests(loader, tests, ignore): # This follows the load_tests protocol # https://docs.python.org/3/library/unittest.html#load-tests-protocol import Orange tests.addTests(suite(Orange)) return tests
cheral/orange3
Orange/tests/test_doctest.py
Python
bsd-2-clause
2,448
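A quick demonstration of why the Context class above keeps doctest globals alive between examples: doctest normally copies and then clears the globs dict, and Context turns both operations into no-ops.

ctx = Context({'x': 1})
alias = ctx.copy()        # returns the very same object, not a shallow copy
alias['y'] = 2
assert 'y' in ctx         # the change is visible through the original
ctx.clear()               # deliberately does nothing
assert ctx == {'x': 1, 'y': 2}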
#!/usr/bin/python2.4 # Copyright (c) 2006-2008 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. '''Gatherer for administrative template files. ''' import re import types from grit.gather import regexp from grit import exception from grit import tclib from grit import util class MalformedAdminTemplateException(exception.Base): '''This file doesn't look like a .adm file to me.''' def __init__(self, msg=''): exception.Base.__init__(self, msg) class AdmGatherer(regexp.RegexpGatherer): '''Gatherer for the translateable portions of an admin template. This gatherer currently makes the following assumptions: - there is only one [strings] section and it is always the last section of the file - translateable strings do not need to be escaped. ''' # Finds the strings section as the group named 'strings' _STRINGS_SECTION = re.compile('(?P<first_part>.+^\[strings\])(?P<strings>.+)\Z', re.MULTILINE | re.DOTALL) # Finds the translateable sections from within the [strings] section. _TRANSLATEABLES = re.compile('^\s*[A-Za-z0-9_]+\s*=\s*"(?P<text>.+)"\s*$', re.MULTILINE) def __init__(self, text): regexp.RegexpGatherer.__init__(self, text) def Escape(self, text): return text.replace('\n', '\\n') def UnEscape(self, text): return text.replace('\\n', '\n') def Parse(self): if self.have_parsed_: return m = self._STRINGS_SECTION.match(self.text_) if not m: raise MalformedAdminTemplateException() # Add the first part, which is all nontranslateable, to the skeleton self._AddNontranslateableChunk(m.group('first_part')) # Then parse the rest using the _TRANSLATEABLES regexp. self._RegExpParse(self._TRANSLATEABLES, m.group('strings')) # static method def FromFile(adm_file, ext_key=None, encoding='cp1252'): '''Loads the contents of 'adm_file' in encoding 'encoding' and creates an AdmGatherer instance that gathers from those contents. The 'ext_key' parameter is ignored. Args: adm_file: file('bingo.rc') | 'filename.rc' encoding: 'utf-8' Return: AdmGatherer(contents_of_file) ''' if isinstance(adm_file, types.StringTypes): adm_file = util.WrapInputStream(file(adm_file, 'r'), encoding) return AdmGatherer(adm_file.read()) FromFile = staticmethod(FromFile)
meego-tablet-ux/meego-app-browser
tools/grit/grit/gather/admin_template.py
Python
bsd-3-clause
2,488
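A minimal sketch of running the gatherer above on an in-memory .adm snippet (Python 2, like the module itself). The template text is invented; it only has to satisfy the gatherer's assumption that [strings] is the last section of the file.

adm_text = (
    'CLASS MACHINE\n'
    '  CATEGORY !!Cat\n'
    '  END CATEGORY\n'
    '[strings]\n'
    'Cat="Example category"\n'
    'Msg="First line\\nsecond line"\n'
)

gatherer = AdmGatherer(adm_text)
gatherer.Parse()   # separates the skeleton from the two translateable strings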
# class generated by DeVIDE::createDeVIDEModuleFromVTKObject from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase import vtk class vtkSynchronizedTemplatesCutter3D(SimpleVTKClassModuleBase): def __init__(self, module_manager): SimpleVTKClassModuleBase.__init__( self, module_manager, vtk.vtkSynchronizedTemplatesCutter3D(), 'Processing.', ('vtkImageData',), ('vtkPolyData',), replaceDoc=True, inputFunctions=None, outputFunctions=None)
nagyistoce/devide
modules/vtk_basic/vtkSynchronizedTemplatesCutter3D.py
Python
bsd-3-clause
520
# Status: ported, except for --out-xml # Base revision: 64488 # # Copyright 2005 Dave Abrahams # Copyright 2002, 2003, 2004, 2005, 2010 Vladimir Prus # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) # This module implements regression testing framework. It declares a number of # main target rules which perform some action and, if the results are OK, # creates an output file. # # The exact list of rules is: # 'compile' -- creates .test file if compilation of sources was # successful. # 'compile-fail' -- creates .test file if compilation of sources failed. # 'run' -- creates .test file is running of executable produced from # sources was successful. Also leaves behind .output file # with the output from program run. # 'run-fail' -- same as above, but .test file is created if running fails. # # In all cases, presence of .test file is an indication that the test passed. # For more convenient reporting, you might want to use C++ Boost regression # testing utilities (see http://www.boost.org/more/regression.html). # # For historical reason, a 'unit-test' rule is available which has the same # syntax as 'exe' and behaves just like 'run'. # Things to do: # - Teach compiler_status handle Jamfile.v2. # Notes: # - <no-warn> is not implemented, since it is Como-specific, and it is not # clear how to implement it # - std::locale-support is not implemented (it is used in one test). import b2.build.feature as feature import b2.build.type as type import b2.build.targets as targets import b2.build.generators as generators import b2.build.toolset as toolset import b2.tools.common as common import b2.util.option as option import b2.build_system as build_system from b2.manager import get_manager from b2.util import stem, bjam_signature, is_iterable_typed from b2.util.sequence import unique import bjam import re import os.path import sys def init(): pass # Feature controling the command used to lanch test programs. feature.feature("testing.launcher", [], ["free", "optional"]) feature.feature("test-info", [], ["free", "incidental"]) feature.feature("testing.arg", [], ["free", "incidental"]) feature.feature("testing.input-file", [], ["free", "dependency"]) feature.feature("preserve-test-targets", ["on", "off"], ["incidental", "propagated"]) # Register target types. type.register("TEST", ["test"]) type.register("COMPILE", [], "TEST") type.register("COMPILE_FAIL", [], "TEST") type.register("RUN_OUTPUT", ["run"]) type.register("RUN", [], "TEST") type.register("RUN_FAIL", [], "TEST") type.register("LINK", [], "TEST") type.register("LINK_FAIL", [], "TEST") type.register("UNIT_TEST", ["passed"], "TEST") __all_tests = [] # Declare the rules which create main targets. While the 'type' module already # creates rules with the same names for us, we need extra convenience: default # name of main target, so write our own versions. # Helper rule. Create a test target, using basename of first source if no target # name is explicitly passed. Remembers the created target in a global variable. 
def make_test(target_type, sources, requirements, target_name=None): assert isinstance(target_type, basestring) assert is_iterable_typed(sources, basestring) assert is_iterable_typed(requirements, basestring) assert isinstance(target_type, basestring) or target_type is None if not target_name: target_name = stem(os.path.basename(sources[0])) # Having periods (".") in the target name is problematic because the typed # generator will strip the suffix and use the bare name for the file # targets. Even though the location-prefix averts problems most times it # does not prevent ambiguity issues when referring to the test targets. For # example when using the XML log output. So we rename the target to remove # the periods, and provide an alias for users. real_name = target_name.replace(".", "~") project = get_manager().projects().current() # The <location-prefix> forces the build system for generate paths in the # form '$build_dir/array1.test/gcc/debug'. This is necessary to allow # post-processing tools to work. t = get_manager().targets().create_typed_target( type.type_from_rule_name(target_type), project, real_name, sources, requirements + ["<location-prefix>" + real_name + ".test"], [], []) # The alias to the real target, per period replacement above. if real_name != target_name: get_manager().projects().project_rules().rules["alias"]( target_name, [t]) # Remember the test (for --dump-tests). A good way would be to collect all # given a project. This has some technical problems: e.g. we can not call # this dump from a Jamfile since projects referred by 'build-project' are # not available until the whole Jamfile has been loaded. __all_tests.append(t) return t # Note: passing more that one cpp file here is known to fail. Passing a cpp file # and a library target works. # @bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) def compile(sources, requirements, target_name=None): return make_test("compile", sources, requirements, target_name) @bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) def compile_fail(sources, requirements, target_name=None): return make_test("compile-fail", sources, requirements, target_name) @bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) def link(sources, requirements, target_name=None): return make_test("link", sources, requirements, target_name) @bjam_signature((["sources", "*"], ["requirements", "*"], ["target_name", "?"])) def link_fail(sources, requirements, target_name=None): return make_test("link-fail", sources, requirements, target_name) def handle_input_files(input_files): if len(input_files) > 1: # Check that sorting made when creating property-set instance will not # change the ordering. 
if sorted(input_files) != input_files: get_manager().errors()("Names of input files must be sorted alphabetically\n" + "due to internal limitations") return ["<testing.input-file>" + f for f in input_files] @bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], ["requirements", "*"], ["target_name", "?"], ["default_build", "*"])) def run(sources, args, input_files, requirements, target_name=None, default_build=[]): if args: requirements.append("<testing.arg>" + " ".join(args)) requirements.extend(handle_input_files(input_files)) return make_test("run", sources, requirements, target_name) @bjam_signature((["sources", "*"], ["args", "*"], ["input_files", "*"], ["requirements", "*"], ["target_name", "?"], ["default_build", "*"])) def run_fail(sources, args, input_files, requirements, target_name=None, default_build=[]): if args: requirements.append("<testing.arg>" + " ".join(args)) requirements.extend(handle_input_files(input_files)) return make_test("run-fail", sources, requirements, target_name) # Register all the rules for name in ["compile", "compile-fail", "link", "link-fail", "run", "run-fail"]: get_manager().projects().add_rule(name, getattr(sys.modules[__name__], name.replace("-", "_"))) # Use 'test-suite' as a synonym for 'alias', for backward compatibility. from b2.build.alias import alias get_manager().projects().add_rule("test-suite", alias) # For all main targets in 'project-module', which are typed targets with type # derived from 'TEST', produce some interesting information. # def dump_tests(): for t in __all_tests: dump_test(t) # Given a project location in normalized form (slashes are forward), compute the # name of the Boost library. # __ln1 = re.compile("/(tools|libs)/(.*)/(test|example)") __ln2 = re.compile("/(tools|libs)/(.*)$") __ln3 = re.compile("(/status$)") def get_library_name(path): assert isinstance(path, basestring) path = path.replace("\\", "/") match1 = __ln1.match(path) match2 = __ln2.match(path) match3 = __ln3.match(path) if match1: return match1.group(2) elif match2: return match2.group(2) elif match3: return "" elif option.get("dump-tests", False, True): # The 'run' rule and others might be used outside boost. In that case, # just return the path, since the 'library name' makes no sense. return path # Was an XML dump requested? __out_xml = option.get("out-xml", False, True) # Takes a target (instance of 'basic-target') and prints # - its type # - its name # - comments specified via the <test-info> property # - relative location of all source from the project root. # def dump_test(target): assert isinstance(target, targets.AbstractTarget) type = target.type() name = target.name() project = target.project() project_root = project.get('project-root') library = get_library_name(os.path.abspath(project.get('location'))) if library: name = library + "/" + name sources = target.sources() source_files = [] for s in sources: if isinstance(s, targets.FileReference): location = os.path.abspath(os.path.join(s.location(), s.name())) source_files.append(os.path.relpath(location, os.path.abspath(project_root))) target_name = project.get('location') + "//" + target.name() + ".test" test_info = target.requirements().get('test-info') test_info = " ".join('"' + ti + '"' for ti in test_info) # If the user requested XML output on the command-line, add the test info to # that XML file rather than dumping them to stdout. 
#if $(.out-xml) #{ # local nl = " #" ; # .contents on $(.out-xml) += # "$(nl) <test type=\"$(type)\" name=\"$(name)\">" # "$(nl) <target><![CDATA[$(target-name)]]></target>" # "$(nl) <info><![CDATA[$(test-info)]]></info>" # "$(nl) <source><![CDATA[$(source-files)]]></source>" # "$(nl) </test>" # ; # } # else source_files = " ".join('"' + s + '"' for s in source_files) if test_info: print 'boost-test(%s) "%s" [%s] : %s' % (type, name, test_info, source_files) else: print 'boost-test(%s) "%s" : %s' % (type, name, source_files) # Register generators. Depending on target type, either 'expect-success' or # 'expect-failure' rule will be used. generators.register_standard("testing.expect-success", ["OBJ"], ["COMPILE"]) generators.register_standard("testing.expect-failure", ["OBJ"], ["COMPILE_FAIL"]) generators.register_standard("testing.expect-success", ["RUN_OUTPUT"], ["RUN"]) generators.register_standard("testing.expect-failure", ["RUN_OUTPUT"], ["RUN_FAIL"]) generators.register_standard("testing.expect-success", ["EXE"], ["LINK"]) generators.register_standard("testing.expect-failure", ["EXE"], ["LINK_FAIL"]) # Generator which runs an EXE and captures output. generators.register_standard("testing.capture-output", ["EXE"], ["RUN_OUTPUT"]) # Generator which creates a target if sources run successfully. Differs from RUN # in that run output is not captured. The reason why it exists is that the 'run' # rule is much better for automated testing, but is not user-friendly (see # http://article.gmane.org/gmane.comp.lib.boost.build/6353). generators.register_standard("testing.unit-test", ["EXE"], ["UNIT_TEST"]) # FIXME: if those calls are after bjam.call, then bjam will crash # when toolset.flags calls bjam.caller. toolset.flags("testing.capture-output", "ARGS", [], ["<testing.arg>"]) toolset.flags("testing.capture-output", "INPUT_FILES", [], ["<testing.input-file>"]) toolset.flags("testing.capture-output", "LAUNCHER", [], ["<testing.launcher>"]) toolset.flags("testing.unit-test", "LAUNCHER", [], ["<testing.launcher>"]) toolset.flags("testing.unit-test", "ARGS", [], ["<testing.arg>"]) # This is a composing generator to support cases where a generator for the # specified target constructs other targets as well. One such example is msvc's # exe generator that constructs both EXE and PDB targets. type.register("TIME", ["time"]) generators.register_composing("testing.time", [], ["TIME"]) # The following code sets up actions for this module. It's pretty convoluted, # but the basic points is that we most of actions are defined by Jam code # contained in testing-aux.jam, which we load into Jam module named 'testing' def run_path_setup(target, sources, ps): if __debug__: from ..build.property_set import PropertySet assert is_iterable_typed(target, basestring) or isinstance(target, basestring) assert is_iterable_typed(sources, basestring) assert isinstance(ps, PropertySet) # For testing, we need to make sure that all dynamic libraries needed by the # test are found. So, we collect all paths from dependency libraries (via # xdll-path property) and add whatever explicit dll-path user has specified. # The resulting paths are added to the environment on each test invocation. 
dll_paths = ps.get('dll-path') dll_paths.extend(ps.get('xdll-path')) dll_paths.extend(bjam.call("get-target-variable", sources, "RUN_PATH")) dll_paths = unique(dll_paths) if dll_paths: bjam.call("set-target-variable", target, "PATH_SETUP", common.prepend_path_variable_command( common.shared_library_path_variable(), dll_paths)) def capture_output_setup(target, sources, ps): if __debug__: from ..build.property_set import PropertySet assert is_iterable_typed(target, basestring) assert is_iterable_typed(sources, basestring) assert isinstance(ps, PropertySet) run_path_setup(target[0], sources, ps) if ps.get('preserve-test-targets') == ['off']: bjam.call("set-target-variable", target, "REMOVE_TEST_TARGETS", "1") get_manager().engine().register_bjam_action("testing.capture-output", capture_output_setup) path = os.path.dirname(__file__) import b2.util.os_j get_manager().projects().project_rules()._import_rule("testing", "os.name", b2.util.os_j.name) import b2.tools.common get_manager().projects().project_rules()._import_rule("testing", "common.rm-command", b2.tools.common.rm_command) get_manager().projects().project_rules()._import_rule("testing", "common.file-creation-command", b2.tools.common.file_creation_command) bjam.call("load", "testing", os.path.join(path, "testing-aux.jam")) for name in ["expect-success", "expect-failure", "time"]: get_manager().engine().register_bjam_action("testing." + name) get_manager().engine().register_bjam_action("testing.unit-test", run_path_setup) if option.get("dump-tests", False, True): build_system.add_pre_build_hook(dump_tests)
m039/Void
third-party/void-boost/tools/build/src/tools/testing.py
Python
mit
15,330
#!/usr/bin/env python """ Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/) See the file 'doc/COPYING' for copying permission """ from lib.core.agent import agent from lib.core.common import arrayizeValue from lib.core.common import Backend from lib.core.common import filterPairValues from lib.core.common import getLimitRange from lib.core.common import isInferenceAvailable from lib.core.common import isNoneValue from lib.core.common import isNumPosStrValue from lib.core.common import isTechniqueAvailable from lib.core.common import readInput from lib.core.common import safeSQLIdentificatorNaming from lib.core.common import safeStringFormat from lib.core.common import unArrayizeValue from lib.core.common import unsafeSQLIdentificatorNaming from lib.core.data import conf from lib.core.data import kb from lib.core.data import logger from lib.core.data import paths from lib.core.data import queries from lib.core.enums import CHARSET_TYPE from lib.core.enums import DBMS from lib.core.enums import EXPECTED from lib.core.enums import PAYLOAD from lib.core.exception import SqlmapMissingMandatoryOptionException from lib.core.exception import SqlmapUserQuitException from lib.core.settings import CURRENT_DB from lib.core.settings import METADB_SUFFIX from lib.request import inject from lib.techniques.brute.use import columnExists from lib.techniques.brute.use import tableExists class Search: """ This class defines search functionalities for plugins. """ def __init__(self): pass def searchDb(self): foundDbs = [] rootQuery = queries[Backend.getIdentifiedDbms()].search_db dbList = conf.db.split(",") if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: dbCond = rootQuery.inband.condition2 else: dbCond = rootQuery.inband.condition dbConsider, dbCondParam = self.likeOrExact("database") for db in dbList: values = [] db = safeSQLIdentificatorNaming(db) if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): db = db.upper() infoMsg = "searching database" if dbConsider == "1": infoMsg += "s like" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db) logger.info(infoMsg) if conf.excludeSysDbs: exclDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList) infoMsg = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) logger.info(infoMsg) else: exclDbsQuery = "" dbQuery = "%s%s" % (dbCond, dbCondParam) dbQuery = dbQuery % unsafeSQLIdentificatorNaming(db) if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: query = rootQuery.inband.query2 else: query = rootQuery.inband.query query = query % (dbQuery + exclDbsQuery) values = inject.getValue(query, blind=False, time=False) if not isNoneValue(values): values = arrayizeValue(values) for value in values: value = safeSQLIdentificatorNaming(value) foundDbs.append(value) if not values and isInferenceAvailable() and not conf.direct: infoMsg = "fetching number of database" if dbConsider == "1": infoMsg += "s like" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(db) logger.info(infoMsg) if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: query = rootQuery.blind.count2 else: query = rootQuery.blind.count query = query % (dbQuery + exclDbsQuery) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, 
charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "no database" if dbConsider == "1": warnMsg += "s like" warnMsg += " '%s' found" % unsafeSQLIdentificatorNaming(db) logger.warn(warnMsg) continue indexRange = getLimitRange(count) for index in indexRange: if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: query = rootQuery.blind.query2 else: query = rootQuery.blind.query query = query % (dbQuery + exclDbsQuery) query = agent.limitQuery(index, query, dbCond) value = unArrayizeValue(inject.getValue(query, union=False, error=False)) value = safeSQLIdentificatorNaming(value) foundDbs.append(value) conf.dumper.lister("found databases", foundDbs) def searchTable(self): bruteForce = False if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: errMsg = "information_schema not available, " errMsg += "back-end DBMS is MySQL < 5.0" bruteForce = True if bruteForce: message = "do you want to use common table existence check? %s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") test = readInput(message, default="Y" if "Y" in message else "N") if test[0] in ("n", "N"): return elif test[0] in ("q", "Q"): raise SqlmapUserQuitException else: regex = "|".join(conf.tbl.split(",")) return tableExists(paths.COMMON_TABLES, regex) foundTbls = {} tblList = conf.tbl.split(",") rootQuery = queries[Backend.getIdentifiedDbms()].search_table tblCond = rootQuery.inband.condition dbCond = rootQuery.inband.condition2 tblConsider, tblCondParam = self.likeOrExact("table") for tbl in tblList: values = [] tbl = safeSQLIdentificatorNaming(tbl, True) if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2, DBMS.FIREBIRD): tbl = tbl.upper() infoMsg = "searching table" if tblConsider == "1": infoMsg += "s like" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) if dbCond and conf.db and conf.db != CURRENT_DB: _ = conf.db.split(",") whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")" infoMsg += " for database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(db for db in _)) elif conf.excludeSysDbs: whereDbsQuery = "".join(" AND '%s' != %s" % (unsafeSQLIdentificatorNaming(db), dbCond) for db in self.excludeDbsList) infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(db for db in self.excludeDbsList)) logger.info(infoMsg2) else: whereDbsQuery = "" logger.info(infoMsg) tblQuery = "%s%s" % (tblCond, tblCondParam) tblQuery = tblQuery % unsafeSQLIdentificatorNaming(tbl) if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: query = rootQuery.inband.query query = query % (tblQuery + whereDbsQuery) values = inject.getValue(query, blind=False, time=False) if values and Backend.getIdentifiedDbms() in (DBMS.SQLITE, DBMS.FIREBIRD): newValues = [] if isinstance(values, basestring): values = [values] for value in values: dbName = "SQLite" if Backend.isDbms(DBMS.SQLITE) else "Firebird" newValues.append(["%s%s" % (dbName, METADB_SUFFIX), value]) values = newValues for foundDb, foundTbl in filterPairValues(values): foundDb = safeSQLIdentificatorNaming(foundDb) foundTbl = safeSQLIdentificatorNaming(foundTbl, True) if foundDb is None or foundTbl is None: continue if foundDb in foundTbls: foundTbls[foundDb].append(foundTbl) else: foundTbls[foundDb] = [foundTbl] if not values and isInferenceAvailable() and not conf.direct: if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, 
DBMS.FIREBIRD): if len(whereDbsQuery) == 0: infoMsg = "fetching number of databases with table" if tblConsider == "1": infoMsg += "s like" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) logger.info(infoMsg) query = rootQuery.blind.count query = query % (tblQuery + whereDbsQuery) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "no databases have table" if tblConsider == "1": warnMsg += "s like" warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(tbl) logger.warn(warnMsg) continue indexRange = getLimitRange(count) for index in indexRange: query = rootQuery.blind.query query = query % (tblQuery + whereDbsQuery) query = agent.limitQuery(index, query) foundDb = unArrayizeValue(inject.getValue(query, union=False, error=False)) foundDb = safeSQLIdentificatorNaming(foundDb) if foundDb not in foundTbls: foundTbls[foundDb] = [] if tblConsider == "2": foundTbls[foundDb].append(tbl) if tblConsider == "2": continue else: for db in conf.db.split(","): db = safeSQLIdentificatorNaming(db) if db not in foundTbls: foundTbls[db] = [] else: dbName = "SQLite" if Backend.isDbms(DBMS.SQLITE) else "Firebird" foundTbls["%s%s" % (dbName, METADB_SUFFIX)] = [] for db in foundTbls.keys(): db = safeSQLIdentificatorNaming(db) infoMsg = "fetching number of table" if tblConsider == "1": infoMsg += "s like" infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(tbl), unsafeSQLIdentificatorNaming(db)) logger.info(infoMsg) query = rootQuery.blind.count2 if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD): query = query % unsafeSQLIdentificatorNaming(db) query += " AND %s" % tblQuery count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "no table" if tblConsider == "1": warnMsg += "s like" warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(tbl) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) logger.warn(warnMsg) continue indexRange = getLimitRange(count) for index in indexRange: query = rootQuery.blind.query2 if query.endswith("'%s')"): query = query[:-1] + " AND %s)" % tblQuery else: query += " AND %s" % tblQuery if Backend.isDbms(DBMS.FIREBIRD): query = safeStringFormat(query, index) if Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.FIREBIRD): query = safeStringFormat(query, unsafeSQLIdentificatorNaming(db)) if not Backend.isDbms(DBMS.FIREBIRD): query = agent.limitQuery(index, query) foundTbl = unArrayizeValue(inject.getValue(query, union=False, error=False)) if not isNoneValue(foundTbl): kb.hintValue = foundTbl foundTbl = safeSQLIdentificatorNaming(foundTbl, True) foundTbls[db].append(foundTbl) for db in foundTbls.keys(): if isNoneValue(foundTbls[db]): del foundTbls[db] if not foundTbls: warnMsg = "no databases contain any of the provided tables" logger.warn(warnMsg) return conf.dumper.dbTables(foundTbls) self.dumpFoundTables(foundTbls) def searchColumn(self): bruteForce = False if Backend.isDbms(DBMS.MYSQL) and not kb.data.has_information_schema: errMsg = "information_schema not available, " errMsg += "back-end DBMS is MySQL < 5.0" bruteForce = True if bruteForce: message = "do you want to use common column existence check? 
%s" % ("[Y/n/q]" if Backend.getIdentifiedDbms() in (DBMS.ACCESS,) else "[y/N/q]") test = readInput(message, default="Y" if "Y" in message else "N") if test[0] in ("n", "N"): return elif test[0] in ("q", "Q"): raise SqlmapUserQuitException else: regex = "|".join(conf.col.split(",")) conf.dumper.dbTableColumns(columnExists(paths.COMMON_COLUMNS, regex)) message = "do you want to dump entries? [Y/n] " output = readInput(message, default="Y") if output and output[0] not in ("n", "N"): self.dumpAll() return rootQuery = queries[Backend.getIdentifiedDbms()].search_column foundCols = {} dbs = {} whereDbsQuery = "" whereTblsQuery = "" infoMsgTbl = "" infoMsgDb = "" colList = conf.col.split(",") origTbl = conf.tbl origDb = conf.db colCond = rootQuery.inband.condition dbCond = rootQuery.inband.condition2 tblCond = rootQuery.inband.condition3 colConsider, colCondParam = self.likeOrExact("column") for column in colList: values = [] column = safeSQLIdentificatorNaming(column) conf.db = origDb conf.tbl = origTbl if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): column = column.upper() infoMsg = "searching column" if colConsider == "1": infoMsg += "s like" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) foundCols[column] = {} if conf.tbl: _ = conf.tbl.split(",") whereTblsQuery = " AND (" + " OR ".join("%s = '%s'" % (tblCond, unsafeSQLIdentificatorNaming(tbl)) for tbl in _) + ")" infoMsgTbl = " for table%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(tbl) for tbl in _)) if conf.db and conf.db != CURRENT_DB: _ = conf.db.split(",") whereDbsQuery = " AND (" + " OR ".join("%s = '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in _) + ")" infoMsgDb = " in database%s '%s'" % ("s" if len(_) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in _)) elif conf.excludeSysDbs: whereDbsQuery = "".join(" AND %s != '%s'" % (dbCond, unsafeSQLIdentificatorNaming(db)) for db in self.excludeDbsList) infoMsg2 = "skipping system database%s '%s'" % ("s" if len(self.excludeDbsList) > 1 else "", ", ".join(unsafeSQLIdentificatorNaming(db) for db in self.excludeDbsList)) logger.info(infoMsg2) else: infoMsgDb = " across all databases" logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb)) colQuery = "%s%s" % (colCond, colCondParam) colQuery = colQuery % unsafeSQLIdentificatorNaming(column) if any(isTechniqueAvailable(_) for _ in (PAYLOAD.TECHNIQUE.UNION, PAYLOAD.TECHNIQUE.ERROR, PAYLOAD.TECHNIQUE.QUERY)) or conf.direct: if not all((conf.db, conf.tbl)): # Enumerate tables containing the column provided if # either of database(s) or table(s) is not provided query = rootQuery.inband.query query = query % (colQuery + whereDbsQuery + whereTblsQuery) values = inject.getValue(query, blind=False, time=False) else: # Assume provided databases' tables contain the # column(s) provided values = [] for db in conf.db.split(","): for tbl in conf.tbl.split(","): values.append([safeSQLIdentificatorNaming(db), safeSQLIdentificatorNaming(tbl, True)]) for db, tbl in filterPairValues(values): db = safeSQLIdentificatorNaming(db) tbls = tbl.split(",") if not isNoneValue(tbl) else [] for tbl in tbls: tbl = safeSQLIdentificatorNaming(tbl, True) if db is None or tbl is None: continue conf.db = db conf.tbl = tbl conf.col = column self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False) if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]: if db not in dbs: dbs[db] = {} if tbl not in dbs[db]: dbs[db][tbl] = {} 
dbs[db][tbl].update(kb.data.cachedColumns[db][tbl]) if db in foundCols[column]: foundCols[column][db].append(tbl) else: foundCols[column][db] = [tbl] kb.data.cachedColumns = {} if not values and isInferenceAvailable() and not conf.direct: if not conf.db: infoMsg = "fetching number of databases with tables containing column" if colConsider == "1": infoMsg += "s like" infoMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) logger.info("%s%s%s" % (infoMsg, infoMsgTbl, infoMsgDb)) query = rootQuery.blind.count query = query % (colQuery + whereDbsQuery + whereTblsQuery) count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "no databases have tables containing column" if colConsider == "1": warnMsg += "s like" warnMsg += " '%s'" % unsafeSQLIdentificatorNaming(column) logger.warn("%s%s" % (warnMsg, infoMsgTbl)) continue indexRange = getLimitRange(count) for index in indexRange: query = rootQuery.blind.query query = query % (colQuery + whereDbsQuery + whereTblsQuery) query = agent.limitQuery(index, query) db = unArrayizeValue(inject.getValue(query, union=False, error=False)) db = safeSQLIdentificatorNaming(db) if db not in dbs: dbs[db] = {} if db not in foundCols[column]: foundCols[column][db] = [] else: for db in conf.db.split(","): db = safeSQLIdentificatorNaming(db) if db not in foundCols[column]: foundCols[column][db] = [] origDb = conf.db origTbl = conf.tbl for column, dbData in foundCols.items(): colQuery = "%s%s" % (colCond, colCondParam) colQuery = colQuery % unsafeSQLIdentificatorNaming(column) for db in dbData: conf.db = origDb conf.tbl = origTbl infoMsg = "fetching number of tables containing column" if colConsider == "1": infoMsg += "s like" infoMsg += " '%s' in database '%s'" % (unsafeSQLIdentificatorNaming(column), unsafeSQLIdentificatorNaming(db)) logger.info(infoMsg) query = rootQuery.blind.count2 query = query % unsafeSQLIdentificatorNaming(db) query += " AND %s" % colQuery query += whereTblsQuery count = inject.getValue(query, union=False, error=False, expected=EXPECTED.INT, charsetType=CHARSET_TYPE.DIGITS) if not isNumPosStrValue(count): warnMsg = "no tables contain column" if colConsider == "1": warnMsg += "s like" warnMsg += " '%s' " % unsafeSQLIdentificatorNaming(column) warnMsg += "in database '%s'" % unsafeSQLIdentificatorNaming(db) logger.warn(warnMsg) continue indexRange = getLimitRange(count) for index in indexRange: query = rootQuery.blind.query2 if query.endswith("'%s')"): query = query[:-1] + " AND %s)" % (colQuery + whereTblsQuery) else: query += " AND %s" % (colQuery + whereTblsQuery) query = safeStringFormat(query, unsafeSQLIdentificatorNaming(db)) query = agent.limitQuery(index, query) tbl = unArrayizeValue(inject.getValue(query, union=False, error=False)) kb.hintValue = tbl tbl = safeSQLIdentificatorNaming(tbl, True) conf.db = db conf.tbl = tbl conf.col = column self.getColumns(onlyColNames=True, colTuple=(colConsider, colCondParam), bruteForce=False) if db in kb.data.cachedColumns and tbl in kb.data.cachedColumns[db]: if db not in dbs: dbs[db] = {} if tbl not in dbs[db]: dbs[db][tbl] = {} dbs[db][tbl].update(kb.data.cachedColumns[db][tbl]) kb.data.cachedColumns = {} if db in foundCols[column]: foundCols[column][db].append(tbl) else: foundCols[column][db] = [tbl] if dbs: conf.dumper.dbColumns(foundCols, colConsider, dbs) self.dumpFoundColumn(dbs, foundCols, colConsider) else: warnMsg = "no databases have tables containing any of the " warnMsg += "provided 
columns" logger.warn(warnMsg) def search(self): if Backend.getIdentifiedDbms() in (DBMS.ORACLE, DBMS.DB2): for item in ('db', 'tbl', 'col'): if getattr(conf, item, None): setattr(conf, item, getattr(conf, item).upper()) if conf.col: self.searchColumn() elif conf.tbl: self.searchTable() elif conf.db: self.searchDb() else: errMsg = "missing parameter, provide -D, -T or -C along " errMsg += "with --search" raise SqlmapMissingMandatoryOptionException(errMsg)
golismero/golismero
tools/sqlmap/plugins/generic/search.py
Python
gpl-2.0
26,113
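A minimal, self-contained sketch of the LIKE-versus-exact condition building that searchDb/searchTable/searchColumn in the plugin above perform. It is illustrative only: build_search_condition and its parameters are hypothetical helpers invented for this sketch, not part of sqlmap's agent or inject API.

def build_search_condition(column, value, like=True, exclude=()):
    """Return a WHERE fragment matching `value` against `column`.

    With like=True the value is wrapped in wildcards (the "1"/LIKE branch in
    the plugin); otherwise an exact comparison is used (the "2" branch).
    Excluded names (e.g. system databases) are appended as inequalities.
    """
    if like:
        condition = "%s LIKE '%%%s%%'" % (column, value)
    else:
        condition = "%s = '%s'" % (column, value)
    for name in exclude:
        condition += " AND %s != '%s'" % (column, name)
    return condition

# Example: a fragment similar to what searchTable() appends to its root query.
print(build_search_condition("table_name", "user", like=True,
                             exclude=("information_schema", "mysql")))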
#!/usr/bin/env python

"""
Copyright (c) 2006-2013 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""

from lib.core.enums import PRIORITY

__priority__ = PRIORITY.LOWEST

def dependencies():
    pass

def tamper(payload, **kwargs):
    """
    Replaces apostrophe character with its illegal double unicode counterpart

    >>> tamper("1 AND '1'='1")
    '1 AND %00%271%00%27=%00%271'
    """

    return payload.replace('\'', "%00%27") if payload else payload
golismero/golismero
tools/sqlmap/tamper/apostrophenullencode.py
Python
gpl-2.0
503
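A short usage sketch for the tamper script above; it reproduces the replacement inline so it runs without sqlmap's package layout, and the expected output matches the script's own doctest.

payload = "1 AND '1'='1"
# apostrophenullencode replaces every apostrophe with a %00%27 sequence
print(payload.replace("'", "%00%27"))  # 1 AND %00%271%00%27=%00%271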
# -*- coding: utf-8 -*-
"""
This script initializes the plugin, making it known to QGIS.
"""

def classFactory(iface):
    from HelloWorld import HelloWorld
    return HelloWorld(iface)
qgis/QGIS-Django
qgis-app/plugins/tests/HelloWorld/2.3-full-changed-repository/HelloWorld/__init__.py
Python
gpl-2.0
183
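A minimal sketch of the HelloWorld class that the classFactory() entry point above is expected to return. QGIS calls initGui() when the plugin is loaded and unload() when it is removed; everything beyond those two hook names is illustrative and not taken from the repository.

class HelloWorld(object):
    def __init__(self, iface):
        # iface is the QgisInterface instance handed in by classFactory()
        self.iface = iface

    def initGui(self):
        # create menu entries / toolbar icons here
        pass

    def unload(self):
        # remove whatever initGui() created
        pass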
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_redis_instance_info description: - Gather info for GCP Instance short_description: Gather info for GCP Instance version_added: '2.8' author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: region: description: - The name of the Redis region of the instance. required: true type: str project: description: - The Google Cloud Platform project to use. type: str auth_kind: description: - The type of credential used. type: str required: true choices: - application - machineaccount - serviceaccount service_account_contents: description: - The contents of a Service Account JSON file, either in a dictionary or as a JSON string that represents it. type: jsonarg service_account_file: description: - The path of a Service Account JSON file if serviceaccount is selected as type. type: path service_account_email: description: - An optional service account email address if machineaccount is selected and the user does not wish to use the default email. type: str scopes: description: - Array of scopes to be used type: list env_type: description: - Specifies which Ansible environment you're running this module within. - This should not be set unless you know what you're doing. - This only alters the User Agent string for any API requests. type: str notes: - for authentication, you can set service_account_file using the C(gcp_service_account_file) env variable. - for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS) env variable. - For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL) env variable. - For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable. - For authentication, you can set scopes using the C(GCP_SCOPES) env variable. - Environment variables values will only be used if the playbook values are not set. - The I(service_account_email) and I(service_account_file) options are mutually exclusive. ''' EXAMPLES = ''' - name: get info on an instance gcp_redis_instance_info: region: us-central1 project: test_project auth_kind: serviceaccount service_account_file: "/tmp/auth.pem" ''' RETURN = ''' resources: description: List of resources returned: always type: complex contains: alternativeLocationId: description: - Only applicable to STANDARD_HA tier which protects the instance against zonal failures by provisioning it across two zones. 
- If provided, it must be a different zone from the one provided in [locationId]. returned: success type: str authorizedNetwork: description: - The full name of the Google Compute Engine network to which the instance is connected. If left unspecified, the default network will be used. returned: success type: str createTime: description: - The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds. returned: success type: str currentLocationId: description: - The current zone where the Redis endpoint is placed. - For Basic Tier instances, this will always be the same as the [locationId] provided by the user at creation time. For Standard Tier instances, this can be either [locationId] or [alternativeLocationId] and can change after a failover event. returned: success type: str displayName: description: - An arbitrary and optional user-provided name for the instance. returned: success type: str host: description: - Hostname or IP address of the exposed Redis endpoint used by clients to connect to the service. returned: success type: str labels: description: - Resource labels to represent user provided metadata. returned: success type: dict redisConfigs: description: - Redis configuration parameters, according to U(http://redis.io/topics/config). - 'Please check Memorystore documentation for the list of supported parameters: U(https://cloud.google.com/memorystore/docs/redis/reference/rest/v1/projects.locations.instances#Instance.FIELDS.redis_configs) .' returned: success type: dict locationId: description: - The zone where the instance will be provisioned. If not provided, the service will choose a zone for the instance. For STANDARD_HA tier, instances will be created across two zones for protection against zonal failures. If [alternativeLocationId] is also provided, it must be different from [locationId]. returned: success type: str name: description: - The ID of the instance or a fully qualified identifier for the instance. returned: success type: str memorySizeGb: description: - Redis memory size in GiB. returned: success type: int port: description: - The port number of the exposed Redis endpoint. returned: success type: int redisVersion: description: - 'The version of Redis software. If not provided, latest supported version will be used. Currently, the supported values are: - REDIS_4_0 for Redis 4.0 compatibility - REDIS_3_2 for Redis 3.2 compatibility .' returned: success type: str reservedIpRange: description: - The CIDR range of internal addresses that are reserved for this instance. If not provided, the service will choose an unused /29 block, for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique and non-overlapping with existing subnets in an authorized network. returned: success type: str tier: description: - 'The service tier of the instance. Must be one of these values: - BASIC: standalone instance - STANDARD_HA: highly available primary/replica instances .' returned: success type: str region: description: - The name of the Redis region of the instance. 
returned: success type: str ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest import json ################################################################################ # Main ################################################################################ def main(): module = GcpModule(argument_spec=dict(region=dict(required=True, type='str'))) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/cloud-platform'] return_value = {'resources': fetch_list(module, collection(module))} module.exit_json(**return_value) def collection(module): return "https://redis.googleapis.com/v1/projects/{project}/locations/{region}/instances".format(**module.params) def fetch_list(module, link): auth = GcpSession(module, 'redis') return auth.list(link, return_if_object, array_name='instances') def return_if_object(module, response): # If not found, return nothing. if response.status_code == 404: return None # If no content, return nothing. if response.status_code == 204: return None try: module.raise_for_status(response) result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst: module.fail_json(msg="Invalid JSON response with error: %s" % inst) if navigate_hash(result, ['error', 'errors']): module.fail_json(msg=navigate_hash(result, ['error', 'errors'])) return result if __name__ == "__main__": main()
anryko/ansible
lib/ansible/modules/cloud/google/gcp_redis_instance_info.py
Python
gpl-3.0
9,179
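The module above leans on navigate_hash() to pull nested keys out of the API response (for example ['error', 'errors']). The stand-alone re-implementation below is a sketch of that lookup pattern only, not the actual ansible.module_utils.gcp_utils code.

def navigate_hash_sketch(source, path, default=None):
    """Walk `path` (a list of keys) through nested dicts, returning `default`
    as soon as any key is missing."""
    current = source
    for key in path:
        if not isinstance(current, dict) or key not in current:
            return default
        current = current[key]
    return current

response = {"error": {"errors": [{"message": "quota exceeded"}]}}
print(navigate_hash_sketch(response, ["error", "errors"]))  # the error list
print(navigate_hash_sketch(response, ["instances"]))        # None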
# coding=utf-8 """Test for GIS utilities functions.""" import unittest # noinspection PyUnresolvedReferences import qgis # pylint: disable=unused-import from qgis.core import QgsRectangle from safe.definitions.constants import INASAFE_TEST from safe.utilities.gis import ( is_polygon_layer, is_raster_y_inverted, wkt_to_rectangle, validate_geo_array) from safe.test.utilities import ( clone_raster_layer, load_test_vector_layer, load_test_raster_layer, standard_data_path, get_qgis_app) QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app(qsetting=INASAFE_TEST) class TestQGIS(unittest.TestCase): def test_is_polygonal_layer(self): """Test we can get the correct attributes back.""" # Polygon layer layer = load_test_vector_layer( 'aggregation', 'district_osm_jakarta.geojson', clone=True ) message = 'isPolygonLayer, %s layer should be polygonal' % layer self.assertTrue(is_polygon_layer(layer), message) # Point layer layer = load_test_vector_layer('hazard', 'volcano_point.geojson') message = '%s layer should be polygonal' % layer self.assertFalse(is_polygon_layer(layer), message) # Raster layer layer = clone_raster_layer( name='earthquake', extension='.tif', include_keywords=True, source_directory=standard_data_path('hazard') ) message = ('%s raster layer should not be polygonal' % layer) self.assertFalse(is_polygon_layer(layer), message) def test_raster_y_inverted(self): """Test if we can detect an upside down raster.""" # We should have one test with an inverted raster but as it's not # usual, I'm not going to spend time. layer = load_test_raster_layer('gisv4', 'hazard', 'earthquake.asc') self.assertFalse(is_raster_y_inverted(layer)) def test_rectangle_from_wkt(self): """Test we can a create a rectangle from a WKT.""" rectangle = wkt_to_rectangle('POLYGON ((0 0, 0 1, 1 1, 1 0, 0 0))') self.assertTrue(isinstance(rectangle, QgsRectangle)) rectangle = wkt_to_rectangle('POLYGON ((0 1, 1 1, 1 0, 0 0))') self.assertIsNone(rectangle) def test_validate_geo_array(self): """Test validate geographic extent method. .. 
versionadded:: 3.2 """ # Normal case min_longitude = 20.389938354492188 min_latitude = -34.10782492987083 max_longitude = 20.712661743164062 max_latitude = -34.008273470938335 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertTrue(validate_geo_array(extent)) # min_latitude >= max_latitude min_latitude = 34.10782492987083 max_latitude = -34.008273470938335 min_longitude = 20.389938354492188 max_longitude = 20.712661743164062 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertFalse(validate_geo_array(extent)) # min_longitude >= max_longitude min_latitude = -34.10782492987083 max_latitude = -34.008273470938335 min_longitude = 34.10782492987083 max_longitude = -34.008273470938335 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertFalse(validate_geo_array(extent)) # min_latitude < -90 or > 90 min_latitude = -134.10782492987083 max_latitude = -34.008273470938335 min_longitude = 20.389938354492188 max_longitude = 20.712661743164062 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertFalse(validate_geo_array(extent)) # max_latitude < -90 or > 90 min_latitude = -9.10782492987083 max_latitude = 91.10782492987083 min_longitude = 20.389938354492188 max_longitude = 20.712661743164062 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertFalse(validate_geo_array(extent)) # min_longitude < -180 or > 180 min_latitude = -34.10782492987083 max_latitude = -34.008273470938335 min_longitude = -184.10782492987083 max_longitude = 20.712661743164062 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertFalse(validate_geo_array(extent)) # max_longitude < -180 or > 180 min_latitude = -34.10782492987083 max_latitude = -34.008273470938335 min_longitude = 20.389938354492188 max_longitude = 180.712661743164062 extent = [min_longitude, min_latitude, max_longitude, max_latitude] self.assertFalse(validate_geo_array(extent)) if __name__ == '__main__': unittest.main()
AIFDR/inasafe
safe/utilities/test/test_gis.py
Python
gpl-3.0
4,837
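A candidate implementation of validate_geo_array(), inferred only from the assertions in the test module above; the real safe.utilities.gis version may differ in detail.

def validate_geo_array(extent):
    """Return True if [min_lon, min_lat, max_lon, max_lat] is a sane
    geographic extent in degrees."""
    min_lon, min_lat, max_lon, max_lat = extent
    if min_lon >= max_lon or min_lat >= max_lat:
        return False
    if not (-180 <= min_lon <= 180 and -180 <= max_lon <= 180):
        return False
    if not (-90 <= min_lat <= 90 and -90 <= max_lat <= 90):
        return False
    return True

print(validate_geo_array([20.39, -34.11, 20.71, -34.01]))   # True (normal case)
print(validate_geo_array([20.39, -34.11, 180.71, -34.01]))  # False (lon > 180)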
""" Component-level Specification This module is called component to mirror organization of storm package. """ from ..storm.component import Component class Specification(object): def __init__(self, component_cls, name=None, parallelism=1): if not issubclass(component_cls, Component): raise TypeError("Invalid component: {}".format(component_cls)) if not isinstance(parallelism, int) or parallelism < 1: raise ValueError("Parallelism must be a integer greater than 0") self.component_cls = component_cls self.name = name self.parallelism = parallelism def resolve_dependencies(self, specifications): """Allows specification subclasses to resolve an dependencies that they may have on other specifications. :param specifications: all of the specification objects for this topology. :type specifications: dict """ pass
hodgesds/streamparse
streamparse/dsl/component.py
Python
apache-2.0
976
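A short usage sketch for the Specification class above, assuming the streamparse package is importable at the module paths used in this repository snapshot; MyBolt is a hypothetical Component subclass, so the calls are kept as comments.

# from streamparse.dsl.component import Specification
# from streamparse.storm.component import Component
#
# class MyBolt(Component):
#     pass
#
# spec = Specification(MyBolt, name="my-bolt", parallelism=4)
# Specification(object)                 # TypeError: not a Component subclass
# Specification(MyBolt, parallelism=0)  # ValueError: parallelism must be >= 1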
# Copyright 2013 Josh Durgin # Copyright 2013 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime from lxml import etree from oslo.config import cfg import webob from webob import exc from nova.api.openstack.compute.contrib import assisted_volume_snapshots as \ assisted_snaps from nova.api.openstack.compute.contrib import volumes from nova.api.openstack import extensions from nova.compute import api as compute_api from nova.compute import flavors from nova import context from nova import exception from nova.openstack.common import jsonutils from nova.openstack.common import timeutils from nova import test from nova.tests.api.openstack import fakes from nova.volume import cinder CONF = cfg.CONF CONF.import_opt('password_length', 'nova.utils') FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' FAKE_UUID_A = '00000000-aaaa-aaaa-aaaa-000000000000' FAKE_UUID_B = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb' FAKE_UUID_C = 'cccccccc-cccc-cccc-cccc-cccccccccccc' FAKE_UUID_D = 'dddddddd-dddd-dddd-dddd-dddddddddddd' IMAGE_UUID = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77' def fake_get_instance(self, context, instance_id, want_objects=False): return {'uuid': instance_id} def fake_get_volume(self, context, id): return {'id': 'woot'} def fake_attach_volume(self, context, instance, volume_id, device): pass def fake_detach_volume(self, context, instance, volume): pass def fake_swap_volume(self, context, instance, old_volume_id, new_volume_id): pass def fake_create_snapshot(self, context, volume, name, description): return {'id': 123, 'volume_id': 'fakeVolId', 'status': 'available', 'volume_size': 123, 'created_at': '2013-01-01 00:00:01', 'display_name': 'myVolumeName', 'display_description': 'myVolumeDescription'} def fake_delete_snapshot(self, context, snapshot_id): pass def fake_compute_volume_snapshot_delete(self, context, volume_id, snapshot_id, delete_info): pass def fake_compute_volume_snapshot_create(self, context, volume_id, create_info): pass def fake_get_instance_bdms(self, context, instance): return [{'id': 1, 'instance_uuid': instance['uuid'], 'device_name': '/dev/fake0', 'delete_on_termination': 'False', 'virtual_name': 'MyNamesVirtual', 'snapshot_id': None, 'volume_id': FAKE_UUID_A, 'volume_size': 1}, {'id': 2, 'instance_uuid': instance['uuid'], 'device_name': '/dev/fake1', 'delete_on_termination': 'False', 'virtual_name': 'MyNamesVirtual', 'snapshot_id': None, 'volume_id': FAKE_UUID_B, 'volume_size': 1}] class BootFromVolumeTest(test.TestCase): def setUp(self): super(BootFromVolumeTest, self).setUp() self.stubs.Set(compute_api.API, 'create', self._get_fake_compute_api_create()) fakes.stub_out_nw_api(self.stubs) self._block_device_mapping_seen = None self._legacy_bdm_seen = True self.flags( osapi_compute_extension=[ 'nova.api.openstack.compute.contrib.select_extensions'], osapi_compute_ext_list=['Volumes', 'Block_device_mapping_v2_boot']) def _get_fake_compute_api_create(self): def _fake_compute_api_create(cls, context, instance_type, image_href, **kwargs): 
self._block_device_mapping_seen = kwargs.get( 'block_device_mapping') self._legacy_bdm_seen = kwargs.get('legacy_bdm') inst_type = flavors.get_flavor_by_flavor_id(2) resv_id = None return ([{'id': 1, 'display_name': 'test_server', 'uuid': FAKE_UUID, 'instance_type': dict(inst_type), 'access_ip_v4': '1.2.3.4', 'access_ip_v6': 'fead::1234', 'image_ref': IMAGE_UUID, 'user_id': 'fake', 'project_id': 'fake', 'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0), 'updated_at': datetime.datetime(2010, 11, 11, 11, 0, 0), 'progress': 0, 'fixed_ips': [] }], resv_id) return _fake_compute_api_create def test_create_root_volume(self): body = dict(server=dict( name='test_server', imageRef=IMAGE_UUID, flavorRef=2, min_count=1, max_count=1, block_device_mapping=[dict( volume_id=1, device_name='/dev/vda', virtual='root', delete_on_termination=False, )] )) req = webob.Request.blank('/v2/fake/os-volumes_boot') req.method = 'POST' req.body = jsonutils.dumps(body) req.headers['content-type'] = 'application/json' res = req.get_response(fakes.wsgi_app( init_only=('os-volumes_boot', 'servers'))) self.assertEqual(res.status_int, 202) server = jsonutils.loads(res.body)['server'] self.assertEqual(FAKE_UUID, server['id']) self.assertEqual(CONF.password_length, len(server['adminPass'])) self.assertEqual(len(self._block_device_mapping_seen), 1) self.assertTrue(self._legacy_bdm_seen) self.assertEqual(self._block_device_mapping_seen[0]['volume_id'], 1) self.assertEqual(self._block_device_mapping_seen[0]['device_name'], '/dev/vda') def test_create_root_volume_bdm_v2(self): body = dict(server=dict( name='test_server', imageRef=IMAGE_UUID, flavorRef=2, min_count=1, max_count=1, block_device_mapping_v2=[dict( source_type='volume', uuid=1, device_name='/dev/vda', boot_index=0, delete_on_termination=False, )] )) req = webob.Request.blank('/v2/fake/os-volumes_boot') req.method = 'POST' req.body = jsonutils.dumps(body) req.headers['content-type'] = 'application/json' res = req.get_response(fakes.wsgi_app( init_only=('os-volumes_boot', 'servers'))) self.assertEqual(res.status_int, 202) server = jsonutils.loads(res.body)['server'] self.assertEqual(FAKE_UUID, server['id']) self.assertEqual(CONF.password_length, len(server['adminPass'])) self.assertEqual(len(self._block_device_mapping_seen), 1) self.assertFalse(self._legacy_bdm_seen) self.assertEqual(self._block_device_mapping_seen[0]['volume_id'], 1) self.assertEqual(self._block_device_mapping_seen[0]['boot_index'], 0) self.assertEqual(self._block_device_mapping_seen[0]['device_name'], '/dev/vda') class VolumeApiTest(test.TestCase): def setUp(self): super(VolumeApiTest, self).setUp() fakes.stub_out_networking(self.stubs) fakes.stub_out_rate_limiting(self.stubs) self.stubs.Set(cinder.API, "delete", fakes.stub_volume_delete) self.stubs.Set(cinder.API, "get", fakes.stub_volume_get) self.stubs.Set(cinder.API, "get_all", fakes.stub_volume_get_all) self.flags( osapi_compute_extension=[ 'nova.api.openstack.compute.contrib.select_extensions'], osapi_compute_ext_list=['Volumes']) self.context = context.get_admin_context() self.app = fakes.wsgi_app(init_only=('os-volumes',)) def test_volume_create(self): self.stubs.Set(cinder.API, "create", fakes.stub_volume_create) vol = {"size": 100, "display_name": "Volume Test Name", "display_description": "Volume Test Desc", "availability_zone": "zone1:host1"} body = {"volume": vol} req = webob.Request.blank('/v2/fake/os-volumes') req.method = 'POST' req.body = jsonutils.dumps(body) req.headers['content-type'] = 'application/json' resp = 
req.get_response(self.app) self.assertEqual(resp.status_int, 200) resp_dict = jsonutils.loads(resp.body) self.assertTrue('volume' in resp_dict) self.assertEqual(resp_dict['volume']['size'], vol['size']) self.assertEqual(resp_dict['volume']['displayName'], vol['display_name']) self.assertEqual(resp_dict['volume']['displayDescription'], vol['display_description']) self.assertEqual(resp_dict['volume']['availabilityZone'], vol['availability_zone']) def test_volume_create_bad(self): def fake_volume_create(self, context, size, name, description, snapshot, **param): raise exception.InvalidInput(reason="bad request data") self.stubs.Set(cinder.API, "create", fake_volume_create) vol = {"size": '#$?', "display_name": "Volume Test Name", "display_description": "Volume Test Desc", "availability_zone": "zone1:host1"} body = {"volume": vol} req = fakes.HTTPRequest.blank('/v2/fake/os-volumes') self.assertRaises(webob.exc.HTTPBadRequest, volumes.VolumeController().create, req, body) def test_volume_index(self): req = webob.Request.blank('/v2/fake/os-volumes') resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) def test_volume_detail(self): req = webob.Request.blank('/v2/fake/os-volumes/detail') resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) def test_volume_show(self): req = webob.Request.blank('/v2/fake/os-volumes/123') resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) def test_volume_show_no_volume(self): self.stubs.Set(cinder.API, "get", fakes.stub_volume_notfound) req = webob.Request.blank('/v2/fake/os-volumes/456') resp = req.get_response(self.app) self.assertEqual(resp.status_int, 404) def test_volume_delete(self): req = webob.Request.blank('/v2/fake/os-volumes/123') req.method = 'DELETE' resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) def test_volume_delete_no_volume(self): self.stubs.Set(cinder.API, "delete", fakes.stub_volume_notfound) req = webob.Request.blank('/v2/fake/os-volumes/456') req.method = 'DELETE' resp = req.get_response(self.app) self.assertEqual(resp.status_int, 404) class VolumeAttachTests(test.TestCase): def setUp(self): super(VolumeAttachTests, self).setUp() self.stubs.Set(compute_api.API, 'get_instance_bdms', fake_get_instance_bdms) self.stubs.Set(compute_api.API, 'get', fake_get_instance) self.stubs.Set(cinder.API, 'get', fake_get_volume) self.context = context.get_admin_context() self.expected_show = {'volumeAttachment': {'device': '/dev/fake0', 'serverId': FAKE_UUID, 'id': FAKE_UUID_A, 'volumeId': FAKE_UUID_A }} self.ext_mgr = extensions.ExtensionManager() self.ext_mgr.extensions = {} self.attachments = volumes.VolumeAttachmentController(self.ext_mgr) def test_show(self): req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid') req.method = 'POST' req.body = jsonutils.dumps({}) req.headers['content-type'] = 'application/json' req.environ['nova.context'] = self.context result = self.attachments.show(req, FAKE_UUID, FAKE_UUID_A) self.assertEqual(self.expected_show, result) def test_detach(self): self.stubs.Set(compute_api.API, 'detach_volume', fake_detach_volume) req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid') req.method = 'DELETE' req.headers['content-type'] = 'application/json' req.environ['nova.context'] = self.context result = self.attachments.delete(req, FAKE_UUID, FAKE_UUID_A) self.assertEqual('202 Accepted', result.status) def test_detach_vol_not_found(self): self.stubs.Set(compute_api.API, 'detach_volume', fake_detach_volume) req = 
webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid') req.method = 'DELETE' req.headers['content-type'] = 'application/json' req.environ['nova.context'] = self.context self.assertRaises(exc.HTTPNotFound, self.attachments.delete, req, FAKE_UUID, FAKE_UUID_C) def test_attach_volume(self): self.stubs.Set(compute_api.API, 'attach_volume', fake_attach_volume) body = {'volumeAttachment': {'volumeId': FAKE_UUID_A, 'device': '/dev/fake'}} req = webob.Request.blank('/v2/servers/id/os-volume_attachments') req.method = 'POST' req.body = jsonutils.dumps({}) req.headers['content-type'] = 'application/json' req.environ['nova.context'] = self.context result = self.attachments.create(req, FAKE_UUID, body) self.assertEqual(result['volumeAttachment']['id'], '00000000-aaaa-aaaa-aaaa-000000000000') def test_attach_volume_bad_id(self): self.stubs.Set(compute_api.API, 'attach_volume', fake_attach_volume) body = { 'volumeAttachment': { 'device': None, 'volumeId': 'TESTVOLUME', } } req = webob.Request.blank('/v2/servers/id/os-volume_attachments') req.method = 'POST' req.body = jsonutils.dumps({}) req.headers['content-type'] = 'application/json' req.environ['nova.context'] = self.context self.assertRaises(webob.exc.HTTPBadRequest, self.attachments.create, req, FAKE_UUID, body) def _test_swap(self, uuid=FAKE_UUID_A): self.stubs.Set(compute_api.API, 'swap_volume', fake_swap_volume) body = {'volumeAttachment': {'volumeId': FAKE_UUID_B, 'device': '/dev/fake'}} req = webob.Request.blank('/v2/servers/id/os-volume_attachments/uuid') req.method = 'PUT' req.body = jsonutils.dumps({}) req.headers['content-type'] = 'application/json' req.environ['nova.context'] = self.context return self.attachments.update(req, FAKE_UUID, uuid, body) def test_swap_volume_no_extension(self): self.assertRaises(webob.exc.HTTPBadRequest, self._test_swap) def test_swap_volume(self): self.ext_mgr.extensions['os-volume-attachment-update'] = True result = self._test_swap() self.assertEqual('202 Accepted', result.status) def test_swap_volume_no_attachment(self): self.ext_mgr.extensions['os-volume-attachment-update'] = True self.assertRaises(exc.HTTPNotFound, self._test_swap, FAKE_UUID_C) class VolumeSerializerTest(test.TestCase): def _verify_volume_attachment(self, attach, tree): for attr in ('id', 'volumeId', 'serverId', 'device'): self.assertEqual(str(attach[attr]), tree.get(attr)) def _verify_volume(self, vol, tree): self.assertEqual(tree.tag, 'volume') for attr in ('id', 'status', 'size', 'availabilityZone', 'createdAt', 'displayName', 'displayDescription', 'volumeType', 'snapshotId'): self.assertEqual(str(vol[attr]), tree.get(attr)) for child in tree: self.assertTrue(child.tag in ('attachments', 'metadata')) if child.tag == 'attachments': self.assertEqual(1, len(child)) self.assertEqual('attachment', child[0].tag) self._verify_volume_attachment(vol['attachments'][0], child[0]) elif child.tag == 'metadata': not_seen = set(vol['metadata'].keys()) for gr_child in child: self.assertTrue(gr_child.get("key") in not_seen) self.assertEqual(str(vol['metadata'][gr_child.get("key")]), gr_child.text) not_seen.remove(gr_child.get("key")) self.assertEqual(0, len(not_seen)) def test_attach_show_create_serializer(self): serializer = volumes.VolumeAttachmentTemplate() raw_attach = dict( id='vol_id', volumeId='vol_id', serverId='instance_uuid', device='/foo') text = serializer.serialize(dict(volumeAttachment=raw_attach)) tree = etree.fromstring(text) self.assertEqual('volumeAttachment', tree.tag) self._verify_volume_attachment(raw_attach, tree) def 
test_attach_index_serializer(self): serializer = volumes.VolumeAttachmentsTemplate() raw_attaches = [dict( id='vol_id1', volumeId='vol_id1', serverId='instance1_uuid', device='/foo1'), dict( id='vol_id2', volumeId='vol_id2', serverId='instance2_uuid', device='/foo2')] text = serializer.serialize(dict(volumeAttachments=raw_attaches)) tree = etree.fromstring(text) self.assertEqual('volumeAttachments', tree.tag) self.assertEqual(len(raw_attaches), len(tree)) for idx, child in enumerate(tree): self.assertEqual('volumeAttachment', child.tag) self._verify_volume_attachment(raw_attaches[idx], child) def test_volume_show_create_serializer(self): serializer = volumes.VolumeTemplate() raw_volume = dict( id='vol_id', status='vol_status', size=1024, availabilityZone='vol_availability', createdAt=timeutils.utcnow(), attachments=[dict( id='vol_id', volumeId='vol_id', serverId='instance_uuid', device='/foo')], displayName='vol_name', displayDescription='vol_desc', volumeType='vol_type', snapshotId='snap_id', metadata=dict( foo='bar', baz='quux', ), ) text = serializer.serialize(dict(volume=raw_volume)) tree = etree.fromstring(text) self._verify_volume(raw_volume, tree) def test_volume_index_detail_serializer(self): serializer = volumes.VolumesTemplate() raw_volumes = [dict( id='vol1_id', status='vol1_status', size=1024, availabilityZone='vol1_availability', createdAt=timeutils.utcnow(), attachments=[dict( id='vol1_id', volumeId='vol1_id', serverId='instance_uuid', device='/foo1')], displayName='vol1_name', displayDescription='vol1_desc', volumeType='vol1_type', snapshotId='snap1_id', metadata=dict( foo='vol1_foo', bar='vol1_bar', ), ), dict( id='vol2_id', status='vol2_status', size=1024, availabilityZone='vol2_availability', createdAt=timeutils.utcnow(), attachments=[dict( id='vol2_id', volumeId='vol2_id', serverId='instance_uuid', device='/foo2')], displayName='vol2_name', displayDescription='vol2_desc', volumeType='vol2_type', snapshotId='snap2_id', metadata=dict( foo='vol2_foo', bar='vol2_bar', ), )] text = serializer.serialize(dict(volumes=raw_volumes)) tree = etree.fromstring(text) self.assertEqual('volumes', tree.tag) self.assertEqual(len(raw_volumes), len(tree)) for idx, child in enumerate(tree): self._verify_volume(raw_volumes[idx], child) class TestVolumeCreateRequestXMLDeserializer(test.TestCase): def setUp(self): super(TestVolumeCreateRequestXMLDeserializer, self).setUp() self.deserializer = volumes.CreateDeserializer() def test_minimal_volume(self): self_request = """ <volume xmlns="http://docs.openstack.org/compute/api/v1.1" size="1"></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "size": "1", }, } self.assertEquals(request['body'], expected) def test_display_name(self): self_request = """ <volume xmlns="http://docs.openstack.org/compute/api/v1.1" size="1" display_name="Volume-xml"></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "size": "1", "display_name": "Volume-xml", }, } self.assertEquals(request['body'], expected) def test_display_description(self): self_request = """ <volume xmlns="http://docs.openstack.org/compute/api/v1.1" size="1" display_name="Volume-xml" display_description="description"></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "size": "1", "display_name": "Volume-xml", "display_description": "description", }, } self.assertEquals(request['body'], expected) def test_volume_type(self): self_request = """ <volume 
xmlns="http://docs.openstack.org/compute/api/v1.1" size="1" display_name="Volume-xml" display_description="description" volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "display_name": "Volume-xml", "size": "1", "display_name": "Volume-xml", "display_description": "description", "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164", }, } self.assertEquals(request['body'], expected) def test_availability_zone(self): self_request = """ <volume xmlns="http://docs.openstack.org/compute/api/v1.1" size="1" display_name="Volume-xml" display_description="description" volume_type="289da7f8-6440-407c-9fb4-7db01ec49164" availability_zone="us-east1"></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "size": "1", "display_name": "Volume-xml", "display_description": "description", "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164", "availability_zone": "us-east1", }, } self.assertEquals(request['body'], expected) def test_metadata(self): self_request = """ <volume xmlns="http://docs.openstack.org/compute/api/v1.1" display_name="Volume-xml" size="1"> <metadata><meta key="Type">work</meta></metadata></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "display_name": "Volume-xml", "size": "1", "metadata": { "Type": "work", }, }, } self.assertEquals(request['body'], expected) def test_full_volume(self): self_request = """ <volume xmlns="http://docs.openstack.org/compute/api/v1.1" size="1" display_name="Volume-xml" display_description="description" volume_type="289da7f8-6440-407c-9fb4-7db01ec49164" availability_zone="us-east1"> <metadata><meta key="Type">work</meta></metadata></volume>""" request = self.deserializer.deserialize(self_request) expected = { "volume": { "size": "1", "display_name": "Volume-xml", "display_description": "description", "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164", "availability_zone": "us-east1", "metadata": { "Type": "work", }, }, } self.maxDiff = None self.assertEquals(request['body'], expected) class CommonUnprocessableEntityTestCase(object): resource = None entity_name = None controller_cls = None kwargs = {} """ Tests of places we throw 422 Unprocessable Entity from """ def setUp(self): super(CommonUnprocessableEntityTestCase, self).setUp() self.controller = self.controller_cls() def _unprocessable_create(self, body): req = fakes.HTTPRequest.blank('/v2/fake/' + self.resource) req.method = 'POST' kwargs = self.kwargs.copy() kwargs['body'] = body self.assertRaises(webob.exc.HTTPUnprocessableEntity, self.controller.create, req, **kwargs) def test_create_no_body(self): self._unprocessable_create(body=None) def test_create_missing_volume(self): body = {'foo': {'a': 'b'}} self._unprocessable_create(body=body) def test_create_malformed_entity(self): body = {self.entity_name: 'string'} self._unprocessable_create(body=body) class UnprocessableVolumeTestCase(CommonUnprocessableEntityTestCase, test.TestCase): resource = 'os-volumes' entity_name = 'volume' controller_cls = volumes.VolumeController class UnprocessableAttachmentTestCase(CommonUnprocessableEntityTestCase, test.TestCase): resource = 'servers/' + FAKE_UUID + '/os-volume_attachments' entity_name = 'volumeAttachment' controller_cls = volumes.VolumeAttachmentController kwargs = {'server_id': FAKE_UUID} class UnprocessableSnapshotTestCase(CommonUnprocessableEntityTestCase, test.TestCase): resource = 'os-snapshots' entity_name = 'snapshot' 
controller_cls = volumes.SnapshotController class CreateSnapshotTestCase(test.TestCase): def setUp(self): super(CreateSnapshotTestCase, self).setUp() self.controller = volumes.SnapshotController() self.stubs.Set(cinder.API, 'get', fake_get_volume) self.stubs.Set(cinder.API, 'create_snapshot_force', fake_create_snapshot) self.stubs.Set(cinder.API, 'create_snapshot', fake_create_snapshot) self.req = fakes.HTTPRequest.blank('/v2/fake/os-snapshots') self.req.method = 'POST' self.body = {'snapshot': {'volume_id': 1}} def test_force_true(self): self.body['snapshot']['force'] = 'True' self.controller.create(self.req, body=self.body) def test_force_false(self): self.body['snapshot']['force'] = 'f' self.controller.create(self.req, body=self.body) def test_force_invalid(self): self.body['snapshot']['force'] = 'foo' self.assertRaises(exception.InvalidParameterValue, self.controller.create, self.req, body=self.body) class DeleteSnapshotTestCase(test.TestCase): def setUp(self): super(DeleteSnapshotTestCase, self).setUp() self.controller = volumes.SnapshotController() self.stubs.Set(cinder.API, 'get', fake_get_volume) self.stubs.Set(cinder.API, 'create_snapshot_force', fake_create_snapshot) self.stubs.Set(cinder.API, 'create_snapshot', fake_create_snapshot) self.stubs.Set(cinder.API, 'delete_snapshot', fake_delete_snapshot) self.req = fakes.HTTPRequest.blank('/v2/fake/os-snapshots') def test_normal_delete(self): self.req.method = 'POST' self.body = {'snapshot': {'volume_id': 1}} result = self.controller.create(self.req, body=self.body) self.req.method = 'DELETE' result = self.controller.delete(self.req, result['snapshot']['id']) self.assertEqual(result.status_int, 202) class AssistedSnapshotCreateTestCase(test.TestCase): def setUp(self): super(AssistedSnapshotCreateTestCase, self).setUp() self.controller = assisted_snaps.AssistedVolumeSnapshotsController() self.stubs.Set(compute_api.API, 'volume_snapshot_create', fake_compute_volume_snapshot_create) def test_assisted_create(self): req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots') body = {'snapshot': {'volume_id': 1, 'create_info': {}}} req.method = 'POST' self.controller.create(req, body=body) def test_assisted_create_missing_create_info(self): req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots') body = {'snapshot': {'volume_id': 1}} req.method = 'POST' self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, req, body=body) class AssistedSnapshotDeleteTestCase(test.TestCase): def setUp(self): super(AssistedSnapshotDeleteTestCase, self).setUp() self.controller = assisted_snaps.AssistedVolumeSnapshotsController() self.stubs.Set(compute_api.API, 'volume_snapshot_delete', fake_compute_volume_snapshot_delete) def test_assisted_delete(self): params = { 'delete_info': jsonutils.dumps({'volume_id': 1}), } req = fakes.HTTPRequest.blank( '/v2/fake/os-assisted-volume-snapshots?%s' % '&'.join(['%s=%s' % (k, v) for k, v in params.iteritems()])) req.method = 'DELETE' result = self.controller.delete(req, '5') self.assertEqual(result.status_int, 204) def test_assisted_delete_missing_delete_info(self): req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots') req.method = 'DELETE' self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete, req, '5')
ntt-sic/nova
nova/tests/api/openstack/compute/contrib/test_volumes.py
Python
apache-2.0
32,169
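Plain-data sketches of the two request bodies exercised most often by the tests above (volume creation on os-volumes and attachment on os-volume_attachments); the field values repeat the illustrative fixtures from the tests themselves.

import json

volume_create_body = {
    "volume": {
        "size": 100,
        "display_name": "Volume Test Name",
        "display_description": "Volume Test Desc",
        "availability_zone": "zone1:host1",
    }
}

volume_attach_body = {
    "volumeAttachment": {
        "volumeId": "00000000-aaaa-aaaa-aaaa-000000000000",
        "device": "/dev/fake",
    }
}

# Both are posted as JSON, mirroring req.body = jsonutils.dumps(body) in the tests.
print(json.dumps(volume_create_body))
print(json.dumps(volume_attach_body))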
# Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import datetime import logging import os import django from django.core.urlresolvers import reverse from django import http from django.test.utils import override_settings from django.utils import timezone from django.utils import unittest from mox import IgnoreArg # noqa from mox import IsA # noqa from horizon import exceptions from horizon.workflows import views from openstack_dashboard import api from openstack_dashboard.dashboards.identity.projects import workflows from openstack_dashboard import policy_backend from openstack_dashboard.test import helpers as test from openstack_dashboard import usage from openstack_dashboard.usage import quotas with_sel = os.environ.get('WITH_SELENIUM', False) if with_sel: from selenium.webdriver import ActionChains # noqa from selenium.webdriver.common import keys from socket import timeout as socket_timeout # noqa INDEX_URL = reverse('horizon:identity:projects:index') USER_ROLE_PREFIX = workflows.PROJECT_USER_MEMBER_SLUG + "_role_" GROUP_ROLE_PREFIX = workflows.PROJECT_GROUP_MEMBER_SLUG + "_role_" PROJECT_DETAIL_URL = reverse('horizon:identity:projects:detail', args=[1]) class TenantsViewTests(test.BaseAdminViewTests): @test.create_stubs({api.keystone: ('tenant_list',)}) def test_index(self): api.keystone.tenant_list(IsA(http.HttpRequest), domain=None, paginate=True, marker=None) \ .AndReturn([self.tenants.list(), False]) self.mox.ReplayAll() res = self.client.get(INDEX_URL) self.assertTemplateUsed(res, 'identity/projects/index.html') self.assertItemsEqual(res.context['table'].data, self.tenants.list()) @test.create_stubs({api.keystone: ('tenant_list', )}) def test_index_with_domain_context(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) domain_tenants = [tenant for tenant in self.tenants.list() if tenant.domain_id == domain.id] api.keystone.tenant_list(IsA(http.HttpRequest), domain=domain.id, paginate=True, marker=None) \ .AndReturn([domain_tenants, False]) self.mox.ReplayAll() res = self.client.get(INDEX_URL) self.assertTemplateUsed(res, 'identity/projects/index.html') self.assertItemsEqual(res.context['table'].data, domain_tenants) self.assertContains(res, "<em>test_domain:</em>") class ProjectsViewNonAdminTests(test.TestCase): @override_settings(POLICY_CHECK_FUNCTION=policy_backend.check) @test.create_stubs({api.keystone: ('tenant_list',)}) def test_index(self): api.keystone.tenant_list(IsA(http.HttpRequest), user=self.user.id, paginate=True, marker=None, admin=False) \ .AndReturn([self.tenants.list(), False]) self.mox.ReplayAll() res = self.client.get(INDEX_URL) self.assertTemplateUsed(res, 'identity/projects/index.html') self.assertItemsEqual(res.context['table'].data, self.tenants.list()) class CreateProjectWorkflowTests(test.BaseAdminViewTests): def _get_project_info(self, project): domain = self._get_default_domain() project_info = {"name": project.name, "description": project.description, 
"enabled": project.enabled, "domain": domain.id} return project_info def _get_workflow_fields(self, project): domain = self._get_default_domain() project_info = {"domain_id": domain.id, "domain_name": domain.name, "name": project.name, "description": project.description, "enabled": project.enabled} return project_info def _get_quota_info(self, quota): cinder_quota = self.cinder_quotas.first() neutron_quota = self.neutron_quotas.first() quota_data = {} for field in quotas.NOVA_QUOTA_FIELDS: quota_data[field] = int(quota.get(field).limit) for field in quotas.CINDER_QUOTA_FIELDS: quota_data[field] = int(cinder_quota.get(field).limit) for field in quotas.NEUTRON_QUOTA_FIELDS: quota_data[field] = int(neutron_quota.get(field).limit) return quota_data def _get_workflow_data(self, project, quota): project_info = self._get_workflow_fields(project) quota_data = self._get_quota_info(quota) project_info.update(quota_data) return project_info def _get_default_domain(self): default_domain = self.domain domain = {"id": self.request.session.get('domain_context', default_domain.id), "name": self.request.session.get('domain_context_name', default_domain.name)} return api.base.APIDictWrapper(domain) def _get_all_users(self, domain_id): if not domain_id: users = self.users.list() else: users = [user for user in self.users.list() if user.domain_id == domain_id] return users def _get_all_groups(self, domain_id): if not domain_id: groups = self.groups.list() else: groups = [group for group in self.groups.list() if group.domain_id == domain_id] return groups @test.create_stubs({api.keystone: ('get_default_domain', 'get_default_role', 'user_list', 'group_list', 'role_list'), api.base: ('is_service_enabled',), api.neutron: ('is_extension_supported',), quotas: ('get_default_quota_data',)}) def test_add_project_get(self): quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \ .MultipleTimes().AndReturn(True) api.base.is_service_enabled(IsA(http.HttpRequest), 'volume') \ .MultipleTimes().AndReturn(True) api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) api.neutron.is_extension_supported( IsA(http.HttpRequest), 'security-group').AndReturn(True) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)).AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) api.keystone.role_list(IsA(http.HttpRequest)).AndReturn(roles) self.mox.ReplayAll() url = reverse('horizon:identity:projects:create') res = self.client.get(url) self.assertTemplateUsed(res, views.WorkflowView.template_name) self.assertContains(res, '<input type="hidden" name="subnet" ' 'id="id_subnet" />', html=True) workflow = res.context['workflow'] self.assertEqual(res.context['workflow'].name, workflows.CreateProject.name) step = workflow.get_step("createprojectinfoaction") self.assertEqual(step.action.initial['ram'], quota.get('ram').limit) self.assertEqual(step.action.initial['injected_files'], quota.get('injected_files').limit) self.assertQuerysetEqual( workflow.steps, ['<CreateProjectInfo: 
createprojectinfoaction>', '<UpdateProjectMembers: update_members>', '<UpdateProjectGroups: update_group_members>', '<CreateProjectQuota: create_quotas>']) def test_add_project_get_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) self.test_add_project_get() @test.create_stubs({api.keystone: ('get_default_role', 'user_list', 'group_list', 'role_list', 'domain_get'), api.neutron: ('is_extension_supported', 'tenant_quota_get'), quotas: ('get_default_quota_data',)}) @test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True}) def test_add_project_get_with_neutron(self): quota = self.quotas.first() neutron_quotas = self.neutron_quotas.first() quotas.get_default_quota_data(IsA(http.HttpRequest)) \ .AndReturn(quota) api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \ .MultipleTimes().AndReturn(True) api.neutron.is_extension_supported( IsA(http.HttpRequest), 'security-group').AndReturn(True) api.neutron.tenant_quota_get(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(neutron_quotas) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(self.roles.first()) api.keystone.user_list(IsA(http.HttpRequest), domain=None) \ .AndReturn(self.users.list()) api.keystone.role_list(IsA(http.HttpRequest)) \ .AndReturn(self.roles.list()) api.keystone.group_list(IsA(http.HttpRequest), domain=None) \ .AndReturn(self.groups.list()) api.keystone.role_list(IsA(http.HttpRequest)) \ .AndReturn(self.roles.list()) self.mox.ReplayAll() res = self.client.get(reverse('horizon:identity:projects:create')) self.assertTemplateUsed(res, views.WorkflowView.template_name) if django.VERSION >= (1, 6): self.assertContains(res, ''' <input class="form-control" id="id_subnet" min="-1" name="subnet" type="number" value="10" /> ''', html=True) else: self.assertContains(res, ''' <input class="form-control" name="subnet" id="id_subnet" value="10" type="text" /> ''', html=True) workflow = res.context['workflow'] self.assertEqual(res.context['workflow'].name, workflows.CreateProject.name) step = workflow.get_step("createprojectinfoaction") self.assertEqual(step.action.initial['ram'], quota.get('ram').limit) self.assertEqual(step.action.initial['subnet'], neutron_quotas.get('subnet').limit) @test.create_stubs({api.keystone: ('get_default_role', 'add_tenant_user_role', 'tenant_create', 'user_list', 'group_list', 'role_list', 'domain_get'), quotas: ('get_default_quota_data', 'get_disabled_quotas', 'tenant_quota_usages',), api.cinder: ('tenant_quota_update',), api.nova: ('tenant_quota_update',)}) def test_add_project_post(self, neutron=False): project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) if neutron: quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), 
domain=domain_id) \ .AndReturn(groups) # handle project_details = self._get_project_info(project) quota_data = self._get_quota_info(quota) api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \ .AndReturn(project) workflow_data = {} for role in roles: if USER_ROLE_PREFIX + role.id in workflow_data: ulist = workflow_data[USER_ROLE_PREFIX + role.id] for user_id in ulist: api.keystone.add_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user=user_id, role=role.id) for role in roles: if GROUP_ROLE_PREFIX + role.id in workflow_data: ulist = workflow_data[GROUP_ROLE_PREFIX + role.id] for group_id in ulist: api.keystone.add_group_role(IsA(http.HttpRequest), role=role.id, group=group_id, project=self.tenant.id) nova_updated_quota = dict([(key, quota_data[key]) for key in quotas.NOVA_QUOTA_FIELDS]) api.nova.tenant_quota_update(IsA(http.HttpRequest), project.id, **nova_updated_quota) cinder_updated_quota = dict([(key, quota_data[key]) for key in quotas.CINDER_QUOTA_FIELDS]) api.cinder.tenant_quota_update(IsA(http.HttpRequest), project.id, **cinder_updated_quota) self.mox.ReplayAll() workflow_data.update(self._get_workflow_data(project, quota)) url = reverse('horizon:identity:projects:create') res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, INDEX_URL) def test_add_project_post_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) self.test_add_project_post() @test.create_stubs({api.neutron: ('is_extension_supported', 'tenant_quota_update')}) @test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True}) def test_add_project_post_with_neutron(self): quota_data = self.neutron_quotas.first() neutron_updated_quota = dict([(key, quota_data.get(key).limit) for key in quotas.NEUTRON_QUOTA_FIELDS]) api.neutron.is_extension_supported( IsA(http.HttpRequest), 'security-group').AndReturn(True) api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \ .MultipleTimes().AndReturn(True) api.neutron.tenant_quota_update(IsA(http.HttpRequest), self.tenant.id, **neutron_updated_quota) self.test_add_project_post(neutron=True) @test.create_stubs({api.keystone: ('user_list', 'role_list', 'group_list', 'get_default_domain', 'get_default_role'), quotas: ('get_default_quota_data', 'get_disabled_quotas')}) def test_add_project_quota_defaults_error(self): default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)) \ .AndRaise(self.exceptions.nova) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) self.mox.ReplayAll() url = reverse('horizon:identity:projects:create') res = self.client.get(url) self.assertTemplateUsed(res, views.WorkflowView.template_name) self.assertContains(res, "Unable to retrieve default quota values") def 
test_add_project_quota_defaults_error_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) self.test_add_project_quota_defaults_error() @test.create_stubs({api.keystone: ('tenant_create', 'user_list', 'role_list', 'group_list', 'get_default_domain', 'get_default_role'), quotas: ('get_default_quota_data', 'get_disabled_quotas', 'tenant_quota_usages')}) def test_add_project_tenant_create_error(self): project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) # handle project_details = self._get_project_info(project) api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \ .AndRaise(self.exceptions.keystone) self.mox.ReplayAll() workflow_data = self._get_workflow_data(project, quota) url = reverse('horizon:identity:projects:create') res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, INDEX_URL) def test_add_project_tenant_create_error_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) self.test_add_project_tenant_create_error() @test.create_stubs({api.keystone: ('tenant_create', 'user_list', 'role_list', 'group_list', 'get_default_domain', 'get_default_role', 'add_tenant_user_role'), quotas: ('get_default_quota_data', 'get_disabled_quotas', 'tenant_quota_usages'), api.nova: ('tenant_quota_update',)}) def test_add_project_quota_update_error(self): project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) # handle project_details = self._get_project_info(project) quota_data = self._get_quota_info(quota) api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \ .AndReturn(project) workflow_data = {} for role in roles: if USER_ROLE_PREFIX + role.id in workflow_data: ulist = workflow_data[USER_ROLE_PREFIX + role.id] for user_id in ulist: 
api.keystone.add_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user=user_id, role=role.id) for role in roles: if GROUP_ROLE_PREFIX + role.id in workflow_data: ulist = workflow_data[GROUP_ROLE_PREFIX + role.id] for group_id in ulist: api.keystone.add_group_role(IsA(http.HttpRequest), role=role.id, group=group_id, project=self.tenant.id) nova_updated_quota = dict([(key, quota_data[key]) for key in quotas.NOVA_QUOTA_FIELDS]) api.nova.tenant_quota_update(IsA(http.HttpRequest), project.id, **nova_updated_quota) \ .AndRaise(self.exceptions.nova) self.mox.ReplayAll() workflow_data.update(self._get_workflow_data(project, quota)) url = reverse('horizon:identity:projects:create') res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, INDEX_URL) def test_add_project_quota_update_error_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) self.test_add_project_quota_update_error() @test.create_stubs({api.keystone: ('tenant_create', 'user_list', 'role_list', 'group_list', 'get_default_domain', 'get_default_role', 'add_tenant_user_role'), quotas: ('get_default_quota_data', 'get_disabled_quotas', 'tenant_quota_usages'), api.cinder: ('tenant_quota_update',), api.nova: ('tenant_quota_update',)}) def test_add_project_user_update_error(self): project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) # handle project_details = self._get_project_info(project) quota_data = self._get_quota_info(quota) api.keystone.tenant_create(IsA(http.HttpRequest), **project_details) \ .AndReturn(project) workflow_data = {} for role in roles: if USER_ROLE_PREFIX + role.id in workflow_data: ulist = workflow_data[USER_ROLE_PREFIX + role.id] for user_id in ulist: api.keystone.add_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user=user_id, role=role.id) \ .AndRaise(self.exceptions.keystone) break break nova_updated_quota = dict([(key, quota_data[key]) for key in quotas.NOVA_QUOTA_FIELDS]) api.nova.tenant_quota_update(IsA(http.HttpRequest), project.id, **nova_updated_quota) cinder_updated_quota = dict([(key, quota_data[key]) for key in quotas.CINDER_QUOTA_FIELDS]) api.cinder.tenant_quota_update(IsA(http.HttpRequest), project.id, **cinder_updated_quota) self.mox.ReplayAll() workflow_data.update(self._get_workflow_data(project, quota)) url = reverse('horizon:identity:projects:create') res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, INDEX_URL) def test_add_project_user_update_error_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) 
self.test_add_project_user_update_error() @test.create_stubs({api.keystone: ('user_list', 'role_list', 'group_list', 'get_default_domain', 'get_default_role'), quotas: ('get_default_quota_data', 'get_disabled_quotas', 'tenant_quota_usages')}) def test_add_project_missing_field_error(self): project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) self.mox.ReplayAll() workflow_data = self._get_workflow_data(project, quota) workflow_data["name"] = "" url = reverse('horizon:identity:projects:create') res = self.client.post(url, workflow_data) self.assertContains(res, "field is required") def test_add_project_missing_field_error_domain(self): domain = self.domains.get(id="1") self.setSessionValues(domain_context=domain.id, domain_context_name=domain.name) self.test_add_project_missing_field_error() @test.create_stubs({api.keystone: ('user_list', 'role_list', 'group_list', 'get_default_domain', 'get_default_role', 'tenant_list'), quotas: ('get_default_quota_data', 'get_disabled_quotas', 'tenant_quota_usages')}) def test_add_project_name_already_in_use_error(self): keystone_api_version = api.keystone.VERSIONS.active if keystone_api_version < 3: return project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() default_domain = self._get_default_domain() domain_id = default_domain.id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() # init api.keystone.tenant_list(IgnoreArg(), domain=domain_id, filters={"name": project.name})\ .AndReturn(project) api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(default_domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_default_quota_data(IsA(http.HttpRequest)).AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) self.mox.ReplayAll() workflow_data = self._get_workflow_data(project, quota) url = reverse('horizon:identity:projects:create') res = self.client.post(url, workflow_data) self.assertContains(res, 'already in use') class UpdateProjectWorkflowTests(test.BaseAdminViewTests): def _get_quota_info(self, quota): cinder_quota = self.cinder_quotas.first() neutron_quota = self.neutron_quotas.first() quota_data = {} for field in quotas.NOVA_QUOTA_FIELDS: quota_data[field] = int(quota.get(field).limit) for field in quotas.CINDER_QUOTA_FIELDS: quota_data[field] = 
int(cinder_quota.get(field).limit) for field in quotas.NEUTRON_QUOTA_FIELDS: quota_data[field] = int(neutron_quota.get(field).limit) return quota_data def _get_all_users(self, domain_id): if not domain_id: users = self.users.list() else: users = [user for user in self.users.list() if user.domain_id == domain_id] return users def _get_all_groups(self, domain_id): if not domain_id: groups = self.groups.list() else: groups = [group for group in self.groups.list() if group.domain_id == domain_id] return groups def _get_proj_users(self, project_id): return [user for user in self.users.list() if user.project_id == project_id] def _get_proj_groups(self, project_id): return [group for group in self.groups.list() if group.project_id == project_id] def _get_proj_role_assignment(self, project_id): project_scope = {'project': {'id': project_id}} return self.role_assignments.filter(scope=project_scope) def _check_role_list(self, keystone_api_version, role_assignments, groups, proj_users, roles, workflow_data): if keystone_api_version >= 3: # admin role with attempt to remove current admin, results in # warning message workflow_data[USER_ROLE_PREFIX + "1"] = ['3'] # member role workflow_data[USER_ROLE_PREFIX + "2"] = ['1', '3'] # admin role workflow_data[GROUP_ROLE_PREFIX + "1"] = ['2', '3'] # member role workflow_data[GROUP_ROLE_PREFIX + "2"] = ['1', '2', '3'] api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) # Give user 1 role 2 api.keystone.add_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user='1', role='2',) # remove role 2 from user 2 api.keystone.remove_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user='2', role='2') # Give user 3 role 1 api.keystone.add_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user='3', role='1',) api.keystone.group_list(IsA(http.HttpRequest), domain=self.domain.id, project=self.tenant.id) \ .AndReturn(groups) api.keystone.roles_for_group(IsA(http.HttpRequest), group='1', project=self.tenant.id) \ .AndReturn(roles) api.keystone.remove_group_role(IsA(http.HttpRequest), project=self.tenant.id, group='1', role='1') api.keystone.roles_for_group(IsA(http.HttpRequest), group='2', project=self.tenant.id) \ .AndReturn(roles) api.keystone.roles_for_group(IsA(http.HttpRequest), group='3', project=self.tenant.id) \ .AndReturn(roles) else: api.keystone.user_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(proj_users) # admin user - try to remove all roles on current project, warning api.keystone.roles_for_user(IsA(http.HttpRequest), '1', self.tenant.id).AndReturn(roles) # member user 1 - has role 1, will remove it api.keystone.roles_for_user(IsA(http.HttpRequest), '2', self.tenant.id).AndReturn((roles[1],)) # member user 3 - has role 2 api.keystone.roles_for_user(IsA(http.HttpRequest), '3', self.tenant.id).AndReturn((roles[0],)) # add role 2 api.keystone.add_tenant_user_role(IsA(http.HttpRequest), project=self.tenant.id, user='3', role='2')\ .AndRaise(self.exceptions.keystone) @test.create_stubs({api.keystone: ('get_default_role', 'roles_for_user', 'tenant_get', 'domain_get', 'user_list', 'roles_for_group', 'group_list', 'role_list', 'role_assignments_list'), quotas: ('get_tenant_quota_data', 'get_disabled_quotas')}) def test_update_project_get(self): keystone_api_version = api.keystone.VERSIONS.active project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() domain_id = project.domain_id users = 
self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() proj_users = self._get_proj_users(project.id) role_assignments = self._get_proj_role_assignment(project.id) api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndReturn(project) api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \ .AndReturn(self.domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_tenant_quota_data(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) if keystone_api_version >= 3: api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) else: api.keystone.user_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(proj_users) for user in proj_users: api.keystone.roles_for_user(IsA(http.HttpRequest), user.id, self.tenant.id).AndReturn(roles) api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) self.mox.ReplayAll() url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) res = self.client.get(url) self.assertTemplateUsed(res, views.WorkflowView.template_name) workflow = res.context['workflow'] self.assertEqual(res.context['workflow'].name, workflows.UpdateProject.name) step = workflow.get_step("update_info") self.assertEqual(step.action.initial['ram'], quota.get('ram').limit) self.assertEqual(step.action.initial['injected_files'], quota.get('injected_files').limit) self.assertEqual(step.action.initial['name'], project.name) self.assertEqual(step.action.initial['description'], project.description) self.assertQuerysetEqual( workflow.steps, ['<UpdateProjectInfo: update_info>', '<UpdateProjectMembers: update_members>', '<UpdateProjectGroups: update_group_members>', '<UpdateProjectQuota: update_quotas>']) @test.create_stubs({api.keystone: ('tenant_get', 'domain_get', 'tenant_update', 'get_default_role', 'roles_for_user', 'remove_tenant_user_role', 'add_tenant_user_role', 'user_list', 'roles_for_group', 'remove_group_role', 'add_group_role', 'group_list', 'role_list', 'role_assignments_list'), api.nova: ('tenant_quota_update',), api.cinder: ('tenant_quota_update',), quotas: ('get_tenant_quota_data', 'get_disabled_quotas', 'tenant_quota_usages')}) def test_update_project_save(self, neutron=False): keystone_api_version = api.keystone.VERSIONS.active project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() domain_id = project.domain_id users = self._get_all_users(domain_id) proj_users = self._get_proj_users(project.id) groups = self._get_all_groups(domain_id) roles = self.roles.list() role_assignments = self._get_proj_role_assignment(project.id) quota_usages = self.quota_usages.first() # get/init api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndReturn(project) api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \ .AndReturn(self.domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) if neutron: quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ 
.AndReturn(self.disabled_quotas.first()) quotas.get_tenant_quota_data(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) workflow_data = {} if keystone_api_version >= 3: api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) else: api.keystone.user_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(proj_users) for user in proj_users: api.keystone.roles_for_user(IsA(http.HttpRequest), user.id, self.tenant.id).AndReturn(roles) api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) workflow_data[USER_ROLE_PREFIX + "1"] = ['3'] # admin role workflow_data[USER_ROLE_PREFIX + "2"] = ['2'] # member role # Group assignment form data workflow_data[GROUP_ROLE_PREFIX + "1"] = ['3'] # admin role workflow_data[GROUP_ROLE_PREFIX + "2"] = ['2'] # member role # update some fields project._info["domain_id"] = domain_id project._info["name"] = "updated name" project._info["description"] = "updated description" quota.metadata_items = 444 quota.volumes = 444 updated_project = {"name": project._info["name"], "description": project._info["description"], "enabled": project.enabled} updated_quota = self._get_quota_info(quota) # handle api.keystone.tenant_update(IsA(http.HttpRequest), project.id, **updated_project) \ .AndReturn(project) self._check_role_list(keystone_api_version, role_assignments, groups, proj_users, roles, workflow_data) quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \ .AndReturn(quota_usages) nova_updated_quota = dict([(key, updated_quota[key]) for key in quotas.NOVA_QUOTA_FIELDS]) api.nova.tenant_quota_update(IsA(http.HttpRequest), project.id, **nova_updated_quota) cinder_updated_quota = dict([(key, updated_quota[key]) for key in quotas.CINDER_QUOTA_FIELDS]) api.cinder.tenant_quota_update(IsA(http.HttpRequest), project.id, **cinder_updated_quota) self.mox.ReplayAll() # submit form data project_data = {"domain_id": project._info["domain_id"], "name": project._info["name"], "id": project.id, "description": project._info["description"], "enabled": project.enabled} workflow_data.update(project_data) workflow_data.update(updated_quota) url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertMessageCount(error=0, warning=1) self.assertRedirectsNoFollow(res, INDEX_URL) @test.create_stubs({api.neutron: ('is_extension_supported', 'tenant_quota_get', 'tenant_quota_update')}) @test.update_settings(OPENSTACK_NEUTRON_NETWORK={'enable_quotas': True}) def test_update_project_save_with_neutron(self): quota_data = self.neutron_quotas.first() neutron_updated_quota = dict([(key, quota_data.get(key).limit) for key in quotas.NEUTRON_QUOTA_FIELDS]) api.neutron.is_extension_supported(IsA(http.HttpRequest), 'quotas') \ .MultipleTimes().AndReturn(True) api.neutron.tenant_quota_get(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota_data) api.neutron.tenant_quota_update(IsA(http.HttpRequest), self.tenant.id, **neutron_updated_quota) self.test_update_project_save(neutron=True) 
@test.create_stubs({api.keystone: ('tenant_get',)}) def test_update_project_get_error(self): api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndRaise(self.exceptions.nova) self.mox.ReplayAll() url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) res = self.client.get(url) self.assertRedirectsNoFollow(res, INDEX_URL) @test.create_stubs({api.keystone: ('tenant_get', 'domain_get', 'tenant_update', 'get_default_role', 'roles_for_user', 'remove_tenant_user', 'add_tenant_user_role', 'user_list', 'roles_for_group', 'remove_group_role', 'add_group_role', 'group_list', 'role_list', 'role_assignments_list'), quotas: ('get_tenant_quota_data', 'get_disabled_quotas', 'tenant_quota_usages',), api.nova: ('tenant_quota_update',)}) def test_update_project_tenant_update_error(self): keystone_api_version = api.keystone.VERSIONS.active project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() domain_id = project.domain_id users = self._get_all_users(domain_id) groups = self._get_all_groups(domain_id) roles = self.roles.list() proj_users = self._get_proj_users(project.id) role_assignments = self.role_assignments.list() quota_usages = self.quota_usages.first() # get/init api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndReturn(project) api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \ .AndReturn(self.domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_tenant_quota_data(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) workflow_data = {} if keystone_api_version >= 3: api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) else: api.keystone.user_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(proj_users) for user in proj_users: api.keystone.roles_for_user(IsA(http.HttpRequest), user.id, self.tenant.id).AndReturn(roles) role_ids = [role.id for role in roles] for user in proj_users: if role_ids: workflow_data.setdefault(USER_ROLE_PREFIX + role_ids[0], []) \ .append(user.id) api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) role_ids = [role.id for role in roles] for group in groups: if role_ids: workflow_data.setdefault(GROUP_ROLE_PREFIX + role_ids[0], []) \ .append(group.id) # update some fields project._info["domain_id"] = domain_id project._info["name"] = "updated name" project._info["description"] = "updated description" quota.metadata_items = 444 quota.volumes = 444 updated_project = {"name": project._info["name"], "description": project._info["description"], "enabled": project.enabled} updated_quota = self._get_quota_info(quota) # handle quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \ .AndReturn(quota_usages) api.keystone.tenant_update(IsA(http.HttpRequest), project.id, **updated_project) \ .AndRaise(self.exceptions.keystone) self.mox.ReplayAll() # submit form data project_data = {"domain_id": project._info["domain_id"], "name": project._info["name"], "id": project.id, "description": 
project._info["description"], "enabled": project.enabled} workflow_data.update(project_data) workflow_data.update(updated_quota) url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, INDEX_URL) @test.create_stubs({api.keystone: ('tenant_get', 'domain_get', 'tenant_update', 'get_default_role', 'roles_for_user', 'remove_tenant_user_role', 'add_tenant_user_role', 'user_list', 'roles_for_group', 'remove_group_role', 'add_group_role', 'group_list', 'role_list', 'role_assignments_list'), quotas: ('get_tenant_quota_data', 'get_disabled_quotas', 'tenant_quota_usages',), api.nova: ('tenant_quota_update',)}) def test_update_project_quota_update_error(self): keystone_api_version = api.keystone.VERSIONS.active project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() domain_id = project.domain_id users = self._get_all_users(domain_id) proj_users = self._get_proj_users(project.id) groups = self._get_all_groups(domain_id) roles = self.roles.list() role_assignments = self._get_proj_role_assignment(project.id) quota_usages = self.quota_usages.first() # get/init api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndReturn(project) api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \ .AndReturn(self.domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_tenant_quota_data(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) workflow_data = {} if keystone_api_version >= 3: api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) else: api.keystone.user_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(proj_users) for user in proj_users: api.keystone.roles_for_user(IsA(http.HttpRequest), user.id, self.tenant.id).AndReturn(roles) api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) workflow_data[USER_ROLE_PREFIX + "1"] = ['1', '3'] # admin role workflow_data[USER_ROLE_PREFIX + "2"] = ['1', '2', '3'] # member role # Group role assignment data workflow_data[GROUP_ROLE_PREFIX + "1"] = ['1', '3'] # admin role workflow_data[GROUP_ROLE_PREFIX + "2"] = ['1', '2', '3'] # member role # update some fields project._info["domain_id"] = domain_id project._info["name"] = "updated name" project._info["description"] = "updated description" quota[0].limit = 444 quota[1].limit = -1 updated_project = {"name": project._info["name"], "description": project._info["description"], "enabled": project.enabled} updated_quota = self._get_quota_info(quota) # handle api.keystone.tenant_update(IsA(http.HttpRequest), project.id, **updated_project) \ .AndReturn(project) self._check_role_list(keystone_api_version, role_assignments, groups, proj_users, roles, workflow_data) quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \ .AndReturn(quota_usages) nova_updated_quota = dict([(key, updated_quota[key]) for key in quotas.NOVA_QUOTA_FIELDS]) api.nova.tenant_quota_update(IsA(http.HttpRequest), 
project.id, **nova_updated_quota) \ .AndRaise(self.exceptions.nova) self.mox.ReplayAll() # submit form data project_data = {"domain_id": project._info["domain_id"], "name": project._info["name"], "id": project.id, "description": project._info["description"], "enabled": project.enabled} workflow_data.update(project_data) workflow_data.update(updated_quota) url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertMessageCount(error=2, warning=1) self.assertRedirectsNoFollow(res, INDEX_URL) @test.create_stubs({api.keystone: ('tenant_get', 'domain_get', 'tenant_update', 'get_default_role', 'roles_for_user', 'remove_tenant_user_role', 'add_tenant_user_role', 'user_list', 'roles_for_group', 'remove_group_role', 'add_group_role', 'group_list', 'role_list', 'role_assignments_list'), quotas: ('get_tenant_quota_data', 'get_disabled_quotas', 'tenant_quota_usages')}) def test_update_project_member_update_error(self): keystone_api_version = api.keystone.VERSIONS.active project = self.tenants.first() quota = self.quotas.first() default_role = self.roles.first() domain_id = project.domain_id users = self._get_all_users(domain_id) proj_users = self._get_proj_users(project.id) groups = self._get_all_groups(domain_id) roles = self.roles.list() role_assignments = self._get_proj_role_assignment(project.id) quota_usages = self.quota_usages.first() # get/init api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndReturn(project) api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \ .AndReturn(self.domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_tenant_quota_data(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(default_role) api.keystone.user_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(roles) api.keystone.group_list(IsA(http.HttpRequest), domain=domain_id) \ .AndReturn(groups) workflow_data = {} if keystone_api_version >= 3: api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) else: api.keystone.user_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(proj_users) for user in proj_users: api.keystone.roles_for_user(IsA(http.HttpRequest), user.id, self.tenant.id).AndReturn(roles) api.keystone.role_assignments_list(IsA(http.HttpRequest), project=self.tenant.id) \ .AndReturn(role_assignments) workflow_data[USER_ROLE_PREFIX + "1"] = ['1', '3'] # admin role workflow_data[USER_ROLE_PREFIX + "2"] = ['1', '2', '3'] # member role workflow_data[GROUP_ROLE_PREFIX + "1"] = ['1', '3'] # admin role workflow_data[GROUP_ROLE_PREFIX + "2"] = ['1', '2', '3'] # member role # update some fields project._info["domain_id"] = domain_id project._info["name"] = "updated name" project._info["description"] = "updated description" quota.metadata_items = 444 quota.volumes = 444 updated_project = {"name": project._info["name"], "description": project._info["description"], "enabled": project.enabled} updated_quota = self._get_quota_info(quota) # handle quotas.tenant_quota_usages(IsA(http.HttpRequest), tenant_id=project.id) \ .AndReturn(quota_usages) api.keystone.tenant_update(IsA(http.HttpRequest), project.id, **updated_project) \ .AndReturn(project) 
self._check_role_list(keystone_api_version, role_assignments, groups, proj_users, roles, workflow_data) self.mox.ReplayAll() # submit form data project_data = {"domain_id": project._info["domain_id"], "name": project._info["name"], "id": project.id, "description": project._info["description"], "enabled": project.enabled} workflow_data.update(project_data) workflow_data.update(updated_quota) url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) res = self.client.post(url, workflow_data) self.assertNoFormErrors(res) self.assertMessageCount(error=2, warning=1) self.assertRedirectsNoFollow(res, INDEX_URL) # django 1.7 and later does not handle the thrown keystoneclient # exception well enough. # TODO(mrunge): re-check when django-1.8 is stable @unittest.skipIf(django.VERSION >= (1, 7, 0), 'Currently skipped with Django >= 1.7') @test.create_stubs({api.keystone: ('get_default_role', 'tenant_get', 'domain_get'), quotas: ('get_tenant_quota_data', 'get_disabled_quotas')}) def test_update_project_when_default_role_does_not_exist(self): project = self.tenants.first() domain_id = project.domain_id quota = self.quotas.first() api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(None) # Default role doesn't exist api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id, admin=True) \ .AndReturn(project) api.keystone.domain_get(IsA(http.HttpRequest), domain_id) \ .AndReturn(self.domain) quotas.get_disabled_quotas(IsA(http.HttpRequest)) \ .AndReturn(self.disabled_quotas.first()) quotas.get_tenant_quota_data(IsA(http.HttpRequest), tenant_id=self.tenant.id) \ .AndReturn(quota) self.mox.ReplayAll() url = reverse('horizon:identity:projects:update', args=[self.tenant.id]) try: # Avoid the log message in the test output when the workflow's # step action cannot be instantiated logging.disable(logging.ERROR) with self.assertRaises(exceptions.NotFound): self.client.get(url) finally: logging.disable(logging.NOTSET) class UsageViewTests(test.BaseAdminViewTests): def _stub_nova_api_calls(self, nova_stu_enabled=True): self.mox.StubOutWithMock(api.nova, 'usage_get') self.mox.StubOutWithMock(api.nova, 'tenant_absolute_limits') self.mox.StubOutWithMock(api.nova, 'extension_supported') self.mox.StubOutWithMock(api.cinder, 'tenant_absolute_limits') api.nova.extension_supported( 'SimpleTenantUsage', IsA(http.HttpRequest)) \ .AndReturn(nova_stu_enabled) def _stub_neutron_api_calls(self, neutron_sg_enabled=True): self.mox.StubOutWithMock(api.neutron, 'is_extension_supported') self.mox.StubOutWithMock(api.network, 'floating_ip_supported') self.mox.StubOutWithMock(api.network, 'tenant_floating_ip_list') if neutron_sg_enabled: self.mox.StubOutWithMock(api.network, 'security_group_list') api.neutron.is_extension_supported( IsA(http.HttpRequest), 'security-group').AndReturn(neutron_sg_enabled) api.network.floating_ip_supported(IsA(http.HttpRequest)) \ .AndReturn(True) api.network.tenant_floating_ip_list(IsA(http.HttpRequest)) \ .AndReturn(self.floating_ips.list()) if neutron_sg_enabled: api.network.security_group_list(IsA(http.HttpRequest)) \ .AndReturn(self.q_secgroups.list()) def test_usage_csv(self): self._test_usage_csv(nova_stu_enabled=True) def test_usage_csv_disabled(self): self._test_usage_csv(nova_stu_enabled=False) def _test_usage_csv(self, nova_stu_enabled=True): now = timezone.now() usage_obj = api.nova.NovaUsage(self.usages.first()) self._stub_nova_api_calls(nova_stu_enabled) api.nova.extension_supported( 'SimpleTenantUsage', IsA(http.HttpRequest)) \ 
.AndReturn(nova_stu_enabled) start = datetime.datetime(now.year, now.month, 1, 0, 0, 0, 0) end = datetime.datetime(now.year, now.month, now.day, 23, 59, 59, 0) if nova_stu_enabled: api.nova.usage_get(IsA(http.HttpRequest), self.tenant.id, start, end).AndReturn(usage_obj) api.nova.tenant_absolute_limits(IsA(http.HttpRequest))\ .AndReturn(self.limits['absolute']) api.cinder.tenant_absolute_limits(IsA(http.HttpRequest)) \ .AndReturn(self.cinder_limits['absolute']) self._stub_neutron_api_calls() self.mox.ReplayAll() project_id = self.tenants.first().id csv_url = reverse('horizon:identity:projects:usage', args=[project_id]) + "?format=csv" res = self.client.get(csv_url) self.assertTemplateUsed(res, 'project/overview/usage.csv') self.assertTrue(isinstance(res.context['usage'], usage.ProjectUsage)) hdr = ('Instance Name,VCPUs,RAM (MB),Disk (GB),Usage (Hours),' 'Time since created (Seconds),State') self.assertContains(res, '%s\r\n' % hdr) class DetailProjectViewTests(test.BaseAdminViewTests): @test.create_stubs({api.keystone: ('tenant_get',)}) def test_detail_view(self): project = self.tenants.first() api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id) \ .AndReturn(project) self.mox.ReplayAll() res = self.client.get(PROJECT_DETAIL_URL, args=[project.id]) self.assertTemplateUsed(res, 'identity/projects/detail.html') self.assertEqual(res.context['project'].name, project.name) self.assertEqual(res.context['project'].id, project.id) self.assertContains(res, "Project Details: %s" % project.name, 1, 200) @test.create_stubs({api.keystone: ('tenant_get',)}) def test_detail_view_with_exception(self): project = self.tenants.first() api.keystone.tenant_get(IsA(http.HttpRequest), self.tenant.id) \ .AndRaise(self.exceptions.keystone) self.mox.ReplayAll() res = self.client.get(PROJECT_DETAIL_URL, args=[project.id]) self.assertRedirectsNoFollow(res, INDEX_URL) @unittest.skipUnless(os.environ.get('WITH_SELENIUM', False), "The WITH_SELENIUM env variable is not set.") class SeleniumTests(test.SeleniumAdminTestCase): @test.create_stubs( {api.keystone: ('tenant_list', 'tenant_get', 'tenant_update')}) def test_inline_editing_update(self): # Tenant List api.keystone.tenant_list(IgnoreArg(), domain=None, marker=None, paginate=True) \ .AndReturn([self.tenants.list(), False]) # Edit mod api.keystone.tenant_get(IgnoreArg(), u'1', admin=True) \ .AndReturn(self.tenants.list()[0]) # Update - requires get and update api.keystone.tenant_get(IgnoreArg(), u'1', admin=True) \ .AndReturn(self.tenants.list()[0]) api.keystone.tenant_update( IgnoreArg(), u'1', description='a test tenant.', enabled=True, name=u'Changed test_tenant') # Refreshing cell with changed name changed_tenant = copy.copy(self.tenants.list()[0]) changed_tenant.name = u'Changed test_tenant' api.keystone.tenant_get(IgnoreArg(), u'1', admin=True) \ .AndReturn(changed_tenant) self.mox.ReplayAll() self.selenium.get("%s%s" % (self.live_server_url, INDEX_URL)) # Check the presence of the important elements td_element = self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']") cell_wrapper = td_element.find_element_by_class_name( 'table_cell_wrapper') edit_button_wrapper = td_element.find_element_by_class_name( 'table_cell_action') edit_button = edit_button_wrapper.find_element_by_tag_name('button') # Hovering over td and clicking on edit button action_chains = ActionChains(self.selenium) action_chains.move_to_element(cell_wrapper).click(edit_button) action_chains.perform() # Waiting 
for the AJAX response for switching to editing mod wait = self.ui.WebDriverWait(self.selenium, 10, ignored_exceptions=[socket_timeout]) wait.until(lambda x: self.selenium.find_element_by_name("name__1")) # Changing project name in cell form td_element = self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']") name_input = td_element.find_element_by_tag_name('input') name_input.send_keys(keys.Keys.HOME) name_input.send_keys("Changed ") # Saving new project name by AJAX td_element.find_element_by_class_name('inline-edit-submit').click() # Waiting for the AJAX response of cell refresh wait = self.ui.WebDriverWait(self.selenium, 10, ignored_exceptions=[socket_timeout]) wait.until(lambda x: self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']" "/div[@class='table_cell_wrapper']" "/div[@class='table_cell_data_wrapper']")) # Checking new project name after cell refresh data_wrapper = self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']" "/div[@class='table_cell_wrapper']" "/div[@class='table_cell_data_wrapper']") self.assertTrue(data_wrapper.text == u'Changed test_tenant', "Error: saved tenant name is expected to be " "'Changed test_tenant'") @test.create_stubs( {api.keystone: ('tenant_list', 'tenant_get')}) def test_inline_editing_cancel(self): # Tenant List api.keystone.tenant_list(IgnoreArg(), domain=None, marker=None, paginate=True) \ .AndReturn([self.tenants.list(), False]) # Edit mod api.keystone.tenant_get(IgnoreArg(), u'1', admin=True) \ .AndReturn(self.tenants.list()[0]) # Cancel edit mod is without the request self.mox.ReplayAll() self.selenium.get("%s%s" % (self.live_server_url, INDEX_URL)) # Check the presence of the important elements td_element = self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']") cell_wrapper = td_element.find_element_by_class_name( 'table_cell_wrapper') edit_button_wrapper = td_element.find_element_by_class_name( 'table_cell_action') edit_button = edit_button_wrapper.find_element_by_tag_name('button') # Hovering over td and clicking on edit action_chains = ActionChains(self.selenium) action_chains.move_to_element(cell_wrapper).click(edit_button) action_chains.perform() # Waiting for the AJAX response for switching to editing mod wait = self.ui.WebDriverWait(self.selenium, 10, ignored_exceptions=[socket_timeout]) wait.until(lambda x: self.selenium.find_element_by_name("name__1")) # Click on cancel button td_element = self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']") td_element.find_element_by_class_name('inline-edit-cancel').click() # Cancel is via javascript, so it should be immediate # Checking that tenant name is not changed data_wrapper = self.selenium.find_element_by_xpath( "//td[@data-update-url='/identity/?action=cell_update" "&table=tenants&cell_name=name&obj_id=1']" "/div[@class='table_cell_wrapper']" "/div[@class='table_cell_data_wrapper']") self.assertTrue(data_wrapper.text == u'test_tenant', "Error: saved tenant name is expected to be " "'test_tenant'") @test.create_stubs({api.keystone: ('get_default_domain', 'get_default_role', 'user_list', 'group_list', 'role_list'), api.base: ('is_service_enabled',), quotas: ('get_default_quota_data',)}) def 
test_membership_list_loads_correctly(self): member_css_class = ".available_members" users = self.users.list() api.base.is_service_enabled(IsA(http.HttpRequest), 'network') \ .MultipleTimes().AndReturn(False) api.base.is_service_enabled(IsA(http.HttpRequest), 'volume') \ .MultipleTimes().AndReturn(False) api.keystone.get_default_domain(IsA(http.HttpRequest)) \ .AndReturn(self.domain) quotas.get_default_quota_data(IsA(http.HttpRequest)) \ .AndReturn(self.quotas.first()) api.keystone.get_default_role(IsA(http.HttpRequest)) \ .MultipleTimes().AndReturn(self.roles.first()) api.keystone.user_list(IsA(http.HttpRequest), domain=self.domain.id) \ .AndReturn(users) api.keystone.role_list(IsA(http.HttpRequest)) \ .AndReturn(self.roles.list()) api.keystone.group_list(IsA(http.HttpRequest), domain=self.domain.id) \ .AndReturn(self.groups.list()) api.keystone.role_list(IsA(http.HttpRequest)) \ .AndReturn(self.roles.list()) self.mox.ReplayAll() self.selenium.get("%s%s" % (self.live_server_url, reverse('horizon:identity:projects:create'))) members = self.selenium.find_element_by_css_selector(member_css_class) for user in users: self.assertIn(user.name, members.text)
newrocknj/horizon
openstack_dashboard/dashboards/identity/projects/tests.py
Python
apache-2.0
82,571
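The Horizon test module above relies throughout on mox's record/replay stubbing (the AndReturn, MultipleTimes, and ReplayAll calls set up by the @test.create_stubs decorator). The snippet below is a minimal, self-contained sketch of that pattern and is not taken from the file; FakeApi and its fetch method are placeholder names used only for illustration.

import mox


class FakeApi(object):
    """Placeholder standing in for the api.* modules stubbed in the tests."""

    def fetch(self, key):
        raise NotImplementedError("never reached once the method is stubbed")


m = mox.Mox()
api = FakeApi()
m.StubOutWithMock(api, 'fetch')

# Record phase: declare each call the code under test is expected to make,
# together with its canned return value.
api.fetch('quota').AndReturn(42)

m.ReplayAll()                      # switch from recording to replaying
assert api.fetch('quota') == 42    # the code under test would issue this call
m.VerifyAll()                      # fails if a recorded expectation was unmet
m.UnsetStubs()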
# -*- coding: utf-8 -*- # # scikit-learn documentation build configuration file, created by # sphinx-quickstart on Fri Jan 8 09:13:42 2010. # # This file is execfile()d with the current directory set to its containing # dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from __future__ import print_function import sys import os from sklearn.externals.six import u # If extensions (or modules to document with autodoc) are in another # directory, add these directories to sys.path here. If the directory # is relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. sys.path.insert(0, os.path.abspath('sphinxext')) from github_link import make_linkcode_resolve import sphinx_gallery # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.autosummary', 'numpydoc', 'sphinx.ext.linkcode', 'sphinx.ext.doctest', 'sphinx_gallery.gen_gallery', 'sphinx_issues', ] # this is needed for some reason... # see https://github.com/numpy/numpydoc/issues/69 numpydoc_class_members_toctree = False # pngmath / imgmath compatibility layer for different sphinx versions import sphinx from distutils.version import LooseVersion if LooseVersion(sphinx.__version__) < LooseVersion('1.4'): extensions.append('sphinx.ext.pngmath') else: extensions.append('sphinx.ext.imgmath') autodoc_default_flags = ['members', 'inherited-members'] # Add any paths that contain templates here, relative to this directory. templates_path = ['templates'] # generate autosummary even if no references autosummary_generate = True # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # Generate the plots for the gallery plot_gallery = True # The master toctree document. master_doc = 'index' # General information about the project. project = u('scikit-learn') copyright = u('2007 - 2017, scikit-learn developers (BSD License)') # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. import sklearn version = sklearn.__version__ # The full version, including alpha/beta/rc tags. release = sklearn.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be # searched for source files. exclude_trees = ['_build', 'templates', 'includes'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = False # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
#add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'scikit-learn' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = {'oldversion': False, 'collapsiblesidebar': True, 'google_analytics': True, 'surveybanner': False, 'sprintbanner': True} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['themes'] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. html_short_title = 'scikit-learn' # The name of an image file (relative to this directory) to place at the top # of the sidebar. html_logo = 'logos/scikit-learn-logo-small.png' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = 'logos/favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['images'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. html_domain_indices = False # If false, no index is generated. html_use_index = False # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'scikit-learndoc' # -- Options for LaTeX output ------------------------------------------------ # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). 
latex_documents = [('index', 'user_guide.tex', u('scikit-learn user guide'), u('scikit-learn developers'), 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. latex_logo = "logos/scikit-learn-logo.png" # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. latex_preamble = r""" \usepackage{amsmath}\usepackage{amsfonts}\usepackage{bm}\usepackage{morefloats} \usepackage{enumitem} \setlistdepth{10} """ # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. latex_domain_indices = False trim_doctests_flags = True sphinx_gallery_conf = { 'doc_module': 'sklearn', 'backreferences_dir': os.path.join('modules', 'generated'), 'reference_url': { 'sklearn': None, 'matplotlib': 'http://matplotlib.org', 'numpy': 'http://docs.scipy.org/doc/numpy-1.8.1', 'scipy': 'http://docs.scipy.org/doc/scipy-0.13.3/reference'} } # The following dictionary contains the information used to create the # thumbnails for the front page of the scikit-learn home page. # key: first image in set # values: (number of plot in set, height of thumbnail) carousel_thumbs = {'sphx_glr_plot_classifier_comparison_001.png': 600, 'sphx_glr_plot_outlier_detection_003.png': 372, 'sphx_glr_plot_gpr_co2_001.png': 350, 'sphx_glr_plot_adaboost_twoclass_001.png': 372, 'sphx_glr_plot_compare_methods_001.png': 349} def make_carousel_thumbs(app, exception): """produces the final resized carousel images""" if exception is not None: return print('Preparing carousel images') image_dir = os.path.join(app.builder.outdir, '_images') for glr_plot, max_width in carousel_thumbs.items(): image = os.path.join(image_dir, glr_plot) if os.path.exists(image): c_thumb = os.path.join(image_dir, glr_plot[:-4] + '_carousel.png') sphinx_gallery.gen_rst.scale_image(image, c_thumb, max_width, 190) # Config for sphinx_issues issues_uri = 'https://github.com/scikit-learn/scikit-learn/issues/{issue}' issues_github_path = 'scikit-learn/scikit-learn' issues_user_uri = 'https://github.com/{user}' def setup(app): # to hide/show the prompt in code examples: app.add_javascript('js/copybutton.js') app.connect('build-finished', make_carousel_thumbs) # The following is used by sphinx.ext.linkcode to provide links to github linkcode_resolve = make_linkcode_resolve('sklearn', u'https://github.com/scikit-learn/' 'scikit-learn/blob/{revision}/' '{package}/{path}#L{lineno}')
nhejazi/scikit-learn
doc/conf.py
Python
bsd-3-clause
9,924
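The conf.py above wires sphinx.ext.linkcode to make_linkcode_resolve from the local sphinxext package. That extension expects a module-level linkcode_resolve(domain, info) callable; the sketch below shows only its expected shape, with a deliberately simplified URL scheme (fixed branch, no line-number anchors), unlike the real helper, which resolves the revision and #L anchors.

def linkcode_resolve(domain, info):
    """Return a source URL for a documented object, or None to skip the link.

    Sphinx calls this with a domain string (e.g. 'py') and an info dict
    holding 'module' and 'fullname'.  The branch and path mapping below are
    simplifications for illustration only.
    """
    if domain != 'py' or not info.get('module'):
        return None
    filename = info['module'].replace('.', '/')
    return ('https://github.com/scikit-learn/scikit-learn/blob/main/%s.py'
            % filename)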
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utils functions used in Task Python API."""

from tensorflow_lite_support.cc.task.core.proto import base_options_pb2
from tensorflow_lite_support.python.task.core import task_options
from tensorflow_lite_support.python.task.core.proto import configuration_pb2

_ProtoBaseOptions = base_options_pb2.BaseOptions


def ConvertToProtoBaseOptions(
    options: task_options.BaseOptions) -> _ProtoBaseOptions:
  """Convert the Python BaseOptions to Proto BaseOptions.

  Python BaseOptions is a subset of the Proto BaseOptions that strips off
  configurations that are useless in Python development.

  Args:
    options: the Python BaseOptions object.

  Returns:
    The Proto BaseOptions object.
  """
  proto_options = _ProtoBaseOptions()

  if options.model_file.file_content:
    proto_options.model_file.file_content = options.model_file.file_content
  elif options.model_file.file_name:
    proto_options.model_file.file_name = options.model_file.file_name

  proto_options.compute_settings.tflite_settings.cpu_settings.num_threads = (
      options.num_threads)

  if options.use_coral:
    proto_options.compute_settings.tflite_settings.delegate = (
        configuration_pb2.Delegate.EDGETPU_CORAL)

  return proto_options
chromium/chromium
third_party/tflite_support/src/tensorflow_lite_support/python/task/core/task_utils.py
Python
bsd-3-clause
1,839
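As a rough illustration of the converter above, the sketch below passes duck-typed stand-ins instead of a real `task_options.BaseOptions` (whose constructor is not shown here); `ConvertToProtoBaseOptions` only reads `model_file.file_content`/`file_name`, `num_threads`, and `use_coral`, so any object exposing those attributes works at runtime. The model file name is a placeholder.

from types import SimpleNamespace

# Stand-in objects with just the attributes the converter reads; a real
# caller would build task_options.BaseOptions instead.
options = SimpleNamespace(
    model_file=SimpleNamespace(file_content=None, file_name='model.tflite'),
    num_threads=4,
    use_coral=False)

proto_options = ConvertToProtoBaseOptions(options)
print(proto_options.model_file.file_name)  # model.tflite
print(proto_options.compute_settings.tflite_settings.cpu_settings.num_threads)  # 4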
# Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import base64 import hashlib import os import string import win32api import win32file import win32com.client from win32com.shell import shell, shellcon import win32security def _GetFileVersion(file_path): """Returns the file version of the given file.""" return win32com.client.Dispatch( 'Scripting.FileSystemObject').GetFileVersion(file_path) def _GetFileBitness(file_path): """Returns the bitness of the given file.""" if win32file.GetBinaryType(file_path) == win32file.SCS_32BIT_BINARY: return '32' return '64' def _GetProductName(file_path): """Returns the product name of the given file. Args: file_path: The absolute or relative path to the file. Returns: A string representing the product name of the file, or None if the product name was not found. """ language_and_codepage_pairs = win32api.GetFileVersionInfo( file_path, '\\VarFileInfo\\Translation') if not language_and_codepage_pairs: return None product_name_entry = ('\\StringFileInfo\\%04x%04x\\ProductName' % language_and_codepage_pairs[0]) return win32api.GetFileVersionInfo(file_path, product_name_entry) def _GetUserSpecificRegistrySuffix(): """Returns '.' + the unpadded Base32 encoding of the MD5 of the user's SID. The result must match the output from the method UserSpecificRegistrySuffix::GetSuffix() in chrome/installer/util/shell_util.cc. It will always be 27 characters long. """ token_handle = win32security.OpenProcessToken(win32api.GetCurrentProcess(), win32security.TOKEN_QUERY) user_sid, _ = win32security.GetTokenInformation(token_handle, win32security.TokenUser) user_sid_string = win32security.ConvertSidToStringSid(user_sid) md5_digest = hashlib.md5(user_sid_string).digest() return '.' + base64.b32encode(md5_digest).rstrip('=') class VariableExpander: """Expands variables in strings.""" def __init__(self, mini_installer_path, previous_version_mini_installer_path, chromedriver_path, quiet, output_dir): """Constructor. The constructor initializes a variable dictionary that maps variables to their values. These are the only acceptable variables: * $BRAND: the browser brand (e.g., "Google Chrome" or "Chromium"). * $CHROME_DIR: the directory of Chrome (or Chromium) from the base installation directory. * $CHROME_HTML_PROG_ID: 'ChromeHTML' (or 'ChromiumHTM'). * $CHROME_LONG_NAME: 'Google Chrome' (or 'Chromium'). * $CHROME_LONG_NAME_BETA: 'Google Chrome Beta' if $BRAND is 'Google * Chrome'. * $CHROME_LONG_NAME_DEV: 'Google Chrome Dev' if $BRAND is 'Google * Chrome'. * $CHROME_LONG_NAME_SXS: 'Google Chrome SxS' if $BRAND is 'Google * Chrome'. * $CHROME_SHORT_NAME: 'Chrome' (or 'Chromium'). * $CHROME_SHORT_NAME_BETA: 'ChromeBeta' if $BRAND is 'Google Chrome'. * $CHROME_SHORT_NAME_DEV: 'ChromeDev' if $BRAND is 'Google Chrome'. * $CHROME_SHORT_NAME_SXS: 'ChromeCanary' if $BRAND is 'Google Chrome'. * $CHROME_UPDATE_REGISTRY_SUBKEY: the registry key, excluding the root key, of Chrome for Google Update. * $CHROME_UPDATE_REGISTRY_SUBKEY_DEV: the registry key, excluding the root key, of Chrome Dev for Google Update. * $CHROME_UPDATE_REGISTRY_SUBKEY_BETA: the registry key, excluding the root key, of Chrome Beta for Google Update. * $CHROME_UPDATE_REGISTRY_SUBKEY_SXS: the registry key, excluding the root key, of Chrome SxS for Google Update. * $CHROMEDRIVER_PATH: Path to chromedriver. * $QUIET: Supress output. * $OUTPUT_DIR: "--output-dir=DIR" or an empty string. 
* $LAUNCHER_UPDATE_REGISTRY_SUBKEY: the registry key, excluding the root key, of the app launcher for Google Update if $BRAND is 'Google * Chrome'. * $LOCAL_APPDATA: the unquoted path to the Local Application Data folder. * $LOG_FILE: "--log-file=FILE" or an empty string. * $MINI_INSTALLER: the unquoted path to the mini_installer. * $MINI_INSTALLER_BITNESS: the bitness of the mini_installer. * $MINI_INSTALLER_FILE_VERSION: the file version of $MINI_INSTALLER. * $PREVIOUS_VERSION_MINI_INSTALLER: the unquoted path to a mini_installer whose version is lower than $MINI_INSTALLER. * $PREVIOUS_VERSION_MINI_INSTALLER_FILE_VERSION: the file version of $PREVIOUS_VERSION_MINI_INSTALLER. * $PROGRAM_FILES: the unquoted path to the Program Files folder. * $USER_SPECIFIC_REGISTRY_SUFFIX: the output from the function _GetUserSpecificRegistrySuffix(). * $VERSION_[XP/SERVER_2003/VISTA/WIN7/WIN8/WIN8_1/WIN10]: a 2-tuple representing the version of the corresponding OS. * $WINDOWS_VERSION: a 2-tuple representing the current Windows version. * $CHROME_TOAST_ACTIVATOR_CLSID: NotificationActivator's CLSID for Chrome. * $CHROME_TOAST_ACTIVATOR_CLSID_BETA: NotificationActivator's CLSID for Chrome Beta. * $CHROME_TOAST_ACTIVATOR_CLSID_DEV: NotificationActivator's CLSID for Chrome Dev. * $CHROME_TOAST_ACTIVATOR_CLSID_SXS: NotificationActivator's CLSID for Chrome SxS. * $CHROME_ELEVATOR_CLSID: Elevator Service CLSID for Chrome. * $CHROME_ELEVATOR_CLSID_BETA: Elevator Service CLSID for Chrome Beta. * $CHROME_ELEVATOR_CLSID_DEV: Elevator Service CLSID for Chrome Dev. * $CHROME_ELEVATOR_CLSID_SXS: Elevator Service CLSID for Chrome SxS. * $CHROME_ELEVATOR_IID: IElevator IID for Chrome. * $CHROME_ELEVATOR_IID_BETA: IElevator IID for Chrome Beta. * $CHROME_ELEVATOR_IID_DEV: IElevator IID for Chrome Dev. * $CHROME_ELEVATOR_IID_SXS: IElevator IID for Chrome SxS. * $CHROME_ELEVATION_SERVICE_NAME: Elevation Service Name for Chrome. * $CHROME_ELEVATION_SERVICE_NAME_BETA: Elevation Service Name for Chrome Beta. * $CHROME_ELEVATION_SERVICE_NAME_DEV: Elevation Service Name for Chrome Dev. * $CHROME_ELEVATION_SERVICE_NAME_SXS: Elevation Service Name for Chrome SxS. * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME: Elevation Service Display Name for Chrome. * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME_BETA: Elevation Service Display Name for Chrome Beta. * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME_DEV: Elevation Service Display Name for Chrome Dev. * $CHROME_ELEVATION_SERVICE_DISPLAY_NAME_SXS: Elevation Service Display Name for Chrome SxS. * $LAST_INSTALLER_BREAKING_VERSION: The last installer version that had breaking changes. Args: mini_installer_path: The path to a mini_installer. previous_version_mini_installer_path: The path to a mini_installer whose version is lower than |mini_installer_path|. 
""" mini_installer_abspath = os.path.abspath(mini_installer_path) previous_version_mini_installer_abspath = os.path.abspath( previous_version_mini_installer_path) windows_major_ver, windows_minor_ver, _, _, _ = win32api.GetVersionEx() self._variable_mapping = { 'CHROMEDRIVER_PATH': chromedriver_path, 'QUIET': '-q' if quiet else '', 'OUTPUT_DIR': '"--output-dir=%s"' % output_dir if output_dir else '', 'LAST_INSTALLER_BREAKING_VERSION': '85.0.4169.0', 'LOCAL_APPDATA': shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, None, 0), 'LOG_FILE': '', 'MINI_INSTALLER': mini_installer_abspath, 'MINI_INSTALLER_FILE_VERSION': _GetFileVersion(mini_installer_abspath), 'MINI_INSTALLER_BITNESS': _GetFileBitness(mini_installer_abspath), 'PREVIOUS_VERSION_MINI_INSTALLER': previous_version_mini_installer_abspath, 'PREVIOUS_VERSION_MINI_INSTALLER_FILE_VERSION': _GetFileVersion(previous_version_mini_installer_abspath), 'PROGRAM_FILES': shell.SHGetFolderPath( 0, shellcon.CSIDL_PROGRAM_FILES if _GetFileBitness(mini_installer_abspath) == '64' else shellcon.CSIDL_PROGRAM_FILESX86, None, 0), 'USER_SPECIFIC_REGISTRY_SUFFIX': _GetUserSpecificRegistrySuffix(), 'VERSION_SERVER_2003': '(5, 2)', 'VERSION_VISTA': '(6, 0)', 'VERSION_WIN10': '(10, 0)', 'VERSION_WIN7': '(6, 1)', 'VERSION_WIN8': '(6, 2)', 'VERSION_WIN8_1': '(6, 3)', 'VERSION_XP': '(5, 1)', 'WINDOWS_VERSION': '(%s, %s)' % (windows_major_ver, windows_minor_ver) } mini_installer_product_name = _GetProductName(mini_installer_abspath) if mini_installer_product_name == 'Google Chrome Installer': self._variable_mapping.update({ 'BRAND': 'Google Chrome', 'BINARIES_UPDATE_REGISTRY_SUBKEY': ('Software\\Google\\Update\\Clients\\' '{4DC8B4CA-1BDA-483e-B5FA-D3C12E15B62D}'), 'CHROME_DIR': 'Google\\Chrome', 'CHROME_HTML_PROG_ID': 'ChromeHTML', 'CHROME_HTML_PROG_ID_BETA': 'ChromeBHTML', 'CHROME_HTML_PROG_ID_DEV': 'ChromeDHTML', 'CHROME_HTML_PROG_ID_SXS': 'ChromeSSHTM', 'CHROME_LONG_NAME': 'Google Chrome', 'CHROME_SHORT_NAME': 'Chrome', 'CHROME_UPDATE_REGISTRY_SUBKEY': ('Software\\Google\\Update\\Clients\\' '{8A69D345-D564-463c-AFF1-A69D9E530F96}'), 'CHROME_CLIENT_STATE_KEY_BETA': ('Software\\Google\\Update\\ClientState\\' '{8237E44A-0054-442C-B6B6-EA0509993955}'), 'CHROME_CLIENT_STATE_KEY_DEV': ('Software\\Google\\Update\\ClientState\\' '{401C381F-E0DE-4B85-8BD8-3F3F14FBDA57}'), 'CHROME_CLIENT_STATE_KEY_SXS': ('Software\\Google\\Update\\ClientState\\' '{4ea16ac7-fd5a-47c3-875b-dbf4a2008c20}'), 'CHROME_CLIENT_STATE_KEY': ('Software\\Google\\Update\\ClientState\\' '{8A69D345-D564-463c-AFF1-A69D9E530F96}'), 'CHROME_TOAST_ACTIVATOR_CLSID': ('{A2C6CB58-C076-425C-ACB7-6D19D64428CD}'), 'CHROME_DIR_BETA': 'Google\\Chrome Beta', 'CHROME_DIR_DEV': 'Google\\Chrome Dev', 'CHROME_DIR_SXS': 'Google\\Chrome SxS', 'CHROME_LONG_NAME_BETA': 'Google Chrome Beta', 'CHROME_LONG_NAME_DEV': 'Google Chrome Dev', 'CHROME_LONG_NAME_SXS': 'Google Chrome SxS', 'CHROME_SHORT_NAME_BETA': 'ChromeBeta', 'CHROME_SHORT_NAME_DEV': 'ChromeDev', 'CHROME_SHORT_NAME_SXS': 'ChromeCanary', 'CHROME_UPDATE_REGISTRY_SUBKEY_BETA': ('Software\\Google\\Update\\Clients\\' '{8237E44A-0054-442C-B6B6-EA0509993955}'), 'CHROME_UPDATE_REGISTRY_SUBKEY_DEV': ('Software\\Google\\Update\\Clients\\' '{401C381F-E0DE-4B85-8BD8-3F3F14FBDA57}'), 'CHROME_UPDATE_REGISTRY_SUBKEY_SXS': ('Software\\Google\\Update\\Clients\\' '{4ea16ac7-fd5a-47c3-875b-dbf4a2008c20}'), 'LAUNCHER_UPDATE_REGISTRY_SUBKEY': ('Software\\Google\\Update\\Clients\\' '{FDA71E6F-AC4C-4a00-8B70-9958A68906BF}'), 'CHROME_TOAST_ACTIVATOR_CLSID_BETA': 
('{B89B137F-96AA-4AE2-98C4-6373EAA1EA4D}'), 'CHROME_TOAST_ACTIVATOR_CLSID_DEV': ('{F01C03EB-D431-4C83-8D7A-902771E732FA}'), 'CHROME_TOAST_ACTIVATOR_CLSID_SXS': ('{FA372A6E-149F-4E95-832D-8F698D40AD7F}'), 'CHROME_ELEVATOR_CLSID': ('{708860E0-F641-4611-8895-7D867DD3675B}'), 'CHROME_ELEVATOR_CLSID_BETA': ('{DD2646BA-3707-4BF8-B9A7-038691A68FC2}'), 'CHROME_ELEVATOR_CLSID_DEV': ('{DA7FDCA5-2CAA-4637-AA17-0740584DE7DA}'), 'CHROME_ELEVATOR_CLSID_SXS': ('{704C2872-2049-435E-A469-0A534313C42B}'), 'CHROME_ELEVATOR_IID': ('{463ABECF-410D-407F-8AF5-0DF35A005CC8}'), 'CHROME_ELEVATOR_IID_BETA': ('{A2721D66-376E-4D2F-9F0F-9070E9A42B5F}'), 'CHROME_ELEVATOR_IID_DEV': ('{BB2AA26B-343A-4072-8B6F-80557B8CE571}'), 'CHROME_ELEVATOR_IID_SXS': ('{4F7CE041-28E9-484F-9DD0-61A8CACEFEE4}'), 'CHROME_ELEVATION_SERVICE_NAME': ('GoogleChromeElevationService'), 'CHROME_ELEVATION_SERVICE_NAME_BETA': ('GoogleChromeBetaElevationService'), 'CHROME_ELEVATION_SERVICE_NAME_DEV': ('GoogleChromeDevElevationService'), 'CHROME_ELEVATION_SERVICE_NAME_SXS': ('GoogleChromeCanaryElevationService'), 'CHROME_ELEVATION_SERVICE_DISPLAY_NAME': ('Google Chrome Elevation Service ' + '(GoogleChromeElevationService)'), 'CHROME_ELEVATION_SERVICE_DISPLAY_NAME_BETA': ('Google Chrome Beta Elevation Service' ' (GoogleChromeBetaElevationService)'), 'CHROME_ELEVATION_SERVICE_DISPLAY_NAME_DEV': ('Google Chrome Dev Elevation Service' ' (GoogleChromeDevElevationService)'), 'CHROME_ELEVATION_SERVICE_DISPLAY_NAME_SXS': ('Google Chrome Canary Elevation Service'), }) elif mini_installer_product_name == 'Chromium Installer': self._variable_mapping.update({ 'BRAND': 'Chromium', 'BINARIES_UPDATE_REGISTRY_SUBKEY': 'Software\\Chromium Binaries', 'CHROME_DIR': 'Chromium', 'CHROME_HTML_PROG_ID': 'ChromiumHTM', 'CHROME_LONG_NAME': 'Chromium', 'CHROME_SHORT_NAME': 'Chromium', 'CHROME_UPDATE_REGISTRY_SUBKEY': 'Software\\Chromium', 'CHROME_CLIENT_STATE_KEY': 'Software\\Chromium', 'CHROME_TOAST_ACTIVATOR_CLSID': ('{635EFA6F-08D6-4EC9-BD14-8A0FDE975159}'), 'CHROME_ELEVATOR_CLSID': ('{D133B120-6DB4-4D6B-8BFE-83BF8CA1B1B0}'), 'CHROME_ELEVATOR_IID': ('{B88C45B9-8825-4629-B83E-77CC67D9CEED}'), 'CHROME_ELEVATION_SERVICE_NAME': 'ChromiumElevationService', 'CHROME_ELEVATION_SERVICE_DISPLAY_NAME': ('Chromium Elevation Service (ChromiumElevationService)'), }) else: raise KeyError("Unknown mini_installer product name '%s'" % mini_installer_product_name) def SetLogFile(self, log_file): """Updates the value for the LOG_FILE variable""" self._variable_mapping['LOG_FILE'] = ('"--log-file=%s"' % log_file if log_file else '') def Expand(self, a_string): """Expands variables in the given string. This method resolves only variables defined in the constructor. It does not resolve environment variables. Any dollar signs that are not part of variables must be escaped with $$, otherwise a KeyError or a ValueError will be raised. Args: a_string: A string. Returns: A new string created by replacing variables with their values. """ return string.Template(a_string).substitute(self._variable_mapping)
ric2b/Vivaldi-browser
chromium/chrome/test/mini_installer/variable_expander.py
Python
bsd-3-clause
17,079
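`VariableExpander.Expand` is a thin wrapper around `string.Template.substitute`, so the `$VARIABLE` syntax, the `$$` escape, and the KeyError raised for unknown variables all come from the standard library rather than from this class. A self-contained sketch of those semantics, using a hypothetical mapping in place of a real expander (which needs pywin32 and installer binaries):

import string

# Hypothetical mapping standing in for VariableExpander._variable_mapping.
mapping = {'MINI_INSTALLER': r'C:\out\mini_installer.exe', 'QUIET': '-q'}

print(string.Template('$MINI_INSTALLER $QUIET').substitute(mapping))
# C:\out\mini_installer.exe -q

print(string.Template('costs $$5').substitute(mapping))
# costs $5 -- '$$' yields a literal dollar sign

try:
    string.Template('$UNKNOWN').substitute(mapping)
except KeyError as err:
    print('unknown variable:', err)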
""" tests.components.automation.test_location ±±±~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tests location automation. """ import unittest from homeassistant.components import automation, zone from tests.common import get_test_home_assistant class TestAutomationZone(unittest.TestCase): """ Test the event automation. """ def setUp(self): # pylint: disable=invalid-name self.hass = get_test_home_assistant() zone.setup(self.hass, { 'zone': { 'name': 'test', 'latitude': 32.880837, 'longitude': -117.237561, 'radius': 250, } }) self.calls = [] def record_call(service): self.calls.append(service) self.hass.services.register('test', 'automation', record_call) def tearDown(self): # pylint: disable=invalid-name """ Stop down stuff we started. """ self.hass.stop() def test_if_fires_on_zone_enter(self): self.hass.states.set('test.entity', 'hello', { 'latitude': 32.881011, 'longitude': -117.234758 }) self.hass.pool.block_till_done() self.assertTrue(automation.setup(self.hass, { automation.DOMAIN: { 'trigger': { 'platform': 'zone', 'entity_id': 'test.entity', 'zone': 'zone.test', 'event': 'enter', }, 'action': { 'service': 'test.automation', } } })) self.hass.states.set('test.entity', 'hello', { 'latitude': 32.880586, 'longitude': -117.237564 }) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_not_fires_for_enter_on_zone_leave(self): self.hass.states.set('test.entity', 'hello', { 'latitude': 32.880586, 'longitude': -117.237564 }) self.hass.pool.block_till_done() self.assertTrue(automation.setup(self.hass, { automation.DOMAIN: { 'trigger': { 'platform': 'zone', 'entity_id': 'test.entity', 'zone': 'zone.test', 'event': 'enter', }, 'action': { 'service': 'test.automation', } } })) self.hass.states.set('test.entity', 'hello', { 'latitude': 32.881011, 'longitude': -117.234758 }) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_if_fires_on_zone_leave(self): self.hass.states.set('test.entity', 'hello', { 'latitude': 32.880586, 'longitude': -117.237564 }) self.hass.pool.block_till_done() self.assertTrue(automation.setup(self.hass, { automation.DOMAIN: { 'trigger': { 'platform': 'zone', 'entity_id': 'test.entity', 'zone': 'zone.test', 'event': 'leave', }, 'action': { 'service': 'test.automation', } } })) self.hass.states.set('test.entity', 'hello', { 'latitude': 32.881011, 'longitude': -117.234758 }) self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls)) def test_if_not_fires_for_leave_on_zone_enter(self): self.hass.states.set('test.entity', 'hello', { 'latitude': 32.881011, 'longitude': -117.234758 }) self.hass.pool.block_till_done() self.assertTrue(automation.setup(self.hass, { automation.DOMAIN: { 'trigger': { 'platform': 'zone', 'entity_id': 'test.entity', 'zone': 'zone.test', 'event': 'leave', }, 'action': { 'service': 'test.automation', } } })) self.hass.states.set('test.entity', 'hello', { 'latitude': 32.880586, 'longitude': -117.237564 }) self.hass.pool.block_till_done() self.assertEqual(0, len(self.calls)) def test_zone_condition(self): self.hass.states.set('test.entity', 'hello', { 'latitude': 32.880586, 'longitude': -117.237564 }) self.hass.pool.block_till_done() self.assertTrue(automation.setup(self.hass, { automation.DOMAIN: { 'trigger': { 'platform': 'event', 'event_type': 'test_event' }, 'condition': { 'platform': 'zone', 'entity_id': 'test.entity', 'zone': 'zone.test', }, 'action': { 'service': 'test.automation', } } })) self.hass.bus.fire('test_event') self.hass.pool.block_till_done() self.assertEqual(1, len(self.calls))
toddeye/home-assistant
tests/components/automation/test_zone.py
Python
mit
5,342
# Copyright (C) Ivan Kravets <[email protected]> # See LICENSE for details. import argparse import zipfile from os import getcwd, listdir, makedirs, mkdir, rename from os.path import isdir, isfile, join from shutil import move, rmtree from sys import exit as sys_exit from sys import path path.append("..") from platformio.util import exec_command, get_home_dir def _unzip_generated_file(mbed_dir, output_dir, mcu): filename = join( mbed_dir, "build", "export", "MBED_A1_emblocks_%s.zip" % mcu) variant_dir = join(output_dir, "variant", mcu) if isfile(filename): with zipfile.ZipFile(filename) as zfile: mkdir(variant_dir) zfile.extractall(variant_dir) for f in listdir(join(variant_dir, "MBED_A1")): if not f.lower().startswith("mbed"): continue move(join(variant_dir, "MBED_A1", f), variant_dir) rename(join(variant_dir, "MBED_A1.eix"), join(variant_dir, "%s.eix" % mcu)) rmtree(join(variant_dir, "MBED_A1")) else: print "Warning! Skipped board: %s" % mcu def buildlib(mbed_dir, mcu, lib="mbed"): build_command = [ "python", join(mbed_dir, "workspace_tools", "build.py"), "--mcu", mcu, "-t", "GCC_ARM" ] if lib is not "mbed": build_command.append(lib) build_result = exec_command(build_command, cwd=getcwd()) if build_result['returncode'] != 0: print "* %s doesn't support %s library!" % (mcu, lib) def copylibs(mbed_dir, output_dir): libs = ["dsp", "fat", "net", "rtos", "usb", "usb_host"] libs_dir = join(output_dir, "libs") makedirs(libs_dir) print "Moving generated libraries to framework dir..." for lib in libs: if lib == "net": move(join(mbed_dir, "build", lib, "eth"), libs_dir) continue move(join(mbed_dir, "build", lib), libs_dir) def main(mbed_dir, output_dir): print "Starting..." path.append(mbed_dir) from workspace_tools.export import gccarm if isdir(output_dir): print "Deleting previous framework dir..." rmtree(output_dir) settings_file = join(mbed_dir, "workspace_tools", "private_settings.py") if not isfile(settings_file): with open(settings_file, "w") as f: f.write("GCC_ARM_PATH = '%s'" % join(get_home_dir(), "packages", "toolchain-gccarmnoneeabi", "bin")) makedirs(join(output_dir, "variant")) mbed_libs = ["--rtos", "--dsp", "--fat", "--eth", "--usb", "--usb_host"] for mcu in set(gccarm.GccArm.TARGETS): print "Processing board: %s" % mcu buildlib(mbed_dir, mcu) for lib in mbed_libs: buildlib(mbed_dir, mcu, lib) result = exec_command( ["python", join(mbed_dir, "workspace_tools", "project.py"), "--mcu", mcu, "-i", "emblocks", "-p", "0", "-b"], cwd=getcwd() ) if result['returncode'] != 0: print "Unable to build the project for %s" % mcu continue _unzip_generated_file(mbed_dir, output_dir, mcu) copylibs(mbed_dir, output_dir) with open(join(output_dir, "boards.txt"), "w") as fp: fp.write("\n".join(sorted(listdir(join(output_dir, "variant"))))) print "Complete!" if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--mbed', help="The path to mbed framework") parser.add_argument('--output', help="The path to output directory") args = vars(parser.parse_args()) sys_exit(main(args["mbed"], args["output"]))
mseroczynski/platformio
scripts/mbed_to_package.py
Python
mit
3,667
'''tzinfo timezone information for GMT_minus_0.'''
from pytz.tzinfo import StaticTzInfo
from pytz.tzinfo import memorized_timedelta as timedelta

class GMT_minus_0(StaticTzInfo):
    '''GMT_minus_0 timezone definition. See datetime.tzinfo for details'''
    zone = 'GMT_minus_0'
    _utcoffset = timedelta(seconds=0)
    _tzname = 'GMT'

GMT_minus_0 = GMT_minus_0()
newvem/pytz
pytz/zoneinfo/GMT_minus_0.py
Python
mit
367
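Since `GMT_minus_0` is a `StaticTzInfo` instance with a fixed zero offset, attaching it to a naive datetime goes through pytz's usual `localize` call. A short sketch, assuming the generated module is importable under its `pytz.zoneinfo` path:

from datetime import datetime
from pytz.zoneinfo.GMT_minus_0 import GMT_minus_0

# localize() attaches the static zero-offset zone to a naive datetime.
dt = GMT_minus_0.localize(datetime(2015, 6, 1, 12, 0))
print(dt.isoformat())   # 2015-06-01T12:00:00+00:00
print(dt.tzname())      # GMT
print(dt.utcoffset())   # 0:00:00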
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from azure.cli.testsdk import ScenarioTest


class CdnEdgeNodecenarioTest(ScenarioTest):
    def test_edge_node_crud(self):
        self.cmd('cdn edge-node list', checks=self.check('length(@)', 3))
yugangw-msft/azure-cli
src/azure-cli/azure/cli/command_modules/cdn/tests/latest/test_nodes_scenarios.py
Python
mit
543
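`ScenarioTest.cmd` runs the CLI command and evaluates each check as a JMESPath expression against the JSON output, so `self.check('length(@)', 3)` asserts the edge-node list has exactly three entries. A hedged sketch of the same helper with more than one check (the class name and extra assertion are illustrative, not part of the real test suite):

class CdnEdgeNodeListSketch(ScenarioTest):
    def test_edge_node_list_shape(self):
        self.cmd('cdn edge-node list', checks=[
            self.check('length(@)', 3),      # exactly three edge nodes
            self.check('type(@)', 'array'),  # the output is a JSON array
        ])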
#!/usr/bin/python # # Copyright 2010, 2011 wkhtmltopdf authors # # This file is part of wkhtmltopdf. # # wkhtmltopdf is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # wkhtmltopdf is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with wkhtmltopdf. If not, see <http:#www.gnu.org/licenses/>. from sys import argv, exit import re from datetime import date import os import difflib cdate = re.compile(r"Copyright ([0-9 ,]*) wkhtmltopdf authors") ifdef = re.compile(r"^[\n\r \t]*#ifndef __(.*)__[\t ]*\n#define __(\1)__[\t ]*\n") endif = re.compile(r"#endif.*[\r\n \t]*$") ws = re.compile(r"[ \t]*[\r\n]") branchspace = re.compile(r"([ \t\r\n])(for|if|while|switch|foreach)[\t \r\n]*\(") hangelse = re.compile(r"}[\r\n\t ]*(else)") braceup = re.compile(r"(\)|else)[\r\n\t ]*{") include = re.compile(r"(#include (\"[^\"]*\"|<[^>]*>)\n)+") def includesort(x): return "\n".join(sorted(x.group(0)[:-1].split("\n"))+[""]) changes=False progname="wkhtmltopdf" for path in argv[1:]: if path.split("/")[0] == "include": continue try: data = file(path).read() except: continue mo = cdate.search(data) years = set(mo.group(1).split(", ")) if mo else set() years.add(str(date.today().year)) ext = path.rsplit(".",2)[-1] header = "" cc = "//" if ext in ["hh","h","c","cc","cpp","inl", "inc"]: header += """// -*- mode: c++; tab-width: 4; indent-tabs-mode: t; eval: (progn (c-set-style "stroustrup") (c-set-offset 'innamespace 0)); -*- // vi:set ts=4 sts=4 sw=4 noet : // """ elif ext in ["sh"]: header += "#!/bin/bash\n#\n" cc = "#" elif ext in ["py"]: header += "#!/usr/bin/python\n#\n" cc = "#" elif ext in ["pro","pri"]: cc = "#" else: continue header += """// Copyright %(years)s %(name)s authors // // This file is part of %(name)s. // // %(name)s is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // %(name)s is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with %(name)s. If not, see <http://www.gnu.org/licenses/>. 
"""%{"years": (", ".join(sorted(list(years)))),"name":progname} if ext in ["c", "h", "inc"]: header = "/*" + header[2:-1] + " */\n\n" cc = " *" hexp = re.compile(r"^/\*([^*]*(\*[^/]))*[^*]*\*/[ \t\n]*"); else: #Strip away generated header hexp = re.compile("^(%s[^\\n]*\\n)*"%(cc)) ndata = hexp.sub("", data,1) ndata = ws.sub("\n", ndata)+"\n" if ext in ["hh","h","inl"]: s=0 e=-1 while ndata[s] in ['\r','\n',' ','\t']: s+=1 while ndata[e] in ['\r','\n',' ','\t']: e-=1 #Strip away generated ifdef if ifdef.search(ndata): ndata = endif.sub("",ifdef.sub("",ndata,1),1) s=0 e=-1 while ndata[s] in ['\r','\n',' ','\t']: s+=1 while ndata[e] in ['\r','\n',' ','\t']: e-=1 ndata=ndata[s:e+1].replace(" ",'\t') if ext in ["hh","h","c","cc","cpp","inl"]: ndata = branchspace.sub(r"\1\2 (",ndata) ndata = hangelse.sub("} else",ndata) ndata = braceup.sub(r"\1 {",ndata) ndata = include.sub(includesort, ndata) if ext in ["hh","h","inl"]: n = os.path.split(path)[-1].replace(".","_").replace(" ","_").upper() ndata = """#ifndef __%s__ #define __%s__ %s #endif %s__%s__%s"""%(n,n,ndata, "//" if ext != "h" else "/*", n, "" if ext != "h" else "*/") ndata = header.replace("//",cc)+ndata+"\n" if ndata != data: for x in difflib.unified_diff(data.split("\n"),ndata.split("\n"), "a/"+path, "b/"+path): print x changes=True file(path, "w").write(ndata) if changes: exit(1)
anouschka42/starktheatreprod
sites/all/libraries/wkhtmltopdf-0.12.0/scripts/sourcefix.py
Python
gpl-2.0
4,292
from __future__ import print_function, unicode_literals

from praw.internal import _to_reddit_list
from .helper import PRAWTest, betamax


class InternalTest(PRAWTest):
    def test__to_reddit_list(self):
        output = _to_reddit_list('hello')
        self.assertEqual('hello', output)

    def test__to_reddit_list_with_list(self):
        output = _to_reddit_list(['hello'])
        self.assertEqual('hello', output)

    def test__to_reddit_list_with_empty_list(self):
        output = _to_reddit_list([])
        self.assertEqual('', output)

    def test__to_reddit_list_with_big_list(self):
        output = _to_reddit_list(['hello', 'world'])
        self.assertEqual('hello,world', output)

    @betamax()
    def test__to_reddit_list_with_object(self):
        output = _to_reddit_list(self.r.get_subreddit(self.sr))
        self.assertEqual(self.sr, output)

    def test__to_reddit_list_with_object_in_list(self):
        obj = self.r.get_subreddit(self.sr)
        output = _to_reddit_list([obj])
        self.assertEqual(self.sr, output)

    def test__to_reddit_list_with_mix(self):
        obj = self.r.get_subreddit(self.sr)
        output = _to_reddit_list([obj, 'hello'])
        self.assertEqual("{0},{1}".format(self.sr, 'hello'), output)
dmarx/praw
tests/test_internal.py
Python
gpl-3.0
1,261
""" Telstra API platform for notify component. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/notify.telstra/ """ import logging import requests import voluptuous as vol from homeassistant.components.notify import ( BaseNotificationService, ATTR_TITLE, PLATFORM_SCHEMA) from homeassistant.const import CONTENT_TYPE_JSON, HTTP_HEADER_CONTENT_TYPE import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) CONF_CONSUMER_KEY = 'consumer_key' CONF_CONSUMER_SECRET = 'consumer_secret' CONF_PHONE_NUMBER = 'phone_number' PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_CONSUMER_KEY): cv.string, vol.Required(CONF_CONSUMER_SECRET): cv.string, vol.Required(CONF_PHONE_NUMBER): cv.string, }) def get_service(hass, config, discovery_info=None): """Get the Telstra SMS API notification service.""" consumer_key = config.get(CONF_CONSUMER_KEY) consumer_secret = config.get(CONF_CONSUMER_SECRET) phone_number = config.get(CONF_PHONE_NUMBER) if _authenticate(consumer_key, consumer_secret) is False: _LOGGER.exception("Error obtaining authorization from Telstra API") return None return TelstraNotificationService( consumer_key, consumer_secret, phone_number) class TelstraNotificationService(BaseNotificationService): """Implementation of a notification service for the Telstra SMS API.""" def __init__(self, consumer_key, consumer_secret, phone_number): """Initialize the service.""" self._consumer_key = consumer_key self._consumer_secret = consumer_secret self._phone_number = phone_number def send_message(self, message="", **kwargs): """Send a message to a user.""" title = kwargs.get(ATTR_TITLE) # Retrieve authorization first token_response = _authenticate( self._consumer_key, self._consumer_secret) if token_response is False: _LOGGER.exception("Error obtaining authorization from Telstra API") return # Send the SMS if title: text = '{} {}'.format(title, message) else: text = message message_data = { 'to': self._phone_number, 'body': text, } message_resource = 'https://api.telstra.com/v1/sms/messages' message_headers = { HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_JSON, 'Authorization': 'Bearer ' + token_response['access_token'], } message_response = requests.post( message_resource, headers=message_headers, json=message_data, timeout=10) if message_response.status_code != 202: _LOGGER.exception("Failed to send SMS. Status code: %d", message_response.status_code) def _authenticate(consumer_key, consumer_secret): """Authenticate with the Telstra API.""" token_data = { 'client_id': consumer_key, 'client_secret': consumer_secret, 'grant_type': 'client_credentials', 'scope': 'SMS' } token_resource = 'https://api.telstra.com/v1/oauth/token' token_response = requests.get( token_resource, params=token_data, timeout=10).json() if 'error' in token_response: return False return token_response
MungoRae/home-assistant
homeassistant/components/notify/telstra.py
Python
apache-2.0
3,404
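The notification service above authenticates with a client-credentials request and then POSTs the SMS as JSON. A hedged sketch of driving the module directly with a plain config dict (the credentials and phone number are placeholders; in a real install Home Assistant builds this dict from configuration.yaml, and `get_service` returns `None` when authentication fails):

# Placeholder credentials for illustration only.
config = {
    'consumer_key': 'MY_CONSUMER_KEY',
    'consumer_secret': 'MY_CONSUMER_SECRET',
    'phone_number': '+61400000000',
}

service = get_service(hass=None, config=config)
if service is not None:
    service.send_message('Garage door left open', title='Home Assistant')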
binomial_fit.coef()
madmax983/h2o-3
h2o-docs/src/booklets/v2_2015/source/GLM_Vignette_code_examples/glm_model_output_20.py
Python
apache-2.0
19
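The `binomial_fit.coef()` line above is a one-line fragment from the GLM vignette; for a fitted GLM, `coef()` returns a dict mapping coefficient names to values. A hedged sketch of how such a model might be produced with the H2O Python estimator API (the file path and column names are placeholders, not the vignette's actual data):

import h2o
from h2o.estimators.glm import H2OGeneralizedLinearEstimator

h2o.init()
frame = h2o.import_file('prostate.csv')          # placeholder dataset path
frame['CAPSULE'] = frame['CAPSULE'].asfactor()   # binomial GLM needs a factor response
binomial_fit = H2OGeneralizedLinearEstimator(family='binomial')
binomial_fit.train(x=['AGE', 'RACE', 'PSA'], y='CAPSULE', training_frame=frame)
print(binomial_fit.coef())                       # {'Intercept': ..., 'AGE': ..., ...}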
#!/usr/bin/env python

import os
import sys

from macholib.MachOStandalone import MachOStandalone
from macholib.util import strip_files


def standaloneApp(path):
    # An app bundle is a directory that contains 'Contents'; bail out otherwise.
    if not (os.path.isdir(path) and
            os.path.exists(os.path.join(path, 'Contents'))):
        raise SystemExit('%s: %s does not look like an app bundle' % (
            sys.argv[0], path))
    files = MachOStandalone(path).run()
    strip_files(files)


def main():
    print("WARNING: 'macho_standalone' is deprecated, use 'python -mmacholib dump' instead")
    if not sys.argv[1:]:
        raise SystemExit('usage: %s [appbundle ...]' % (sys.argv[0],))
    for fn in sys.argv[1:]:
        standaloneApp(fn)


if __name__ == '__main__':
    main()
timeyyy/PyUpdater
pyupdater/vendor/PyInstaller/lib/macholib/macho_standalone.py
Python
bsd-2-clause
718
# -*- coding: utf-8 -*- # Unit tests for cache framework # Uses whatever cache backend is set in the test settings file. from __future__ import unicode_literals import copy import os import re import shutil import tempfile import threading import time import unittest import warnings from django.conf import settings from django.core import management, signals from django.core.cache import ( DEFAULT_CACHE_ALIAS, CacheKeyWarning, cache, caches, ) from django.core.cache.utils import make_template_fragment_key from django.db import connection, connections, transaction from django.http import HttpRequest, HttpResponse, StreamingHttpResponse from django.middleware.cache import ( CacheMiddleware, FetchFromCacheMiddleware, UpdateCacheMiddleware, ) from django.middleware.csrf import CsrfViewMiddleware from django.template import engines from django.template.context_processors import csrf from django.template.response import TemplateResponse from django.test import ( RequestFactory, TestCase, TransactionTestCase, override_settings, ) from django.test.signals import setting_changed from django.utils import six, timezone, translation from django.utils.cache import ( get_cache_key, learn_cache_key, patch_cache_control, patch_response_headers, patch_vary_headers, ) from django.utils.encoding import force_text from django.views.decorators.cache import cache_page from .models import Poll, expensive_calculation try: # Use the same idiom as in cache backends from django.utils.six.moves import cPickle as pickle except ImportError: import pickle # functions/classes for complex data type tests def f(): return 42 class C: def m(n): return 24 class Unpickable(object): def __getstate__(self): raise pickle.PickleError() @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } }) class DummyCacheTests(TestCase): # The Dummy cache backend doesn't really behave like a test backend, # so it has its own test case. 
def test_simple(self): "Dummy cache backend ignores cache set calls" cache.set("key", "value") self.assertIsNone(cache.get("key")) def test_add(self): "Add doesn't do anything in dummy cache backend" cache.add("addkey1", "value") result = cache.add("addkey1", "newvalue") self.assertTrue(result) self.assertIsNone(cache.get("addkey1")) def test_non_existent(self): "Non-existent keys aren't found in the dummy cache backend" self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" cache.set('a', 'a') cache.set('b', 'b') cache.set('c', 'c') cache.set('d', 'd') self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" cache.set("key1", "spam") cache.set("key2", "eggs") self.assertIsNone(cache.get("key1")) cache.delete("key1") self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" cache.set("hello1", "goodbye1") self.assertFalse(cache.has_key("hello1")) self.assertFalse(cache.has_key("goodbye1")) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" cache.set("hello2", "goodbye2") self.assertNotIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): "Dummy cache values can't be incremented" cache.set('answer', 42) self.assertRaises(ValueError, cache.incr, 'answer') self.assertRaises(ValueError, cache.incr, 'does_not_exist') def test_decr(self): "Dummy cache values can't be decremented" cache.set('answer', 42) self.assertRaises(ValueError, cache.decr, 'answer') self.assertRaises(ValueError, cache.decr, 'does_not_exist') def test_data_types(self): "All data types are ignored equally by the dummy cache" stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertIsNone(cache.get("stuff")) def test_expiration(self): "Expiration has no effect on the dummy cache" cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) cache.add("expire2", "newvalue") self.assertIsNone(cache.get("expire2")) self.assertFalse(cache.has_key("expire3")) def test_unicode(self): "Unicode values are ignored by the dummy cache" stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } for (key, value) in stuff.items(): cache.set(key, value) self.assertIsNone(cache.get(key)) def test_set_many(self): "set_many does nothing for the dummy cache backend" cache.set_many({'a': 1, 'b': 2}) cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1') def test_delete_many(self): "delete_many does nothing for the dummy cache backend" cache.delete_many(['a', 'b']) def test_clear(self): "clear does nothing for the dummy cache backend" cache.clear() def test_incr_version(self): "Dummy cache versions can't be incremented" cache.set('answer', 42) self.assertRaises(ValueError, cache.incr_version, 'answer') self.assertRaises(ValueError, cache.incr_version, 'does_not_exist') def test_decr_version(self): "Dummy cache versions can't be decremented" cache.set('answer', 42) 
self.assertRaises(ValueError, cache.decr_version, 'answer') self.assertRaises(ValueError, cache.decr_version, 'does_not_exist') def custom_key_func(key, key_prefix, version): "A customized cache key function" return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) _caches_setting_base = { 'default': {}, 'prefix': {'KEY_PREFIX': 'cacheprefix{}'.format(os.getpid())}, 'v2': {'VERSION': 2}, 'custom_key': {'KEY_FUNCTION': custom_key_func}, 'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'}, 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, } def caches_setting_for_tests(base=None, **params): # `base` is used to pull in the memcached config from the original settings, # `params` are test specific overrides and `_caches_settings_base` is the # base config for the tests. # This results in the following search order: # params -> _caches_setting_base -> base base = base or {} setting = {k: base.copy() for k in _caches_setting_base.keys()} for key, cache_params in setting.items(): cache_params.update(_caches_setting_base[key]) cache_params.update(params) return setting class BaseCacheTests(object): # A common set of tests to apply to all cache backends def setUp(self): self.factory = RequestFactory() def tearDown(self): cache.clear() def test_simple(self): # Simple cache set/get works cache.set("key", "value") self.assertEqual(cache.get("key"), "value") def test_add(self): # A key can be added to a cache cache.add("addkey1", "value") result = cache.add("addkey1", "newvalue") self.assertFalse(result) self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for same cache key conflicts between shared backend cache.set('somekey', 'value') # should not be set in the prefixed cache self.assertFalse(caches['prefix'].has_key('somekey')) caches['prefix'].set('somekey', 'value2') self.assertEqual(cache.get('somekey'), 'value') self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): # Non-existent cache keys return as None/default # get with non-existent keys self.assertIsNone(cache.get("does_not_exist")) self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many cache.set('a', 'a') cache.set('b', 'b') cache.set('c', 'c') cache.set('d', 'd') self.assertDictEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) self.assertDictEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) def test_delete(self): # Cache keys can be deleted cache.set("key1", "spam") cache.set("key2", "eggs") self.assertEqual(cache.get("key1"), "spam") cache.delete("key1") self.assertIsNone(cache.get("key1")) self.assertEqual(cache.get("key2"), "eggs") def test_has_key(self): # The cache can be inspected for cache keys cache.set("hello1", "goodbye1") self.assertTrue(cache.has_key("hello1")) self.assertFalse(cache.has_key("goodbye1")) cache.set("no_expiry", "here", None) self.assertTrue(cache.has_key("no_expiry")) def test_in(self): # The in operator can be used to inspect cache contents cache.set("hello2", "goodbye2") self.assertIn("hello2", cache) self.assertNotIn("goodbye2", cache) def test_incr(self): # Cache values can be incremented cache.set('answer', 41) self.assertEqual(cache.incr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.incr('answer', 10), 52) self.assertEqual(cache.get('answer'), 52) self.assertEqual(cache.incr('answer', -10), 42) self.assertRaises(ValueError, 
cache.incr, 'does_not_exist') def test_decr(self): # Cache values can be decremented cache.set('answer', 43) self.assertEqual(cache.decr('answer'), 42) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.decr('answer', 10), 32) self.assertEqual(cache.get('answer'), 32) self.assertEqual(cache.decr('answer', -10), 42) self.assertRaises(ValueError, cache.decr, 'does_not_exist') def test_close(self): self.assertTrue(hasattr(cache, 'close')) cache.close() def test_data_types(self): # Many different data types can be cached stuff = { 'string': 'this is a string', 'int': 42, 'list': [1, 2, 3, 4], 'tuple': (1, 2, 3, 4), 'dict': {'A': 1, 'B': 2}, 'function': f, 'class': C, } cache.set("stuff", stuff) self.assertEqual(cache.get("stuff"), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date cache.set('question', my_poll) cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_write_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache write expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) def test_cache_read_for_model_instance_with_deferred(self): # Don't want fields with callable as default to be called on cache read expensive_calculation.num_runs = 0 Poll.objects.all().delete() Poll.objects.create(question="What?") self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire cache.set('expire1', 'very quickly', 1) cache.set('expire2', 'very quickly', 1) cache.set('expire3', 'very quickly', 1) time.sleep(2) self.assertIsNone(cache.get("expire1")) cache.add("expire2", "newvalue") self.assertEqual(cache.get("expire2"), "newvalue") self.assertFalse(cache.has_key("expire3")) def test_unicode(self): # Unicode values can be cached stuff = { 'ascii': 'ascii_value', 'unicode_ascii': 'Iñtërnâtiônàlizætiøn1', 'Iñtërnâtiônàlizætiøn': 'Iñtërnâtiônàlizætiøn2', 'ascii2': {'x': 1} } # Test `set` for (key, value) in stuff.items(): cache.set(key, value) self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): cache.delete(key) cache.add(key, value) self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): cache.delete(key) cache.set_many(stuff) for (key, value) in stuff.items(): self.assertEqual(cache.get(key), value) def test_binary_string(self): # 
Binary strings should be cacheable from zlib import compress, decompress value = 'value_to_be_compressed' compressed_value = compress(value.encode()) # Test set cache.set('binary1', compressed_value) compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add cache.add('binary1-add', compressed_value) compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many cache.set_many({'binary1-set_many': compressed_value}) compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many cache.set_many({"key1": "spam", "key2": "eggs"}) self.assertEqual(cache.get("key1"), "spam") self.assertEqual(cache.get("key2"), "eggs") def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_delete_many(self): # Multiple keys can be deleted using delete_many cache.set("key1", "spam") cache.set("key2", "eggs") cache.set("key3", "ham") cache.delete_many(["key1", "key2"]) self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear cache.set("key1", "spam") cache.set("key2", "eggs") cache.clear() self.assertIsNone(cache.get("key1")) self.assertIsNone(cache.get("key2")) def test_long_timeout(self): ''' Using a timeout greater than 30 days makes memcached think it is an absolute expiration timestamp instead of a relative offset. Test that we honour this convention. Refs #12399. ''' cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second self.assertEqual(cache.get('key1'), 'eggs') cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key2'), 'ham') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): ''' Passing in None into timeout results in a value that is cached forever ''' cache.set('key1', 'eggs', None) self.assertEqual(cache.get('key1'), 'eggs') cache.add('key2', 'ham', None) self.assertEqual(cache.get('key2'), 'ham') added = cache.add('key1', 'new eggs', None) self.assertEqual(added, False) self.assertEqual(cache.get('key1'), 'eggs') cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) self.assertEqual(cache.get('key3'), 'sausage') self.assertEqual(cache.get('key4'), 'lobster bisque') def test_zero_timeout(self): ''' Passing in zero into timeout results in a value that is not cached ''' cache.set('key1', 'eggs', 0) self.assertIsNone(cache.get('key1')) cache.add('key2', 'ham', 0) self.assertIsNone(cache.get('key2')) cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) self.assertIsNone(cache.get('key3')) self.assertIsNone(cache.get('key4')) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. 
cache.set("key1", "spam", 100.2) self.assertEqual(cache.get("key1"), "spam") def _perform_cull_test(self, cull_cache, initial_count, final_count): # Create initial cache key entries. This will overflow the cache, # causing a cull. for i in range(1, initial_count): cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): if cull_cache.has_key('cull%d' % i): count = count + 1 self.assertEqual(count, final_count) def test_cull(self): self._perform_cull_test(caches['cull'], 50, 29) def test_zero_cull(self): self._perform_cull_test(caches['zero_cull'], 50, 19) def test_invalid_keys(self): """ All the builtin backends (except memcached, see below) should warn on keys that would be refused by memcached. This encourages portable caching code without making it too difficult to use production backends with more liberal key rules. Refs #6447. """ # mimic custom ``make_key`` method being defined since the default will # never show the below warnings def func(key, *args): return key old_func = cache.key_func cache.key_func = func try: with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # memcached does not allow whitespace or control characters in keys cache.set('key with spaces', 'value') self.assertEqual(len(w), 2) self.assertIsInstance(w[0].message, CacheKeyWarning) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # memcached limits key length to 250 cache.set('a' * 251, 'value') self.assertEqual(len(w), 1) self.assertIsInstance(w[0].message, CacheKeyWarning) finally: cache.key_func = old_func def test_cache_versioning_get_set(self): # set, using default version = 1 cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertEqual(cache.get('answer1', version=1), 42) self.assertIsNone(cache.get('answer1', version=2)) self.assertIsNone(caches['v2'].get('answer1')) self.assertEqual(caches['v2'].get('answer1', version=1), 42) self.assertIsNone(caches['v2'].get('answer1', version=2)) # set, default version = 1, but manually override version = 2 cache.set('answer2', 42, version=2) self.assertIsNone(cache.get('answer2')) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) # v2 set, using default version = 2 caches['v2'].set('answer3', 42) self.assertIsNone(cache.get('answer3')) self.assertIsNone(cache.get('answer3', version=1)) self.assertEqual(cache.get('answer3', version=2), 42) self.assertEqual(caches['v2'].get('answer3'), 42) self.assertIsNone(caches['v2'].get('answer3', version=1)) self.assertEqual(caches['v2'].get('answer3', version=2), 42) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set('answer4', 42, version=1) self.assertEqual(cache.get('answer4'), 42) self.assertEqual(cache.get('answer4', version=1), 42) self.assertIsNone(cache.get('answer4', version=2)) self.assertIsNone(caches['v2'].get('answer4')) self.assertEqual(caches['v2'].get('answer4', version=1), 42) self.assertIsNone(caches['v2'].get('answer4', version=2)) def test_cache_versioning_add(self): # add, default version = 1, but manually override version = 2 cache.add('answer1', 42, version=2) self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.add('answer1', 37, version=2) 
self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.add('answer1', 37, version=1) self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) # v2 add, using default version = 2 caches['v2'].add('answer2', 42) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) caches['v2'].add('answer2', 37) self.assertIsNone(cache.get('answer2', version=1)) self.assertEqual(cache.get('answer2', version=2), 42) caches['v2'].add('answer2', 37, version=1) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) # v2 add, default version = 2, but manually override version = 1 caches['v2'].add('answer3', 42, version=1) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) caches['v2'].add('answer3', 37, version=1) self.assertEqual(cache.get('answer3', version=1), 42) self.assertIsNone(cache.get('answer3', version=2)) caches['v2'].add('answer3', 37) self.assertEqual(cache.get('answer3', version=1), 42) self.assertEqual(cache.get('answer3', version=2), 37) def test_cache_versioning_has_key(self): cache.set('answer1', 42) # has_key self.assertTrue(cache.has_key('answer1')) self.assertTrue(cache.has_key('answer1', version=1)) self.assertFalse(cache.has_key('answer1', version=2)) self.assertFalse(caches['v2'].has_key('answer1')) self.assertTrue(caches['v2'].has_key('answer1', version=1)) self.assertFalse(caches['v2'].has_key('answer1', version=2)) def test_cache_versioning_delete(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) cache.delete('answer1') self.assertIsNone(cache.get('answer1', version=1)) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) cache.delete('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertIsNone(cache.get('answer2', version=2)) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) caches['v2'].delete('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertIsNone(cache.get('answer3', version=2)) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) caches['v2'].delete('answer4', version=1) self.assertIsNone(cache.get('answer4', version=1)) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_incr_decr(self): cache.set('answer1', 37, version=1) cache.set('answer1', 42, version=2) cache.incr('answer1') self.assertEqual(cache.get('answer1', version=1), 38) self.assertEqual(cache.get('answer1', version=2), 42) cache.decr('answer1') self.assertEqual(cache.get('answer1', version=1), 37) self.assertEqual(cache.get('answer1', version=2), 42) cache.set('answer2', 37, version=1) cache.set('answer2', 42, version=2) cache.incr('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 43) cache.decr('answer2', version=2) self.assertEqual(cache.get('answer2', version=1), 37) self.assertEqual(cache.get('answer2', version=2), 42) cache.set('answer3', 37, version=1) cache.set('answer3', 42, version=2) caches['v2'].incr('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', version=2), 43) caches['v2'].decr('answer3') self.assertEqual(cache.get('answer3', version=1), 37) self.assertEqual(cache.get('answer3', 
version=2), 42) cache.set('answer4', 37, version=1) cache.set('answer4', 42, version=2) caches['v2'].incr('answer4', version=1) self.assertEqual(cache.get('answer4', version=1), 38) self.assertEqual(cache.get('answer4', version=2), 42) caches['v2'].decr('answer4', version=1) self.assertEqual(cache.get('answer4', version=1), 37) self.assertEqual(cache.get('answer4', version=2), 42) def test_cache_versioning_get_set_many(self): # set, using default version = 1 cache.set_many({'ford1': 37, 'arthur1': 42}) self.assertDictEqual(cache.get_many(['ford1', 'arthur1']), {'ford1': 37, 'arthur1': 42}) self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertDictEqual(cache.get_many(['ford1', 'arthur1'], version=2), {}) self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1']), {}) self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1), {'ford1': 37, 'arthur1': 42}) self.assertDictEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {}) # set, default version = 1, but manually override version = 2 cache.set_many({'ford2': 37, 'arthur2': 42}, version=2) self.assertDictEqual(cache.get_many(['ford2', 'arthur2']), {}) self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=1), {}) self.assertDictEqual(cache.get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2']), {'ford2': 37, 'arthur2': 42}) self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {}) self.assertDictEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2), {'ford2': 37, 'arthur2': 42}) # v2 set, using default version = 2 caches['v2'].set_many({'ford3': 37, 'arthur3': 42}) self.assertDictEqual(cache.get_many(['ford3', 'arthur3']), {}) self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=1), {}) self.assertDictEqual(cache.get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3']), {'ford3': 37, 'arthur3': 42}) self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {}) self.assertDictEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2), {'ford3': 37, 'arthur3': 42}) # v2 set, default version = 2, but manually override version = 1 caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1) self.assertDictEqual(cache.get_many(['ford4', 'arthur4']), {'ford4': 37, 'arthur4': 42}) self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertDictEqual(cache.get_many(['ford4', 'arthur4'], version=2), {}) self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4']), {}) self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1), {'ford4': 37, 'arthur4': 42}) self.assertDictEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {}) def test_incr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertIsNone(cache.get('answer', version=3)) self.assertEqual(cache.incr_version('answer', version=2), 3) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertIsNone(cache.get('answer', version=2)) self.assertEqual(cache.get('answer', version=3), 42) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) 
self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertIsNone(caches['v2'].get('answer2', version=3)) self.assertEqual(caches['v2'].incr_version('answer2'), 3) self.assertIsNone(caches['v2'].get('answer2')) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertEqual(caches['v2'].get('answer2', version=3), 42) self.assertRaises(ValueError, cache.incr_version, 'does_not_exist') def test_decr_version(self): cache.set('answer', 42, version=2) self.assertIsNone(cache.get('answer')) self.assertIsNone(cache.get('answer', version=1)) self.assertEqual(cache.get('answer', version=2), 42) self.assertEqual(cache.decr_version('answer', version=2), 1) self.assertEqual(cache.get('answer'), 42) self.assertEqual(cache.get('answer', version=1), 42) self.assertIsNone(cache.get('answer', version=2)) caches['v2'].set('answer2', 42) self.assertEqual(caches['v2'].get('answer2'), 42) self.assertIsNone(caches['v2'].get('answer2', version=1)) self.assertEqual(caches['v2'].get('answer2', version=2), 42) self.assertEqual(caches['v2'].decr_version('answer2'), 1) self.assertIsNone(caches['v2'].get('answer2')) self.assertEqual(caches['v2'].get('answer2', version=1), 42) self.assertIsNone(caches['v2'].get('answer2', version=2)) self.assertRaises(ValueError, cache.decr_version, 'does_not_exist', version=2) def test_custom_key_func(self): # Two caches with different key functions aren't visible to each other cache.set('answer1', 42) self.assertEqual(cache.get('answer1'), 42) self.assertIsNone(caches['custom_key'].get('answer1')) self.assertIsNone(caches['custom_key2'].get('answer1')) caches['custom_key'].set('answer2', 42) self.assertIsNone(cache.get('answer2')) self.assertEqual(caches['custom_key'].get('answer2'), 42) self.assertEqual(caches['custom_key2'].get('answer2'), 42) def test_cache_write_unpickable_object(self): update_middleware = UpdateCacheMiddleware() update_middleware.cache = cache fetch_middleware = FetchFromCacheMiddleware() fetch_middleware.cache = cache request = self.factory.get('/cache/test') request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) response = HttpResponse() content = 'Testing cookie serialization.' 
response.content = content response.set_cookie('foo', 'bar') update_middleware.process_response(request, response) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode('utf-8')) self.assertEqual(get_cache_data.cookies, response.cookies) update_middleware.process_response(request, get_cache_data) get_cache_data = fetch_middleware.process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode('utf-8')) self.assertEqual(get_cache_data.cookies, response.cookies) def test_add_fail_on_pickleerror(self): "See https://code.djangoproject.com/ticket/21200" with self.assertRaises(pickle.PickleError): cache.add('unpickable', Unpickable()) def test_set_fail_on_pickleerror(self): "See https://code.djangoproject.com/ticket/21200" with self.assertRaises(pickle.PickleError): cache.set('unpickable', Unpickable()) def test_get_or_set(self): self.assertIsNone(cache.get('projector')) self.assertEqual(cache.get_or_set('projector', 42), 42) self.assertEqual(cache.get('projector'), 42) def test_get_or_set_callable(self): def my_callable(): return 'value' self.assertEqual(cache.get_or_set('mykey', my_callable), 'value') def test_get_or_set_version(self): cache.get_or_set('brian', 1979, version=2) with self.assertRaisesMessage(ValueError, 'You need to specify a value.'): cache.get_or_set('brian') with self.assertRaisesMessage(ValueError, 'You need to specify a value.'): cache.get_or_set('brian', version=1) self.assertIsNone(cache.get('brian', version=1)) self.assertEqual(cache.get_or_set('brian', 42, version=1), 42) self.assertEqual(cache.get_or_set('brian', 1979, version=2), 1979) self.assertIsNone(cache.get('brian', version=3)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Spaces are used in the table name to ensure quoting/escaping is working LOCATION='test cache table' )) class DBCacheTests(BaseCacheTests, TransactionTestCase): available_apps = ['cache'] def setUp(self): # The super calls needs to happen first for the settings override. super(DBCacheTests, self).setUp() self.create_table() def tearDown(self): # The super call needs to happen first because it uses the database. super(DBCacheTests, self).tearDown() self.drop_table() def create_table(self): management.call_command('createcachetable', verbosity=0, interactive=False) def drop_table(self): with connection.cursor() as cursor: table_name = connection.ops.quote_name('test cache table') cursor.execute('DROP TABLE %s' % table_name) def test_zero_cull(self): self._perform_cull_test(caches['zero_cull'], 50, 18) def test_second_call_doesnt_crash(self): out = six.StringIO() management.call_command('createcachetable', stdout=out) self.assertEqual(out.getvalue(), "Cache table 'test cache table' already exists.\n" * len(settings.CACHES)) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.db.DatabaseCache', # Use another table name to avoid the 'table already exists' message. LOCATION='createcachetable_dry_run_mode' )) def test_createcachetable_dry_run_mode(self): out = six.StringIO() management.call_command('createcachetable', dry_run=True, stdout=out) output = out.getvalue() self.assertTrue(output.startswith("CREATE TABLE")) def test_createcachetable_with_table_argument(self): """ Delete and recreate cache table with legacy behavior (explicitly specifying the table name). 
""" self.drop_table() out = six.StringIO() management.call_command( 'createcachetable', 'test cache table', verbosity=2, stdout=out, ) self.assertEqual(out.getvalue(), "Cache table 'test cache table' created.\n") def test_clear_commits_transaction(self): # Ensure the database transaction is committed (#19896) cache.set("key1", "spam") cache.clear() transaction.rollback() self.assertIsNone(cache.get("key1")) @override_settings(USE_TZ=True) class DBCacheWithTimeZoneTests(DBCacheTests): pass class DBCacheRouter(object): """A router that puts the cache table on the 'other' database.""" def db_for_read(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def db_for_write(self, model, **hints): if model._meta.app_label == 'django_cache': return 'other' return None def allow_migrate(self, db, app_label, **hints): if app_label == 'django_cache': return db == 'other' return None @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'my_cache_table', }, }, ) class CreateCacheTableForDBCacheTests(TestCase): multi_db = True @override_settings(DATABASE_ROUTERS=[DBCacheRouter()]) def test_createcachetable_observes_database_router(self): # cache table should not be created on 'default' with self.assertNumQueries(0, using='default'): management.call_command('createcachetable', database='default', verbosity=0, interactive=False) # cache table should be created on 'other' # Queries: # 1: check table doesn't already exist # 2: create savepoint (if transactional DDL is supported) # 3: create the table # 4: create the index # 5: release savepoint (if transactional DDL is supported) num = 5 if connections['other'].features.can_rollback_ddl else 3 with self.assertNumQueries(num, using='other'): management.call_command('createcachetable', database='other', verbosity=0, interactive=False) class PicklingSideEffect(object): def __init__(self, cache): self.cache = cache self.locked = False def __getstate__(self): if self.cache._lock.active_writers: self.locked = True return {} @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.locmem.LocMemCache', )) class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): super(LocMemCacheTests, self).setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. 
caches['prefix']._cache = cache._cache caches['prefix']._expire_info = cache._expire_info caches['v2']._cache = cache._cache caches['v2']._expire_info = cache._expire_info caches['custom_key']._cache = cache._cache caches['custom_key']._expire_info = cache._expire_info caches['custom_key2']._cache = cache._cache caches['custom_key2']._expire_info = cache._expire_info @override_settings(CACHES={ 'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other' }, }) def test_multiple_caches(self): "Check that multiple locmem caches are isolated" cache.set('value', 42) self.assertEqual(caches['default'].get('value'), 42) self.assertIsNone(caches['other'].get('value')) def test_locking_on_pickle(self): """#20613/#18541 -- Ensures pickling is done outside of the lock.""" bad_obj = PicklingSideEffect(cache) cache.set('set', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") cache.add('add', bad_obj) self.assertFalse(bad_obj.locked, "Cache was locked during pickling") def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' _key = cache.make_key(key) cache.set(key, 1, timeout=cache.default_timeout * 10) expire = cache._expire_info[_key] cache.incr(key) self.assertEqual(expire, cache._expire_info[_key]) cache.decr(key) self.assertEqual(expire, cache._expire_info[_key]) # memcached backend isn't guaranteed to be available. # To check the memcached backend, the test settings file will # need to contain at least one cache backend setting that points at # your memcache server. memcached_params = {} for _cache_params in settings.CACHES.values(): if _cache_params['BACKEND'].startswith('django.core.cache.backends.memcached.'): memcached_params = _cache_params memcached_never_expiring_params = memcached_params.copy() memcached_never_expiring_params['TIMEOUT'] = None memcached_far_future_params = memcached_params.copy() memcached_far_future_params['TIMEOUT'] = 31536000 # 60*60*24*365, 1 year @unittest.skipUnless(memcached_params, "memcached not available") @override_settings(CACHES=caches_setting_for_tests(base=memcached_params)) class MemcachedCacheTests(BaseCacheTests, TestCase): def test_invalid_keys(self): """ On memcached, we don't introduce a duplicate key validation step (for speed reasons), we just let the memcached API library raise its own exception on bad keys. Refs #6447. In order to be memcached-API-library agnostic, we only assert that a generic exception of some kind is raised. 
""" # memcached does not allow whitespace or control characters in keys self.assertRaises(Exception, cache.set, 'key with spaces', 'value') # memcached limits key length to 250 self.assertRaises(Exception, cache.set, 'a' * 251, 'value') # Explicitly display a skipped test if no configured cache uses MemcachedCache @unittest.skipUnless( memcached_params.get('BACKEND') == 'django.core.cache.backends.memcached.MemcachedCache', "cache with python-memcached library not available") def test_memcached_uses_highest_pickle_version(self): # Regression test for #19810 for cache_key, cache_config in settings.CACHES.items(): if cache_config['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache': self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL) @override_settings(CACHES=caches_setting_for_tests(base=memcached_never_expiring_params)) def test_default_never_expiring_timeout(self): # Regression test for #22845 cache.set('infinite_foo', 'bar') self.assertEqual(cache.get('infinite_foo'), 'bar') @override_settings(CACHES=caches_setting_for_tests(base=memcached_far_future_params)) def test_default_far_future_timeout(self): # Regression test for #22845 cache.set('future_foo', 'bar') self.assertEqual(cache.get('future_foo'), 'bar') def test_cull(self): # culling isn't implemented, memcached deals with it. pass def test_zero_cull(self): # culling isn't implemented, memcached deals with it. pass def test_memcached_deletes_key_on_failed_set(self): # By default memcached allows objects up to 1MB. For the cache_db session # backend to always use the current session, memcached needs to delete # the old key if it fails to set. # pylibmc doesn't seem to have SERVER_MAX_VALUE_LENGTH as far as I can # tell from a quick check of its source code. This is falling back to # the default value exposed by python-memcached on my system. max_value_length = getattr(cache._lib, 'SERVER_MAX_VALUE_LENGTH', 1048576) cache.set('small_value', 'a') self.assertEqual(cache.get('small_value'), 'a') large_value = 'a' * (max_value_length + 1) cache.set('small_value', large_value) # small_value should be deleted, or set if configured to accept larger values value = cache.get('small_value') self.assertTrue(value is None or value == large_value) @override_settings(CACHES=caches_setting_for_tests( BACKEND='django.core.cache.backends.filebased.FileBasedCache', )) class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. """ def setUp(self): super(FileBasedCacheTests, self).setUp() self.dirname = tempfile.mkdtemp() # Caches location cannot be modified through override_settings / modify_settings, # hence settings are manipulated directly here and the setting_changed signal # is triggered manually. 
for cache_params in settings.CACHES.values(): cache_params.update({'LOCATION': self.dirname}) setting_changed.send(self.__class__, setting='CACHES', enter=False) def tearDown(self): super(FileBasedCacheTests, self).tearDown() # Call parent first, as cache.clear() may recreate cache base directory shutil.rmtree(self.dirname) def test_ignores_non_cache_files(self): fname = os.path.join(self.dirname, 'not-a-cache-file') with open(fname, 'w'): os.utime(fname, None) cache.clear() self.assertTrue(os.path.exists(fname), 'Expected cache.clear to ignore non cache files') os.remove(fname) def test_clear_does_not_remove_cache_dir(self): cache.clear() self.assertTrue(os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir') def test_creates_cache_dir_if_nonexistent(self): os.rmdir(self.dirname) cache.set('foo', 'bar') os.path.exists(self.dirname) @override_settings(CACHES={ 'default': { 'BACKEND': 'cache.liberal_backend.CacheClass', }, }) class CustomCacheKeyValidationTests(TestCase): """ Tests for the ability to mixin a custom ``validate_key`` method to a custom cache backend that otherwise inherits from a builtin backend, and override the default key validation. Refs #6447. """ def test_custom_key_validation(self): # this key is both longer than 250 characters, and has spaces key = 'some key with spaces' * 15 val = 'a value' cache.set(key, val) self.assertEqual(cache.get(key), val) @override_settings( CACHES={ 'default': { 'BACKEND': 'cache.closeable_cache.CacheClass', } } ) class CacheClosingTests(TestCase): def test_close(self): self.assertFalse(cache.closed) signals.request_finished.send(self.__class__) self.assertTrue(cache.closed) DEFAULT_MEMORY_CACHES_SETTINGS = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-snowflake', } } NEVER_EXPIRING_CACHES_SETTINGS = copy.deepcopy(DEFAULT_MEMORY_CACHES_SETTINGS) NEVER_EXPIRING_CACHES_SETTINGS['default']['TIMEOUT'] = None class DefaultNonExpiringCacheKeyTests(TestCase): """Tests that verify that settings having Cache arguments with a TIMEOUT set to `None` will create Caches that will set non-expiring keys. This fixes ticket #22085. """ def setUp(self): # The 5 minute (300 seconds) default expiration time for keys is # defined in the implementation of the initializer method of the # BaseCache type. self.DEFAULT_TIMEOUT = caches[DEFAULT_CACHE_ALIAS].default_timeout def tearDown(self): del(self.DEFAULT_TIMEOUT) def test_default_expiration_time_for_keys_is_5_minutes(self): """The default expiration time of a cache key is 5 minutes. This value is defined inside the __init__() method of the :class:`django.core.cache.backends.base.BaseCache` type. """ self.assertEqual(300, self.DEFAULT_TIMEOUT) def test_caches_with_unset_timeout_has_correct_default_timeout(self): """Caches that have the TIMEOUT parameter undefined in the default settings will use the default 5 minute timeout. """ cache = caches[DEFAULT_CACHE_ALIAS] self.assertEqual(self.DEFAULT_TIMEOUT, cache.default_timeout) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def test_caches_set_with_timeout_as_none_has_correct_default_timeout(self): """Memory caches that have the TIMEOUT parameter set to `None` in the default settings with have `None` as the default timeout. This means "no timeout". 
""" cache = caches[DEFAULT_CACHE_ALIAS] self.assertIsNone(cache.default_timeout) self.assertIsNone(cache.get_backend_timeout()) @override_settings(CACHES=DEFAULT_MEMORY_CACHES_SETTINGS) def test_caches_with_unset_timeout_set_expiring_key(self): """Memory caches that have the TIMEOUT parameter unset will set cache keys having the default 5 minute timeout. """ key = "my-key" value = "my-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNotNone(cache._expire_info[cache_key]) @override_settings(CACHES=NEVER_EXPIRING_CACHES_SETTINGS) def text_caches_set_with_timeout_as_none_set_non_expiring_key(self): """Memory caches that have the TIMEOUT parameter set to `None` will set a non expiring key by default. """ key = "another-key" value = "another-value" cache = caches[DEFAULT_CACHE_ALIAS] cache.set(key, value) cache_key = cache.make_key(key) self.assertIsNone(cache._expire_info[cache_key]) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class CacheUtils(TestCase): """TestCase for django.utils.cache functions.""" def setUp(self): self.host = 'www.example.com' self.path = '/cache/test/' self.factory = RequestFactory(HTTP_HOST=self.host) def tearDown(self): cache.clear() def _get_request_cache(self, method='GET', query_string=None, update_cache=None): request = self._get_request(self.host, self.path, method, query_string=query_string) request._cache_update_cache = True if not update_cache else update_cache return request def _set_cache(self, request, msg): response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. (None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: response = HttpResponse() if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) # Verify that a specified key_prefix is taken into account. key_prefix = 'localprefix' learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' 
'18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) response = HttpResponse() # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # Verify that the querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 'beaf87a9a99ee81c673ea2d67ccbec2a.d41d8cd98f00b204e9800998ecf8427e' ) def test_cache_key_varies_by_url(self): """ get_cache_key keys differ by fully-qualified URL instead of path """ request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com') learn_cache_key(request1, HttpResponse()) request2 = self.factory.get(self.path, HTTP_HOST='sub-2.example.com') learn_cache_key(request2, HttpResponse()) self.assertNotEqual(get_cache_key(request1), get_cache_key(request2)) def test_learn_cache_key(self): request = self.factory.head(self.path) response = HttpResponse() response['Vary'] = 'Pony' # Make sure that the Vary header is added to the key hash learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '18a03f9c9649f7d684af5db3524f5c99.d41d8cd98f00b204e9800998ecf8427e' ) def test_patch_cache_control(self): tests = ( # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts (None, {'private': True}, {'private'}), # Test whether private/public attributes are mutually exclusive ('private', {'private': True}, {'private'}), ('private', {'public': True}, {'public'}), ('public', {'public': True}, {'public'}), ('public', {'private': True}, {'private'}), ('must-revalidate,max-age=60,private', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ('must-revalidate,max-age=60,public', {'private': True}, {'must-revalidate', 'max-age=60', 'private'}), ('must-revalidate,max-age=60', {'public': True}, {'must-revalidate', 'max-age=60', 'public'}), ) cc_delim_re = re.compile(r'\s*,\s*') for initial_cc, newheaders, expected_cc in tests: response = HttpResponse() if initial_cc is not None: response['Cache-Control'] = initial_cc patch_cache_control(response, **newheaders) parts = set(cc_delim_re.split(response['Cache-Control'])) self.assertEqual(parts, expected_cc) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix', }, }, ) class PrefixedCacheUtils(CacheUtils): pass @override_settings( CACHE_MIDDLEWARE_SECONDS=60, CACHE_MIDDLEWARE_KEY_PREFIX='test', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, ) class CacheHEADTest(TestCase): def setUp(self): self.path = '/cache/test/' self.factory = RequestFactory() def tearDown(self): cache.clear() def _set_cache(self, request, msg): response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) def test_head_caches_correctly(self): test_content = 'test content' request = self.factory.head(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) def test_head_with_cached_get(self): test_content = 'test content' request = 
self.factory.get(self.path) request._cache_update_cache = True self._set_cache(request, test_content) request = self.factory.head(self.path) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNotNone(get_cache_data) self.assertEqual(test_content.encode(), get_cache_data.content) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, LANGUAGES=[ ('en', 'English'), ('es', 'Spanish'), ], ) class CacheI18nTest(TestCase): def setUp(self): self.path = '/cache/test/' self.factory = RequestFactory() def tearDown(self): cache.clear() @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") key2 = get_cache_key(request) self.assertEqual(key, key2) def check_accept_language_vary(self, accept_language, vary, reference_key): request = self.factory.get(self.path) request.META['HTTP_ACCEPT_LANGUAGE'] = accept_language request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = vary key = learn_cache_key(request, response) key2 = get_cache_key(request) self.assertEqual(key, reference_key) self.assertEqual(key2, reference_key) @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation_accept_language(self): lang = translation.get_language() self.assertEqual(lang, 'en') request = self.factory.get(self.path) request.META['HTTP_ACCEPT_ENCODING'] = 'gzip;q=1.0, identity; q=0.5, *;q=0' response = HttpResponse() response['Vary'] = 'accept-encoding' key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when translation is active") self.check_accept_language_vary( 'en-us', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'en-US', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'en-US,en;q=0.8', 'accept-encoding, accept-language, cookie', key ) self.check_accept_language_vary( 'en-US,en;q=0.8,ko;q=0.6', 'accept-language, cookie, accept-encoding', key ) self.check_accept_language_vary( 'ko-kr,ko;q=0.8,en-us;q=0.5,en;q=0.3 ', 'accept-encoding, cookie, accept-language', key ) self.check_accept_language_vary( 'ko-KR,ko;q=0.8,en-US;q=0.6,en;q=0.4', 'accept-language, accept-encoding, cookie', key ) self.check_accept_language_vary( 'ko;q=1.0,en;q=0.5', 'cookie, accept-language, accept-encoding', key ) self.check_accept_language_vary( 'ko, en', 'cookie, accept-encoding, accept-language', key ) self.check_accept_language_vary( 'ko-KR, en-US', 'accept-encoding, accept-language, cookie', key ) @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False) def test_cache_key_i18n_formatting(self): request = self.factory.get(self.path) lang = translation.get_language() response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(lang, key, "Cache keys should include the language name when formatting is active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True) def test_cache_key_i18n_timezone(self): request = self.factory.get(self.path) # This is tightly coupled to the implementation, # but it's the 
most straightforward way to test the key. tz = force_text(timezone.get_current_timezone_name(), errors='ignore') tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_') response = HttpResponse() key = learn_cache_key(request, response) self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active") key2 = get_cache_key(request) self.assertEqual(key, key2) @override_settings(USE_I18N=False, USE_L10N=False) def test_cache_key_no_i18n(self): request = self.factory.get(self.path) lang = translation.get_language() tz = force_text(timezone.get_current_timezone_name(), errors='ignore') tz = tz.encode('ascii', 'ignore').decode('ascii').replace(' ', '_') response = HttpResponse() key = learn_cache_key(request, response) self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active") self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active") @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True) def test_cache_key_with_non_ascii_tzname(self): # Regression test for #17476 class CustomTzName(timezone.UTC): name = '' def tzname(self, dt): return self.name request = self.factory.get(self.path) response = HttpResponse() with timezone.override(CustomTzName()): CustomTzName.name = 'Hora estándar de Argentina'.encode('UTF-8') # UTF-8 string sanitized_name = 'Hora_estndar_de_Argentina' self.assertIn(sanitized_name, learn_cache_key(request, response), "Cache keys should include the time zone name when time zones are active") CustomTzName.name = 'Hora estándar de Argentina' # unicode sanitized_name = 'Hora_estndar_de_Argentina' self.assertIn(sanitized_name, learn_cache_key(request, response), "Cache keys should include the time zone name when time zones are active") @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, USE_ETAGS=True, USE_I18N=True, ) def test_middleware(self): def set_cache(request, lang, msg): translation.activate(lang) response = HttpResponse() response.content = msg return UpdateCacheMiddleware().process_response(request, response) # cache with non empty request.GET request = self.factory.get(self.path, {'foo': 'bar', 'other': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) # first access, cache must return None self.assertIsNone(get_cache_data) response = HttpResponse() content = 'Check for cache with QUERY_STRING' response.content = content UpdateCacheMiddleware().process_response(request, response) get_cache_data = FetchFromCacheMiddleware().process_request(request) # cache must return content self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, content.encode()) # different QUERY_STRING, cache must be empty request = self.factory.get(self.path, {'foo': 'bar', 'somethingelse': 'true'}) request._cache_update_cache = True get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) # i18n tests en_message = "Hello world!" es_message = "Hola mundo!" 
request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'en', en_message) get_cache_data = FetchFromCacheMiddleware().process_request(request) # Check that we can recover the cache self.assertIsNotNone(get_cache_data) self.assertEqual(get_cache_data.content, en_message.encode()) # Check that we use etags self.assertTrue(get_cache_data.has_header('ETag')) # Check that we can disable etags with self.settings(USE_ETAGS=False): request._cache_update_cache = True set_cache(request, 'en', en_message) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertFalse(get_cache_data.has_header('ETag')) # change the session language and set content request = self.factory.get(self.path) request._cache_update_cache = True set_cache(request, 'es', es_message) # change again the language translation.activate('en') # retrieve the content from cache get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertEqual(get_cache_data.content, en_message.encode()) # change again the language translation.activate('es') get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertEqual(get_cache_data.content, es_message.encode()) # reset the language translation.deactivate() @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX="test", CACHE_MIDDLEWARE_SECONDS=60, USE_ETAGS=True, ) def test_middleware_doesnt_cache_streaming_response(self): request = self.factory.get(self.path) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) # This test passes on Python < 3.3 even without the corresponding code # in UpdateCacheMiddleware, because pickling a StreamingHttpResponse # fails (http://bugs.python.org/issue14288). LocMemCache silently # swallows the exception and doesn't store the response in cache. content = ['Check for cache with streaming content.'] response = StreamingHttpResponse(content) UpdateCacheMiddleware().process_response(request, response) get_cache_data = FetchFromCacheMiddleware().process_request(request) self.assertIsNone(get_cache_data) @override_settings( CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'KEY_PREFIX': 'cacheprefix' }, }, ) class PrefixedCacheI18nTest(CacheI18nTest): pass def hello_world_view(request, value): return HttpResponse('Hello World %s' % value) def csrf_view(request): return HttpResponse(csrf(request)['csrf_token']) @override_settings( CACHE_MIDDLEWARE_ALIAS='other', CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix', CACHE_MIDDLEWARE_SECONDS=30, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, 'other': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'other', 'TIMEOUT': '1', }, }, ) class CacheMiddlewareTest(TestCase): def setUp(self): super(CacheMiddlewareTest, self).setUp() self.factory = RequestFactory() self.default_cache = caches['default'] self.other_cache = caches['other'] def tearDown(self): self.default_cache.clear() self.other_cache.clear() super(CacheMiddlewareTest, self).tearDown() def test_constructor(self): """ Ensure the constructor is correctly distinguishing between usage of CacheMiddleware as Middleware vs. usage of CacheMiddleware as view decorator and setting attributes appropriately. """ # If no arguments are passed in construction, it's being used as middleware. 
middleware = CacheMiddleware() # Now test object attributes against values defined in setUp above self.assertEqual(middleware.cache_timeout, 30) self.assertEqual(middleware.key_prefix, 'middlewareprefix') self.assertEqual(middleware.cache_alias, 'other') # If arguments are being passed in construction, it's being used as a decorator. # First, test with "defaults": as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None) self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30 self.assertEqual(as_view_decorator.key_prefix, '') self.assertEqual(as_view_decorator.cache_alias, 'default') # Value of DEFAULT_CACHE_ALIAS from django.core.cache # Next, test with custom values: as_view_decorator_with_custom = CacheMiddleware(cache_timeout=60, cache_alias='other', key_prefix='foo') self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60) self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo') self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other') def test_middleware(self): middleware = CacheMiddleware() prefix_middleware = CacheMiddleware(key_prefix='prefix1') timeout_middleware = CacheMiddleware(cache_timeout=1) request = self.factory.get('/view/') # Put the request through the request middleware result = middleware.process_request(request) self.assertIsNone(result) response = hello_world_view(request, '1') # Now put the response through the response middleware response = middleware.process_response(request, response) # Repeating the request should result in a cache hit result = middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') # The same request through a different middleware won't hit result = prefix_middleware.process_request(request) self.assertIsNone(result) # The same request with a timeout _will_ hit result = timeout_middleware.process_request(request) self.assertIsNotNone(result) self.assertEqual(result.content, b'Hello World 1') def test_view_decorator(self): # decorate the same view with different cache decorators default_view = cache_page(3)(hello_world_view) default_with_prefix_view = cache_page(3, key_prefix='prefix1')(hello_world_view) explicit_default_view = cache_page(3, cache='default')(hello_world_view) explicit_default_with_prefix_view = cache_page(3, cache='default', key_prefix='prefix1')(hello_world_view) other_view = cache_page(1, cache='other')(hello_world_view) other_with_prefix_view = cache_page(1, cache='other', key_prefix='prefix2')(hello_world_view) request = self.factory.get('/view/') # Request the view once response = default_view(request, '1') self.assertEqual(response.content, b'Hello World 1') # Request again -- hit the cache response = default_view(request, '2') self.assertEqual(response.content, b'Hello World 1') # Requesting the same view with the explicit cache should yield the same result response = explicit_default_view(request, '3') self.assertEqual(response.content, b'Hello World 1') # Requesting with a prefix will hit a different cache key response = explicit_default_with_prefix_view(request, '4') self.assertEqual(response.content, b'Hello World 4') # Hitting the same view again gives a cache hit response = explicit_default_with_prefix_view(request, '5') self.assertEqual(response.content, b'Hello World 4') # And going back to the implicit cache will hit the same cache response = default_with_prefix_view(request, '6') self.assertEqual(response.content, b'Hello World 4') # Requesting from an alternate cache 
won't hit cache response = other_view(request, '7') self.assertEqual(response.content, b'Hello World 7') # But a repeated hit will hit cache response = other_view(request, '8') self.assertEqual(response.content, b'Hello World 7') # And prefixing the alternate cache yields yet another cache entry response = other_with_prefix_view(request, '9') self.assertEqual(response.content, b'Hello World 9') # But if we wait a couple of seconds... time.sleep(2) # ... the default cache will still hit caches['default'] response = default_view(request, '11') self.assertEqual(response.content, b'Hello World 1') # ... the default cache with a prefix will still hit response = default_with_prefix_view(request, '12') self.assertEqual(response.content, b'Hello World 4') # ... the explicit default cache will still hit response = explicit_default_view(request, '13') self.assertEqual(response.content, b'Hello World 1') # ... the explicit default cache with a prefix will still hit response = explicit_default_with_prefix_view(request, '14') self.assertEqual(response.content, b'Hello World 4') # .. but a rapidly expiring cache won't hit response = other_view(request, '15') self.assertEqual(response.content, b'Hello World 15') # .. even if it has a prefix response = other_with_prefix_view(request, '16') self.assertEqual(response.content, b'Hello World 16') def test_sensitive_cookie_not_cached(self): """ Django must prevent caching of responses that set a user-specific (and maybe security sensitive) cookie in response to a cookie-less request. """ csrf_middleware = CsrfViewMiddleware() cache_middleware = CacheMiddleware() request = self.factory.get('/view/') self.assertIsNone(cache_middleware.process_request(request)) csrf_middleware.process_view(request, csrf_view, (), {}) response = csrf_view(request) response = csrf_middleware.process_response(request, response) response = cache_middleware.process_response(request, response) # Inserting a CSRF cookie in a cookie-less request prevented caching. self.assertIsNone(cache_middleware.process_request(request)) @override_settings( CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix', CACHE_MIDDLEWARE_SECONDS=1, CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', }, }, USE_I18N=False, ) class TestWithTemplateResponse(TestCase): """ Tests various headers w/ TemplateResponse. Most are probably redundant since they manipulate the same object anyway but the Etag header is 'special' because it relies on the content being complete (which is not necessarily always the case with a TemplateResponse) """ def setUp(self): self.path = '/cache/test/' self.factory = RequestFactory() def tearDown(self): cache.clear() def test_patch_vary_headers(self): headers = ( # Initial vary, new headers, resulting vary. 
(None, ('Accept-Encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'), ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'), ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'), ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'), ) for initial_vary, newheaders, resulting_vary in headers: template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) if initial_vary is not None: response['Vary'] = initial_vary patch_vary_headers(response, newheaders) self.assertEqual(response['Vary'], resulting_vary) def test_get_cache_key(self): request = self.factory.get(self.path) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) key_prefix = 'localprefix' # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) # Verify that a specified key_prefix is taken into account. learn_cache_key(request, response, key_prefix=key_prefix) self.assertEqual( get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.' '58a0a05c8a5620f813686ff969c26853.d41d8cd98f00b204e9800998ecf8427e' ) def test_get_cache_key_with_query(self): request = self.factory.get(self.path, {'test': 1}) template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) # Expect None if no headers have been set yet. self.assertIsNone(get_cache_key(request)) # Set headers to an empty list. learn_cache_key(request, response) # Verify that the querystring is taken into account. self.assertEqual( get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.' 
'0f1c2d56633c943073c4569d9a9502fe.d41d8cd98f00b204e9800998ecf8427e' ) @override_settings(USE_ETAGS=False) def test_without_etag(self): template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) self.assertFalse(response.has_header('ETag')) patch_response_headers(response) self.assertFalse(response.has_header('ETag')) response = response.render() self.assertFalse(response.has_header('ETag')) @override_settings(USE_ETAGS=True) def test_with_etag(self): template = engines['django'].from_string("This is a test") response = TemplateResponse(HttpRequest(), template) self.assertFalse(response.has_header('ETag')) patch_response_headers(response) self.assertFalse(response.has_header('ETag')) response = response.render() self.assertTrue(response.has_header('ETag')) class TestMakeTemplateFragmentKey(TestCase): def test_without_vary_on(self): key = make_template_fragment_key('a.fragment') self.assertEqual(key, 'template.cache.a.fragment.d41d8cd98f00b204e9800998ecf8427e') def test_with_one_vary_on(self): key = make_template_fragment_key('foo', ['abc']) self.assertEqual(key, 'template.cache.foo.900150983cd24fb0d6963f7d28e17f72') def test_with_many_vary_on(self): key = make_template_fragment_key('bar', ['abc', 'def']) self.assertEqual(key, 'template.cache.bar.4b35f12ab03cec09beec4c21b2d2fa88') def test_proper_escaping(self): key = make_template_fragment_key('spam', ['abc:def%']) self.assertEqual(key, 'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469') class CacheHandlerTest(TestCase): def test_same_instance(self): """ Attempting to retrieve the same alias should yield the same instance. """ cache1 = caches['default'] cache2 = caches['default'] self.assertIs(cache1, cache2) def test_per_thread(self): """ Requesting the same alias from separate threads should yield separate instances. """ c = [] def runner(): c.append(caches['default']) for x in range(2): t = threading.Thread(target=runner) t.start() t.join() self.assertIsNot(c[0], c[1])
weiawe/django
tests/cache/tests.py
Python
bsd-3-clause
85,300
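The test file above spends much of its length on Django's cache key versioning: every stored value is namespaced by an integer version, and incr_version/decr_version move a value between namespaces by re-storing it under the new version and dropping the old key. A minimal, self-contained sketch of that behaviour with a plain dict-backed store follows; VersionedCache and its methods are illustrative stand-ins, not django.core.cache, although combining prefix, version and key is similar in spirit to Django's default key function.

# Minimal sketch of version-namespaced cache keys, loosely mirroring the
# semantics exercised by the tests above. Hypothetical class, not Django.
class VersionedCache(object):
    def __init__(self, key_prefix='', default_version=1):
        self._store = {}
        self.key_prefix = key_prefix
        self.default_version = default_version

    def make_key(self, key, version=None):
        # Join prefix, version and key into one namespaced string.
        if version is None:
            version = self.default_version
        return '%s:%s:%s' % (self.key_prefix, version, key)

    def set(self, key, value, version=None):
        self._store[self.make_key(key, version)] = value

    def get(self, key, default=None, version=None):
        return self._store.get(self.make_key(key, version), default)

    def incr_version(self, key, delta=1, version=None):
        # Move the value from `version` to `version + delta`.
        if version is None:
            version = self.default_version
        value = self.get(key, version=version)
        if value is None:
            raise ValueError('Key %r not found' % key)
        self.set(key, value, version=version + delta)
        del self._store[self.make_key(key, version)]
        return version + delta

cache = VersionedCache()
cache.set('answer', 42, version=2)
assert cache.get('answer', version=2) == 42
assert cache.incr_version('answer', version=2) == 3
assert cache.get('answer', version=2) is None
assert cache.get('answer', version=3) == 42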
from __future__ import absolute_import

from ..model import Model
from ..core.properties import (Any, Dict, String)


class ImageSource(Model):
    """ A base class for all image source types. """

    _args = ('url', 'extra_url_vars')

    url = String(default="", help="""
    tile service url (example: http://c.tile.openstreetmap.org/{Z}/{X}/{Y}.png)
    """)

    extra_url_vars = Dict(String, Any, help="""
    A dictionary that maps url variable template keys to values.

    These variables are useful for parts of tile urls which do not change from
    tile to tile (e.g. server host name, or layer name).
    """)
phobson/bokeh
bokeh/models/images.py
Python
bsd-3-clause
617
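The ImageSource model above only declares a url template and a dictionary of extra template variables; concrete tile URLs come from substituting {X}/{Y}/{Z} plus any extra variables into the template. The short sketch below shows that substitution step; the tile_url helper and the example host variable are hypothetical illustrations, not part of bokeh's API.

# Hypothetical helper showing how a url template plus extra_url_vars could be
# expanded into a concrete tile URL. Not part of bokeh.
def tile_url(template, x, y, z, extra_url_vars=None):
    variables = {'X': x, 'Y': y, 'Z': z}
    variables.update(extra_url_vars or {})
    return template.format(**variables)

# 'host' stands in for a per-deployment value that never changes per tile.
print(tile_url('http://{host}/tiles/{Z}/{X}/{Y}.png',
               x=3, y=5, z=2, extra_url_vars={'host': 'tiles.example.com'}))
# -> http://tiles.example.com/tiles/2/3/5.png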
#######################################################################
#
# Author: Malte Helmert ([email protected])
# (C) Copyright 2003-2004 Malte Helmert
#
# This file is part of LAMA.
#
# LAMA is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the license, or (at your option) any later version.
#
# LAMA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
#######################################################################

import cStringIO
import textwrap

__all__ = ["print_nested_list"]

def tokenize_list(obj):
    if isinstance(obj, list):
        yield "("
        for item in obj:
            for elem in tokenize_list(item):
                yield elem
        yield ")"
    else:
        yield obj

def wrap_lines(lines):
    for line in lines:
        indent = " " * (len(line) - len(line.lstrip()) + 4)
        line = line.replace("-", "_")  # textwrap breaks on "-", but not "_"
        line = textwrap.fill(line, subsequent_indent=indent,
                             break_long_words=False)
        yield line.replace("_", "-")

def print_nested_list(nested_list):
    stream = cStringIO.StringIO()
    indent = 0
    startofline = True
    pendingspace = False
    for token in tokenize_list(nested_list):
        if token == "(":
            if not startofline:
                stream.write("\n")
            stream.write("%s(" % (" " * indent))
            indent += 2
            startofline = False
            pendingspace = False
        elif token == ")":
            indent -= 2
            stream.write(")")
            startofline = False
            pendingspace = False
        else:
            if startofline:
                stream.write(" " * indent)
            if pendingspace:
                stream.write(" ")
            stream.write(token)
            startofline = False
            pendingspace = True
    for line in wrap_lines(stream.getvalue().splitlines()):
        print line
PlanTool/plantool
wrappingPlanners/Deterministic/LAMA/seq-sat-lama/lama/translate/pddl/pretty_print.py
Python
gpl-2.0
2,178
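print_nested_list above flattens a nested Python list into a stream of tokens and re-emits it as an indented, Lisp-style S-expression (the form PDDL uses). The token generator is the core idea; a small self-contained sketch of it with a usage example follows, written for Python 3 for easy experimentation (the module itself targets Python 2, as the cStringIO import and print statement show), and the sample nested list is purely illustrative.

# Sketch of the tokenizer idea from pretty_print.py, not the LAMA module itself.
def tokenize_list(obj):
    # Lists become "(" ... ")" token runs; leaf values are yielded unchanged.
    if isinstance(obj, list):
        yield "("
        for item in obj:
            yield from tokenize_list(item)
        yield ")"
    else:
        yield obj

action = ["action", "pick-up", [":parameters", ["?x"]]]
print(list(tokenize_list(action)))
# ['(', 'action', 'pick-up', '(', ':parameters', '(', '?x', ')', ')', ')']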
#!/usr/bin/env python
"""      turtle-example-suite:

        xtx_lindenmayer_indian.py

Each morning women in Tamil Nadu, in southern
India, place designs, created by using rice
flour and known as kolam on the thresholds of
their homes. These can be described by
Lindenmayer systems, which can easily be
implemented with turtle graphics and Python.

Two examples are shown here:
(1) the snake kolam
(2) anklets of Krishna

Taken from Marcia Ascher: Mathematics
Elsewhere, An Exploration of Ideas Across
Cultures
"""
################################
# Mini Lindenmayer tool
###############################

from turtle import *

def replace( seq, replacementRules, n ):
    for i in range(n):
        newseq = ""
        for element in seq:
            newseq = newseq + replacementRules.get(element,element)
        seq = newseq
    return seq

def draw( commands, rules ):
    for b in commands:
        try:
            rules[b]()
        except TypeError:
            try:
                draw(rules[b], rules)
            except:
                pass

def main():
    ################################
    # Example 1: Snake kolam
    ################################

    def r():
        right(45)

    def l():
        left(45)

    def f():
        forward(7.5)

    snake_rules = {"-":r, "+":l, "f":f, "b":"f+f+f--f--f+f+f"}
    snake_replacementRules = {"b": "b+f+b--f--b+f+b"}
    snake_start = "b--f--b--f"

    drawing = replace(snake_start, snake_replacementRules, 3)

    reset()
    speed(3)
    tracer(1,0)
    ht()
    up()
    backward(195)
    down()
    draw(drawing, snake_rules)

    from time import sleep
    sleep(3)

    ################################
    # Example 2: Anklets of Krishna
    ################################

    def A():
        color("red")
        circle(10,90)

    def B():
        from math import sqrt
        color("black")
        l = 5/sqrt(2)
        forward(l)
        circle(l, 270)
        forward(l)

    def F():
        color("green")
        forward(10)

    krishna_rules = {"a":A, "b":B, "f":F}
    krishna_replacementRules = {"a" : "afbfa", "b" : "afbfbfbfa" }
    krishna_start = "fbfbfbfb"

    reset()
    speed(0)
    tracer(3,0)
    ht()
    left(45)
    drawing = replace(krishna_start, krishna_replacementRules, 3)
    draw(drawing, krishna_rules)
    tracer(1)
    return "Done!"

if __name__=='__main__':
    msg = main()
    print msg
    mainloop()
teeple/pns_server
work/install/Python-2.7.4/Demo/turtle/tdemo_lindenmayer_indian.py
Python
gpl-2.0
2,432
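The kolam drawings above are driven by replace(), which performs the Lindenmayer-system expansion: on each pass every symbol that has a rewrite rule is replaced by its expansion, and symbols without a rule are copied through unchanged. The sketch below reproduces just that string-rewriting step without any turtle graphics, using the snake-kolam rule from the file; the expand helper is an illustrative rewrite, not the demo's own function.

# L-system string rewriting only (no graphics); mirrors replace() above.
def expand(seq, rules, n):
    for _ in range(n):
        seq = "".join(rules.get(symbol, symbol) for symbol in seq)
    return seq

snake_rules = {"b": "b+f+b--f--b+f+b"}
print(expand("b--f--b--f", snake_rules, 1))
# -> b+f+b--f--b+f+b--f--b+f+b--f--b+f+b--f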
# encoding: utf-8 import datetime from south.db import db from south.v2 import DataMigration from django.db import models class Migration(DataMigration): def forwards(self, orm): objs = orm.ProjectApplication.objects apps = objs.filter(chain__chained_project=None).order_by( 'chain', '-id') checked_chain = None projs = [] for app in apps: chain = app.chain if chain.pk != checked_chain: checked_chain = chain.pk projs.append(orm.Project(id=chain, application=app, state=1)) orm.Project.objects.bulk_create(projs) def backwards(self, orm): "Write your backwards methods here." models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'im.additionalmail': { 'Meta': {'object_name': 'AdditionalMail'}, 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}) }, 
'im.approvalterms': { 'Meta': {'object_name': 'ApprovalTerms'}, 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'location': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'im.astakosuser': { 'Meta': {'object_name': 'AstakosUser', '_ormbases': ['auth.User']}, 'accepted_email': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '75', 'null': 'True', 'blank': 'True'}), 'accepted_policy': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'activation_sent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'auth_token': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'auth_token_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'auth_token_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'date_signed_terms': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'deactivated_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'deactivated_reason': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}), 'disturbed_quota': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}), 'email_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'has_credits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'has_signed_terms': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'invitations': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'is_rejected': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'is_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'level': ('django.db.models.fields.IntegerField', [], {'default': '4'}), 'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'moderated_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'moderated_data': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['im.Resource']", 'null': 'True', 'through': "orm['im.AstakosUserQuota']", 'symmetrical': 'False'}), 'rejected_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'updated': ('django.db.models.fields.DateTimeField', [], {}), 'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}), 'uuid': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}), 'verification_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True'}), 'verified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}) }, 'im.astakosuserauthprovider': { 'Meta': {'unique_together': "(('identifier', 'module', 'user'),)", 'object_name': 
'AstakosUserAuthProvider'}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'affiliation': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}), 'auth_backend': ('django.db.models.fields.CharField', [], {'default': "'astakos'", 'max_length': '255'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'info_data': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'module': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '255'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'auth_providers'", 'to': "orm['im.AstakosUser']"}) }, 'im.astakosuserquota': { 'Meta': {'unique_together': "(('resource', 'user'),)", 'object_name': 'AstakosUserQuota'}, 'capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'max_digits': '38', 'decimal_places': '0'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Resource']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}) }, 'im.authproviderpolicyprofile': { 'Meta': {'object_name': 'AuthProviderPolicyProfile'}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'authpolicy_profiles'", 'symmetrical': 'False', 'to': "orm['auth.Group']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_exclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'policy_add': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy_automoderate': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy_create': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy_limit': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}), 'policy_login': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy_remove': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy_required': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'policy_switch': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'priority': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'authpolicy_profiles'", 'symmetrical': 'False', 'to': "orm['im.AstakosUser']"}) }, 'im.chain': { 'Meta': {'object_name': 'Chain'}, 'chain': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'im.component': { 'Meta': {'object_name': 'Component'}, 'auth_token': 
('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'auth_token_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'auth_token_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}), 'url': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}) }, 'im.emailchange': { 'Meta': {'object_name': 'EmailChange'}, 'activation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'new_email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'requested_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emailchanges'", 'unique': 'True', 'to': "orm['im.AstakosUser']"}) }, 'im.endpoint': { 'Meta': {'object_name': 'Endpoint'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'service': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'endpoints'", 'to': "orm['im.Service']"}) }, 'im.endpointdata': { 'Meta': {'unique_together': "(('endpoint', 'key'),)", 'object_name': 'EndpointData'}, 'endpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'data'", 'to': "orm['im.Endpoint']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '1024'}) }, 'im.invitation': { 'Meta': {'object_name': 'Invitation'}, 'code': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True'}), 'consumed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'inviter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invitations_sent'", 'null': 'True', 'to': "orm['im.AstakosUser']"}), 'is_consumed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}), 'realname': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) }, 'im.pendingthirdpartyuser': { 'Meta': {'unique_together': "(('provider', 'third_party_identifier'),)", 'object_name': 'PendingThirdPartyUser'}, 'affiliation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'info': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 
'null': 'True', 'blank': 'True'}), 'provider': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}), 'third_party_identifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'token': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'im.project': { 'Meta': {'object_name': 'Project'}, 'application': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'project'", 'unique': 'True', 'to': "orm['im.ProjectApplication']"}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'deactivation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'deactivation_reason': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'id': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'chained_project'", 'unique': 'True', 'primary_key': 'True', 'db_column': "'id'", 'to': "orm['im.Chain']"}), 'last_approval_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['im.AstakosUser']", 'through': "orm['im.ProjectMembership']", 'symmetrical': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'unique': 'True', 'null': 'True', 'db_index': 'True'}), 'state': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'}) }, 'im.projectapplication': { 'Meta': {'unique_together': "(('chain', 'id'),)", 'object_name': 'ProjectApplication'}, 'applicant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects_applied'", 'to': "orm['im.AstakosUser']"}), 'chain': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'chained_apps'", 'db_column': "'chain'", 'to': "orm['im.Chain']"}), 'comments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'end_date': ('django.db.models.fields.DateTimeField', [], {}), 'homepage': ('django.db.models.fields.URLField', [], {'max_length': '255', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'issue_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'limit_on_members_number': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}), 'member_join_policy': ('django.db.models.fields.IntegerField', [], {}), 'member_leave_policy': ('django.db.models.fields.IntegerField', [], {}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'projects_owned'", 'to': "orm['im.AstakosUser']"}), 'precursor_application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.ProjectApplication']", 'null': 'True', 'blank': 'True'}), 'resource_grants': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['im.Resource']", 'null': 'True', 'through': "orm['im.ProjectResourceGrant']", 'blank': 'True'}), 'response': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'response_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'start_date': 
('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}) }, 'im.projectmembership': { 'Meta': {'unique_together': "(('person', 'project'),)", 'object_name': 'ProjectMembership'}, 'acceptance_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'leave_request_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}), 'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Project']"}), 'request_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}) }, 'im.projectmembershiphistory': { 'Meta': {'object_name': 'ProjectMembershipHistory'}, 'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'person': ('django.db.models.fields.BigIntegerField', [], {}), 'project': ('django.db.models.fields.BigIntegerField', [], {}), 'reason': ('django.db.models.fields.IntegerField', [], {}), 'serial': ('django.db.models.fields.BigIntegerField', [], {}) }, 'im.projectresourcegrant': { 'Meta': {'unique_together': "(('resource', 'project_application'),)", 'object_name': 'ProjectResourceGrant'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'member_capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'default': '0', 'max_digits': '38', 'decimal_places': '0'}), 'project_application': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.ProjectApplication']", 'null': 'True'}), 'project_capacity': ('snf_django.lib.db.fields.IntDecimalField', [], {'null': 'True', 'max_digits': '38', 'decimal_places': '0'}), 'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Resource']"}) }, 'im.resource': { 'Meta': {'object_name': 'Resource'}, 'allow_in_projects': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}), 'desc': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'service_origin': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'service_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'unit': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}), 'uplimit': ('snf_django.lib.db.fields.IntDecimalField', [], {'default': '0', 'max_digits': '38', 'decimal_places': '0'}) }, 'im.serial': { 'Meta': {'object_name': 'Serial'}, 'serial': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'im.service': { 'Meta': {'object_name': 'Service'}, 'component': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.Component']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'type': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'im.sessioncatalog': { 'Meta': {'object_name': 'SessionCatalog'}, 'id': 
('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sessions'", 'null': 'True', 'to': "orm['im.AstakosUser']"}) }, 'im.usersetting': { 'Meta': {'unique_together': "(('user', 'setting'),)", 'object_name': 'UserSetting'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'setting': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['im.AstakosUser']"}), 'value': ('django.db.models.fields.IntegerField', [], {}) } } complete_apps = ['im']
grnet/synnefo
snf-astakos-app/astakos/im/migrations/old/0043_uninitialized_projects.py
Python
gpl-3.0
25,194
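Each field in the frozen `models` dict above is stored as a triple of (field class dotted path, positional args, keyword args), with the args and kwargs encoded as strings. As a rough, hypothetical sketch (the helper `rehydrate_field` is mine and not part of the migration, and it assumes Django is importable), this is in spirit how South's frozen ORM turns such a triple back into a live field instance:

# Hypothetical illustration only -- not part of the migration above.
from importlib import import_module

def rehydrate_field(frozen):
    """Rebuild a Django field instance from a South-style frozen triple."""
    path, args, kwargs = frozen
    module_path, class_name = path.rsplit('.', 1)
    field_class = getattr(import_module(module_path), class_name)
    # South freezes args/kwargs as source strings; eval() is a stand-in for
    # its real parsing logic.
    return field_class(*[eval(a) for a in args],
                       **dict((k, eval(v)) for k, v in kwargs.items()))

# e.g. the 'location' field of 'im.approvalterms' above:
location = rehydrate_field(
    ('django.db.models.fields.CharField', [], {'max_length': '255'}))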
#!/usr/bin/python # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: ec2 short_description: create, terminate, start or stop an instance in ec2 description: - Creates or terminates ec2 instances. - C(state=restarted) was added in 2.2 version_added: "0.9" options: key_name: description: - key pair to use on the instance required: false default: null aliases: ['keypair'] id: version_added: "1.1" description: - identifier for this instance or set of instances, so that the module will be idempotent with respect to EC2 instances. This identifier is valid for at least 24 hours after the termination of the instance, and should not be reused for another call later on. For details, see the description of client token at U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Run_Instance_Idempotency.html). required: false default: null aliases: [] group: description: - security group (or list of groups) to use with the instance required: false default: null aliases: [ 'groups' ] group_id: version_added: "1.1" description: - security group id (or list of ids) to use with the instance required: false default: null aliases: [] region: version_added: "1.2" description: - The AWS region to use. Must be specified if ec2_url is not used. If not specified then the value of the EC2_REGION environment variable, if any, is used. See U(http://docs.aws.amazon.com/general/latest/gr/rande.html#ec2_region) required: false default: null aliases: [ 'aws_region', 'ec2_region' ] zone: version_added: "1.2" description: - AWS availability zone in which to launch the instance required: false default: null aliases: [ 'aws_zone', 'ec2_zone' ] instance_type: description: - instance type to use for the instance, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instance-types.html) required: true default: null aliases: [] tenancy: version_added: "1.9" description: - An instance with a tenancy of "dedicated" runs on single-tenant hardware and can only be launched into a VPC. Note that to use dedicated tenancy you MUST specify a vpc_subnet_id as well. Dedicated tenancy is not available for EC2 "micro" instances. required: false default: default choices: [ "default", "dedicated" ] aliases: [] spot_price: version_added: "1.5" description: - Maximum spot price to bid, If not set a regular on-demand instance is requested. A spot request is made with this maximum bid. When it is filled, the instance is started. required: false default: null aliases: [] spot_type: version_added: "2.0" description: - Type of spot request; one of "one-time" or "persistent". Defaults to "one-time" if not supplied. 
required: false default: "one-time" choices: [ "one-time", "persistent" ] aliases: [] image: description: - I(ami) ID to use for the instance required: true default: null aliases: [] kernel: description: - kernel I(eki) to use for the instance required: false default: null aliases: [] ramdisk: description: - ramdisk I(eri) to use for the instance required: false default: null aliases: [] wait: description: - wait for the instance to be 'running' before returning. Does not wait for SSH, see 'wait_for' example for details. required: false default: "no" choices: [ "yes", "no" ] aliases: [] wait_timeout: description: - how long before wait gives up, in seconds default: 300 aliases: [] spot_wait_timeout: version_added: "1.5" description: - how long to wait for the spot instance request to be fulfilled default: 600 aliases: [] count: description: - number of instances to launch required: False default: 1 aliases: [] monitoring: version_added: "1.1" description: - enable detailed monitoring (CloudWatch) for instance required: false default: null choices: [ "yes", "no" ] aliases: [] user_data: version_added: "0.9" description: - opaque blob of data which is made available to the ec2 instance required: false default: null aliases: [] instance_tags: version_added: "1.0" description: - a hash/dictionary of tags to add to the new instance or for starting/stopping instance by tag; '{"key":"value"}' and '{"key":"value","key":"value"}' required: false default: null aliases: [] placement_group: version_added: "1.3" description: - placement group for the instance when using EC2 Clustered Compute required: false default: null aliases: [] vpc_subnet_id: version_added: "1.1" description: - the subnet ID in which to launch the instance (VPC) required: false default: null aliases: [] assign_public_ip: version_added: "1.5" description: - when provisioning within vpc, assign a public IP address. Boto library must be 2.13.0+ required: false default: null choices: [ "yes", "no" ] aliases: [] private_ip: version_added: "1.2" description: - the private ip address to assign the instance (from the vpc subnet) required: false default: null aliases: [] instance_profile_name: version_added: "1.3" description: - Name of the IAM instance profile to use. 
Boto library must be 2.5.0+ required: false default: null aliases: [] instance_ids: version_added: "1.3" description: - "list of instance ids, currently used for states: absent, running, stopped" required: false default: null aliases: ['instance_id'] source_dest_check: version_added: "1.6" description: - Enable or Disable the Source/Destination checks (for NAT instances and Virtual Routers) required: false default: yes choices: [ "yes", "no" ] termination_protection: version_added: "2.0" description: - Enable or Disable the Termination Protection required: false default: no choices: [ "yes", "no" ] instance_initiated_shutdown_behavior: version_added: "2.2" description: - Set whether AWS will Stop or Terminate an instance on shutdown required: false default: 'stop' choices: [ "stop", "terminate" ] state: version_added: "1.3" description: - create or terminate instances required: false default: 'present' aliases: [] choices: ['present', 'absent', 'running', 'restarted', 'stopped'] volumes: version_added: "1.5" description: - a list of hash/dictionaries of volumes to add to the new instance; '[{"key":"value", "key":"value"}]'; keys allowed are - device_name (str; required), delete_on_termination (bool; False), device_type (deprecated), ephemeral (str), encrypted (bool; False), snapshot (str), volume_type (str), iops (int) - device_type is deprecated use volume_type, iops must be set when volume_type='io1', ephemeral and snapshot are mutually exclusive. required: false default: null aliases: [] ebs_optimized: version_added: "1.6" description: - whether instance is using optimized EBS volumes, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSOptimized.html) required: false default: 'false' exact_count: version_added: "1.5" description: - An integer value which indicates how many instances that match the 'count_tag' parameter should be running. Instances are either created or terminated based on this value. required: false default: null aliases: [] count_tag: version_added: "1.5" description: - Used with 'exact_count' to determine how many nodes based on a specific tag criteria should be running. This can be expressed in multiple ways and is shown in the EXAMPLES section. For instance, one can request 25 servers that are tagged with "class=webserver". The specified tag must already exist or be passed in as the 'instance_tags' option. required: false default: null aliases: [] network_interfaces: version_added: "2.0" description: - A list of existing network interfaces to attach to the instance at launch. When specifying existing network interfaces, none of the assign_public_ip, private_ip, vpc_subnet_id, group, or group_id parameters may be used. (Those parameters are for creating a new network interface at launch.) required: false default: null aliases: ['network_interface'] spot_launch_group: version_added: "2.1" description: - Launch group for spot request, see U(http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/how-spot-instances-work.html#spot-launch-group) required: false default: null author: - "Tim Gerla (@tgerla)" - "Lester Wade (@lwade)" - "Seth Vidal" extends_documentation_fragment: aws ''' EXAMPLES = ''' # Note: These examples do not set authentication details, see the AWS Guide for details. 
# Basic provisioning example - ec2: key_name: mykey instance_type: t2.micro image: ami-123456 wait: yes group: webserver count: 3 vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # Advanced example with tagging and CloudWatch - ec2: key_name: mykey group: databases instance_type: t2.micro image: ami-123456 wait: yes wait_timeout: 500 count: 5 instance_tags: db: postgres monitoring: yes vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # Single instance with additional IOPS volume from snapshot and volume delete on termination - ec2: key_name: mykey group: webserver instance_type: c3.medium image: ami-123456 wait: yes wait_timeout: 500 volumes: - device_name: /dev/sdb snapshot: snap-abcdef12 volume_type: io1 iops: 1000 volume_size: 100 delete_on_termination: true monitoring: yes vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # Single instance with ssd gp2 root volume - ec2: key_name: mykey group: webserver instance_type: c3.medium image: ami-123456 wait: yes wait_timeout: 500 volumes: - device_name: /dev/xvda volume_type: gp2 volume_size: 8 vpc_subnet_id: subnet-29e63245 assign_public_ip: yes exact_count: 1 # Multiple groups example - ec2: key_name: mykey group: ['databases', 'internal-services', 'sshable', 'and-so-forth'] instance_type: m1.large image: ami-6e649707 wait: yes wait_timeout: 500 count: 5 instance_tags: db: postgres monitoring: yes vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # Multiple instances with additional volume from snapshot - ec2: key_name: mykey group: webserver instance_type: m1.large image: ami-6e649707 wait: yes wait_timeout: 500 count: 5 volumes: - device_name: /dev/sdb snapshot: snap-abcdef12 volume_size: 10 monitoring: yes vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # Dedicated tenancy example - local_action: module: ec2 assign_public_ip: yes group_id: sg-1dc53f72 key_name: mykey image: ami-6e649707 instance_type: m1.small tenancy: dedicated vpc_subnet_id: subnet-29e63245 wait: yes # Spot instance example - ec2: spot_price: 0.24 spot_wait_timeout: 600 keypair: mykey group_id: sg-1dc53f72 instance_type: m1.small image: ami-6e649707 wait: yes vpc_subnet_id: subnet-29e63245 assign_public_ip: yes spot_launch_group: report_generators # Examples using pre-existing network interfaces - ec2: key_name: mykey instance_type: t2.small image: ami-f005ba11 network_interface: eni-deadbeef - ec2: key_name: mykey instance_type: t2.small image: ami-f005ba11 network_interfaces: ['eni-deadbeef', 'eni-5ca1ab1e'] # Launch instances, runs some tasks # and then terminate them - name: Create a sandbox instance hosts: localhost gather_facts: False vars: key_name: my_keypair instance_type: m1.small security_group: my_securitygroup image: my_ami_id region: us-east-1 tasks: - name: Launch instance ec2: key_name: "{{ keypair }}" group: "{{ security_group }}" instance_type: "{{ instance_type }}" image: "{{ image }}" wait: true region: "{{ region }}" vpc_subnet_id: subnet-29e63245 assign_public_ip: yes register: ec2 - name: Add new instance to host group add_host: hostname: "{{ item.public_ip }}" groupname: launched with_items: "{{ ec2.instances }}" - name: Wait for SSH to come up wait_for: host: "{{ item.public_dns_name }}" port: 22 delay: 60 timeout: 320 state: started with_items: "{{ ec2.instances }}" - name: Configure instance(s) hosts: launched become: True gather_facts: True roles: - my_awesome_role - my_awesome_test - name: Terminate instances hosts: localhost connection: local tasks: - name: Terminate instances that were previously launched ec2: state: 
'absent' instance_ids: '{{ ec2.instance_ids }}' # Start a few existing instances, run some tasks # and stop the instances - name: Start sandbox instances hosts: localhost gather_facts: false connection: local vars: instance_ids: - 'i-xxxxxx' - 'i-xxxxxx' - 'i-xxxxxx' region: us-east-1 tasks: - name: Start the sandbox instances ec2: instance_ids: '{{ instance_ids }}' region: '{{ region }}' state: running wait: True vpc_subnet_id: subnet-29e63245 assign_public_ip: yes roles: - do_neat_stuff - do_more_neat_stuff - name: Stop sandbox instances hosts: localhost gather_facts: false connection: local vars: instance_ids: - 'i-xxxxxx' - 'i-xxxxxx' - 'i-xxxxxx' region: us-east-1 tasks: - name: Stop the sandbox instances ec2: instance_ids: '{{ instance_ids }}' region: '{{ region }}' state: stopped wait: True vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # # Start stopped instances specified by tag # - local_action: module: ec2 instance_tags: Name: ExtraPower state: running # # Restart instances specified by tag # - local_action: module: ec2 instance_tags: Name: ExtraPower state: restarted # # Enforce that 5 instances with a tag "foo" are running # (Highly recommended!) # - ec2: key_name: mykey instance_type: c1.medium image: ami-40603AD1 wait: yes group: webserver instance_tags: foo: bar exact_count: 5 count_tag: foo vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # # Enforce that 5 running instances named "database" with a "dbtype" of "postgres" # - ec2: key_name: mykey instance_type: c1.medium image: ami-40603AD1 wait: yes group: webserver instance_tags: Name: database dbtype: postgres exact_count: 5 count_tag: Name: database dbtype: postgres vpc_subnet_id: subnet-29e63245 assign_public_ip: yes # # count_tag complex argument examples # # instances with tag foo count_tag: foo: # instances with tag foo=bar count_tag: foo: bar # instances with tags foo=bar & baz count_tag: foo: bar baz: # instances with tags foo & bar & baz=bang count_tag: - foo - bar - baz: bang ''' import time from ast import literal_eval from ansible.module_utils.six import iteritems from ansible.module_utils.six import get_function_code try: import boto.ec2 from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping from boto.exception import EC2ResponseError from boto.vpc import VPCConnection HAS_BOTO = True except ImportError: HAS_BOTO = False def find_running_instances_by_count_tag(module, ec2, count_tag, zone=None): # get reservations for instances that match tag(s) and are running reservations = get_reservations(module, ec2, tags=count_tag, state="running", zone=zone) instances = [] for res in reservations: if hasattr(res, 'instances'): for inst in res.instances: instances.append(inst) return reservations, instances def _set_none_to_blank(dictionary): result = dictionary for k in result: if isinstance(result[k], dict): result[k] = _set_none_to_blank(result[k]) elif not result[k]: result[k] = "" return result def get_reservations(module, ec2, tags=None, state=None, zone=None): # TODO: filters do not work with tags that have underscores filters = dict() if tags is not None: if isinstance(tags, str): try: tags = literal_eval(tags) except: pass # if string, we only care that a tag of that name exists if isinstance(tags, str): filters.update({"tag-key": tags}) # if list, append each item to filters if isinstance(tags, list): for x in tags: if isinstance(x, dict): x = _set_none_to_blank(x) filters.update(dict(("tag:"+tn, tv) for (tn,tv) in iteritems(x))) else: filters.update({"tag-key": x}) # if dict, 
add the key and value to the filter if isinstance(tags, dict): tags = _set_none_to_blank(tags) filters.update(dict(("tag:"+tn, tv) for (tn,tv) in iteritems(tags))) if state: # http://stackoverflow.com/questions/437511/what-are-the-valid-instancestates-for-the-amazon-ec2-api filters.update({'instance-state-name': state}) if zone: filters.update({'availability-zone': zone}) results = ec2.get_all_instances(filters=filters) return results def get_instance_info(inst): """ Retrieves instance information from an instance ID and returns it as a dictionary """ instance_info = {'id': inst.id, 'ami_launch_index': inst.ami_launch_index, 'private_ip': inst.private_ip_address, 'private_dns_name': inst.private_dns_name, 'public_ip': inst.ip_address, 'dns_name': inst.dns_name, 'public_dns_name': inst.public_dns_name, 'state_code': inst.state_code, 'architecture': inst.architecture, 'image_id': inst.image_id, 'key_name': inst.key_name, 'placement': inst.placement, 'region': inst.placement[:-1], 'kernel': inst.kernel, 'ramdisk': inst.ramdisk, 'launch_time': inst.launch_time, 'instance_type': inst.instance_type, 'root_device_type': inst.root_device_type, 'root_device_name': inst.root_device_name, 'state': inst.state, 'hypervisor': inst.hypervisor, 'tags': inst.tags, 'groups': dict((group.id, group.name) for group in inst.groups), } try: instance_info['virtualization_type'] = getattr(inst,'virtualization_type') except AttributeError: instance_info['virtualization_type'] = None try: instance_info['ebs_optimized'] = getattr(inst, 'ebs_optimized') except AttributeError: instance_info['ebs_optimized'] = False try: bdm_dict = {} bdm = getattr(inst, 'block_device_mapping') for device_name in bdm.keys(): bdm_dict[device_name] = { 'status': bdm[device_name].status, 'volume_id': bdm[device_name].volume_id, 'delete_on_termination': bdm[device_name].delete_on_termination } instance_info['block_device_mapping'] = bdm_dict except AttributeError: instance_info['block_device_mapping'] = False try: instance_info['tenancy'] = getattr(inst, 'placement_tenancy') except AttributeError: instance_info['tenancy'] = 'default' return instance_info def boto_supports_associate_public_ip_address(ec2): """ Check if Boto library has associate_public_ip_address in the NetworkInterfaceSpecification class. Added in Boto 2.13.0 ec2: authenticated ec2 connection object Returns: True if Boto library accepts associate_public_ip_address argument, else false """ try: network_interface = boto.ec2.networkinterface.NetworkInterfaceSpecification() getattr(network_interface, "associate_public_ip_address") return True except AttributeError: return False def boto_supports_profile_name_arg(ec2): """ Check if Boto library has instance_profile_name argument. 
instance_profile_name has been added in Boto 2.5.0 ec2: authenticated ec2 connection object Returns: True if Boto library accept instance_profile_name argument, else false """ run_instances_method = getattr(ec2, 'run_instances') return 'instance_profile_name' in get_function_code(run_instances_method).co_varnames def create_block_device(module, ec2, volume): # Not aware of a way to determine this programatically # http://aws.amazon.com/about-aws/whats-new/2013/10/09/ebs-provisioned-iops-maximum-iops-gb-ratio-increased-to-30-1/ MAX_IOPS_TO_SIZE_RATIO = 30 # device_type has been used historically to represent volume_type, # however ec2_vol uses volume_type, as does the BlockDeviceType, so # we add handling for either/or but not both if all(key in volume for key in ['device_type','volume_type']): module.fail_json(msg = 'device_type is a deprecated name for volume_type. Do not use both device_type and volume_type') # get whichever one is set, or NoneType if neither are set volume_type = volume.get('device_type') or volume.get('volume_type') if 'snapshot' not in volume and 'ephemeral' not in volume: if 'volume_size' not in volume: module.fail_json(msg = 'Size must be specified when creating a new volume or modifying the root volume') if 'snapshot' in volume: if volume_type == 'io1' and 'iops' not in volume: module.fail_json(msg = 'io1 volumes must have an iops value set') if 'iops' in volume: snapshot = ec2.get_all_snapshots(snapshot_ids=[volume['snapshot']])[0] size = volume.get('volume_size', snapshot.volume_size) if int(volume['iops']) > MAX_IOPS_TO_SIZE_RATIO * size: module.fail_json(msg = 'IOPS must be at most %d times greater than size' % MAX_IOPS_TO_SIZE_RATIO) if 'encrypted' in volume: module.fail_json(msg = 'You can not set encryption when creating a volume from a snapshot') if 'ephemeral' in volume: if 'snapshot' in volume: module.fail_json(msg = 'Cannot set both ephemeral and snapshot') return BlockDeviceType(snapshot_id=volume.get('snapshot'), ephemeral_name=volume.get('ephemeral'), size=volume.get('volume_size'), volume_type=volume_type, delete_on_termination=volume.get('delete_on_termination', False), iops=volume.get('iops'), encrypted=volume.get('encrypted', None)) def boto_supports_param_in_spot_request(ec2, param): """ Check if Boto library has a <param> in its request_spot_instances() method. For example, the placement_group parameter wasn't added until 2.3.0. ec2: authenticated ec2 connection object Returns: True if boto library has the named param as an argument on the request_spot_instances method, else False """ method = getattr(ec2, 'request_spot_instances') return param in get_function_code(method).co_varnames def await_spot_requests(module, ec2, spot_requests, count): """ Wait for a group of spot requests to be fulfilled, or fail. 
module: Ansible module object ec2: authenticated ec2 connection object spot_requests: boto.ec2.spotinstancerequest.SpotInstanceRequest object returned by ec2.request_spot_instances count: Total number of instances to be created by the spot requests Returns: list of instance ID's created by the spot request(s) """ spot_wait_timeout = int(module.params.get('spot_wait_timeout')) wait_complete = time.time() + spot_wait_timeout spot_req_inst_ids = dict() while time.time() < wait_complete: reqs = ec2.get_all_spot_instance_requests() for sirb in spot_requests: if sirb.id in spot_req_inst_ids: continue for sir in reqs: if sir.id != sirb.id: continue # this is not our spot instance if sir.instance_id is not None: spot_req_inst_ids[sirb.id] = sir.instance_id elif sir.state == 'open': continue # still waiting, nothing to do here elif sir.state == 'active': continue # Instance is created already, nothing to do here elif sir.state == 'failed': module.fail_json(msg="Spot instance request %s failed with status %s and fault %s:%s" % ( sir.id, sir.status.code, sir.fault.code, sir.fault.message)) elif sir.state == 'cancelled': module.fail_json(msg="Spot instance request %s was cancelled before it could be fulfilled." % sir.id) elif sir.state == 'closed': # instance is terminating or marked for termination # this may be intentional on the part of the operator, # or it may have been terminated by AWS due to capacity, # price, or group constraints in this case, we'll fail # the module if the reason for the state is anything # other than termination by user. Codes are documented at # http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/spot-bid-status.html if sir.status.code == 'instance-terminated-by-user': # do nothing, since the user likely did this on purpose pass else: spot_msg = "Spot instance request %s was closed by AWS with the status %s and fault %s:%s" module.fail_json(msg=spot_msg % (sir.id, sir.status.code, sir.fault.code, sir.fault.message)) if len(spot_req_inst_ids) < count: time.sleep(5) else: return spot_req_inst_ids.values() module.fail_json(msg = "wait for spot requests timeout on %s" % time.asctime()) def enforce_count(module, ec2, vpc): exact_count = module.params.get('exact_count') count_tag = module.params.get('count_tag') zone = module.params.get('zone') # fail here if the exact count was specified without filtering # on a tag, as this may lead to a undesired removal of instances if exact_count and count_tag is None: module.fail_json(msg="you must use the 'count_tag' option with exact_count") reservations, instances = find_running_instances_by_count_tag(module, ec2, count_tag, zone) changed = None checkmode = False instance_dict_array = [] changed_instance_ids = None if len(instances) == exact_count: changed = False elif len(instances) < exact_count: changed = True to_create = exact_count - len(instances) if not checkmode: (instance_dict_array, changed_instance_ids, changed) \ = create_instances(module, ec2, vpc, override_count=to_create) for inst in instance_dict_array: instances.append(inst) elif len(instances) > exact_count: changed = True to_remove = len(instances) - exact_count if not checkmode: all_instance_ids = sorted([ x.id for x in instances ]) remove_ids = all_instance_ids[0:to_remove] instances = [ x for x in instances if x.id not in remove_ids] (changed, instance_dict_array, changed_instance_ids) \ = terminate_instances(module, ec2, remove_ids) terminated_list = [] for inst in instance_dict_array: inst['state'] = "terminated" terminated_list.append(inst) instance_dict_array = 
terminated_list # ensure all instances are dictionaries all_instances = [] for inst in instances: if not isinstance(inst, dict): inst = get_instance_info(inst) all_instances.append(inst) return (all_instances, instance_dict_array, changed_instance_ids, changed) def create_instances(module, ec2, vpc, override_count=None): """ Creates new instances module : AnsibleModule object ec2: authenticated ec2 connection object Returns: A list of dictionaries with instance information about the instances that were launched """ key_name = module.params.get('key_name') id = module.params.get('id') group_name = module.params.get('group') group_id = module.params.get('group_id') zone = module.params.get('zone') instance_type = module.params.get('instance_type') tenancy = module.params.get('tenancy') spot_price = module.params.get('spot_price') spot_type = module.params.get('spot_type') image = module.params.get('image') if override_count: count = override_count else: count = module.params.get('count') monitoring = module.params.get('monitoring') kernel = module.params.get('kernel') ramdisk = module.params.get('ramdisk') wait = module.params.get('wait') wait_timeout = int(module.params.get('wait_timeout')) spot_wait_timeout = int(module.params.get('spot_wait_timeout')) placement_group = module.params.get('placement_group') user_data = module.params.get('user_data') instance_tags = module.params.get('instance_tags') vpc_subnet_id = module.params.get('vpc_subnet_id') assign_public_ip = module.boolean(module.params.get('assign_public_ip')) private_ip = module.params.get('private_ip') instance_profile_name = module.params.get('instance_profile_name') volumes = module.params.get('volumes') ebs_optimized = module.params.get('ebs_optimized') exact_count = module.params.get('exact_count') count_tag = module.params.get('count_tag') source_dest_check = module.boolean(module.params.get('source_dest_check')) termination_protection = module.boolean(module.params.get('termination_protection')) network_interfaces = module.params.get('network_interfaces') spot_launch_group = module.params.get('spot_launch_group') instance_initiated_shutdown_behavior = module.params.get('instance_initiated_shutdown_behavior') # group_id and group_name are exclusive of each other if group_id and group_name: module.fail_json(msg = str("Use only one type of parameter (group_name) or (group_id)")) vpc_id = None if vpc_subnet_id: if not vpc: module.fail_json(msg="region must be specified") else: vpc_id = vpc.get_all_subnets(subnet_ids=[vpc_subnet_id])[0].vpc_id else: vpc_id = None try: # Here we try to lookup the group id from the security group name - if group is set. if group_name: if vpc_id: grp_details = ec2.get_all_security_groups(filters={'vpc_id': vpc_id}) else: grp_details = ec2.get_all_security_groups() if isinstance(group_name, basestring): group_name = [group_name] unmatched = set(group_name).difference(str(grp.name) for grp in grp_details) if len(unmatched) > 0: module.fail_json(msg="The following group names are not valid: %s" % ', '.join(unmatched)) group_id = [ str(grp.id) for grp in grp_details if str(grp.name) in group_name ] # Now we try to lookup the group id testing if group exists. 
elif group_id: #wrap the group_id in a list if it's not one already if isinstance(group_id, basestring): group_id = [group_id] grp_details = ec2.get_all_security_groups(group_ids=group_id) group_name = [grp_item.name for grp_item in grp_details] except boto.exception.NoAuthHandlerFound as e: module.fail_json(msg = str(e)) # Lookup any instances that much our run id. running_instances = [] count_remaining = int(count) if id != None: filter_dict = {'client-token':id, 'instance-state-name' : 'running'} previous_reservations = ec2.get_all_instances(None, filter_dict) for res in previous_reservations: for prev_instance in res.instances: running_instances.append(prev_instance) count_remaining = count_remaining - len(running_instances) # Both min_count and max_count equal count parameter. This means the launch request is explicit (we want count, or fail) in how many instances we want. if count_remaining == 0: changed = False else: changed = True try: params = {'image_id': image, 'key_name': key_name, 'monitoring_enabled': monitoring, 'placement': zone, 'instance_type': instance_type, 'kernel_id': kernel, 'ramdisk_id': ramdisk, 'user_data': user_data} if ebs_optimized: params['ebs_optimized'] = ebs_optimized # 'tenancy' always has a default value, but it is not a valid parameter for spot instance request if not spot_price: params['tenancy'] = tenancy if boto_supports_profile_name_arg(ec2): params['instance_profile_name'] = instance_profile_name else: if instance_profile_name is not None: module.fail_json( msg="instance_profile_name parameter requires Boto version 2.5.0 or higher") if assign_public_ip: if not boto_supports_associate_public_ip_address(ec2): module.fail_json( msg="assign_public_ip parameter requires Boto version 2.13.0 or higher.") elif not vpc_subnet_id: module.fail_json( msg="assign_public_ip only available with vpc_subnet_id") else: if private_ip: interface = boto.ec2.networkinterface.NetworkInterfaceSpecification( subnet_id=vpc_subnet_id, private_ip_address=private_ip, groups=group_id, associate_public_ip_address=assign_public_ip) else: interface = boto.ec2.networkinterface.NetworkInterfaceSpecification( subnet_id=vpc_subnet_id, groups=group_id, associate_public_ip_address=assign_public_ip) interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface) params['network_interfaces'] = interfaces else: if network_interfaces: if isinstance(network_interfaces, basestring): network_interfaces = [network_interfaces] interfaces = [] for i, network_interface_id in enumerate(network_interfaces): interface = boto.ec2.networkinterface.NetworkInterfaceSpecification( network_interface_id=network_interface_id, device_index=i) interfaces.append(interface) params['network_interfaces'] = \ boto.ec2.networkinterface.NetworkInterfaceCollection(*interfaces) else: params['subnet_id'] = vpc_subnet_id if vpc_subnet_id: params['security_group_ids'] = group_id else: params['security_groups'] = group_name if volumes: bdm = BlockDeviceMapping() for volume in volumes: if 'device_name' not in volume: module.fail_json(msg = 'Device name must be set for volume') # Minimum volume size is 1GB. 
We'll use volume size explicitly set to 0 # to be a signal not to create this volume if 'volume_size' not in volume or int(volume['volume_size']) > 0: bdm[volume['device_name']] = create_block_device(module, ec2, volume) params['block_device_map'] = bdm # check to see if we're using spot pricing first before starting instances if not spot_price: if assign_public_ip and private_ip: params.update(dict( min_count = count_remaining, max_count = count_remaining, client_token = id, placement_group = placement_group, )) else: params.update(dict( min_count = count_remaining, max_count = count_remaining, client_token = id, placement_group = placement_group, private_ip_address = private_ip, )) # For ordinary (not spot) instances, we can select 'stop' # (the default) or 'terminate' here. params['instance_initiated_shutdown_behavior'] = instance_initiated_shutdown_behavior or 'stop' res = ec2.run_instances(**params) instids = [ i.id for i in res.instances ] while True: try: ec2.get_all_instances(instids) break except boto.exception.EC2ResponseError as e: if "<Code>InvalidInstanceID.NotFound</Code>" in str(e): # there's a race between start and get an instance continue else: module.fail_json(msg = str(e)) # The instances returned through ec2.run_instances above can be in # terminated state due to idempotency. See commit 7f11c3d for a complete # explanation. terminated_instances = [ str(instance.id) for instance in res.instances if instance.state == 'terminated' ] if terminated_instances: module.fail_json(msg = "Instances with id(s) %s " % terminated_instances + "were created previously but have since been terminated - " + "use a (possibly different) 'instanceid' parameter") else: if private_ip: module.fail_json( msg='private_ip only available with on-demand (non-spot) instances') if boto_supports_param_in_spot_request(ec2, 'placement_group'): params['placement_group'] = placement_group elif placement_group : module.fail_json( msg="placement_group parameter requires Boto version 2.3.0 or higher.") # You can't tell spot instances to 'stop'; they will always be # 'terminate'd. For convenience, we'll ignore the latter value. if instance_initiated_shutdown_behavior and instance_initiated_shutdown_behavior != 'terminate': module.fail_json( msg="instance_initiated_shutdown_behavior=stop is not supported for spot instances.") if spot_launch_group and isinstance(spot_launch_group, basestring): params['launch_group'] = spot_launch_group params.update(dict( count = count_remaining, type = spot_type, )) res = ec2.request_spot_instances(spot_price, **params) # Now we have to do the intermediate waiting if wait: instids = await_spot_requests(module, ec2, res, count) except boto.exception.BotoServerError as e: module.fail_json(msg = "Instance creation failed => %s: %s" % (e.error_code, e.error_message)) # wait here until the instances are up num_running = 0 wait_timeout = time.time() + wait_timeout while wait_timeout > time.time() and num_running < len(instids): try: res_list = ec2.get_all_instances(instids) except boto.exception.BotoServerError as e: if e.error_code == 'InvalidInstanceID.NotFound': time.sleep(1) continue else: raise num_running = 0 for res in res_list: num_running += len([ i for i in res.instances if i.state=='running' ]) if len(res_list) <= 0: # got a bad response of some sort, possibly due to # stale/cached data. 
Wait a second and then try again time.sleep(1) continue if wait and num_running < len(instids): time.sleep(5) else: break if wait and wait_timeout <= time.time(): # waiting took too long module.fail_json(msg = "wait for instances running timeout on %s" % time.asctime()) #We do this after the loop ends so that we end up with one list for res in res_list: running_instances.extend(res.instances) # Enabled by default by AWS if source_dest_check is False: for inst in res.instances: inst.modify_attribute('sourceDestCheck', False) # Disabled by default by AWS if termination_protection is True: for inst in res.instances: inst.modify_attribute('disableApiTermination', True) # Leave this as late as possible to try and avoid InvalidInstanceID.NotFound if instance_tags: try: ec2.create_tags(instids, instance_tags) except boto.exception.EC2ResponseError as e: module.fail_json(msg = "Instance tagging failed => %s: %s" % (e.error_code, e.error_message)) instance_dict_array = [] created_instance_ids = [] for inst in running_instances: inst.update() d = get_instance_info(inst) created_instance_ids.append(inst.id) instance_dict_array.append(d) return (instance_dict_array, created_instance_ids, changed) def terminate_instances(module, ec2, instance_ids): """ Terminates a list of instances module: Ansible module object ec2: authenticated ec2 connection object termination_list: a list of instances to terminate in the form of [ {id: <inst-id>}, ..] Returns a dictionary of instance information about the instances terminated. If the instance to be terminated is running "changed" will be set to False. """ # Whether to wait for termination to complete before returning wait = module.params.get('wait') wait_timeout = int(module.params.get('wait_timeout')) changed = False instance_dict_array = [] if not isinstance(instance_ids, list) or len(instance_ids) < 1: module.fail_json(msg='instance_ids should be a list of instances, aborting') terminated_instance_ids = [] for res in ec2.get_all_instances(instance_ids): for inst in res.instances: if inst.state == 'running' or inst.state == 'stopped': terminated_instance_ids.append(inst.id) instance_dict_array.append(get_instance_info(inst)) try: ec2.terminate_instances([inst.id]) except EC2ResponseError as e: module.fail_json(msg='Unable to terminate instance {0}, error: {1}'.format(inst.id, e)) changed = True # wait here until the instances are 'terminated' if wait: num_terminated = 0 wait_timeout = time.time() + wait_timeout while wait_timeout > time.time() and num_terminated < len(terminated_instance_ids): response = ec2.get_all_instances( \ instance_ids=terminated_instance_ids, \ filters={'instance-state-name':'terminated'}) try: num_terminated = sum([len(res.instances) for res in response]) except Exception as e: # got a bad response of some sort, possibly due to # stale/cached data. 
Wait a second and then try again time.sleep(1) continue if num_terminated < len(terminated_instance_ids): time.sleep(5) # waiting took too long if wait_timeout < time.time() and num_terminated < len(terminated_instance_ids): module.fail_json(msg = "wait for instance termination timeout on %s" % time.asctime()) #Lets get the current state of the instances after terminating - issue600 instance_dict_array = [] for res in ec2.get_all_instances(instance_ids=terminated_instance_ids,\ filters={'instance-state-name':'terminated'}): for inst in res.instances: instance_dict_array.append(get_instance_info(inst)) return (changed, instance_dict_array, terminated_instance_ids) def startstop_instances(module, ec2, instance_ids, state, instance_tags): """ Starts or stops a list of existing instances module: Ansible module object ec2: authenticated ec2 connection object instance_ids: The list of instances to start in the form of [ {id: <inst-id>}, ..] instance_tags: A dict of tag keys and values in the form of {key: value, ... } state: Intended state ("running" or "stopped") Returns a dictionary of instance information about the instances started/stopped. If the instance was not able to change state, "changed" will be set to False. Note that if instance_ids and instance_tags are both non-empty, this method will process the intersection of the two """ wait = module.params.get('wait') wait_timeout = int(module.params.get('wait_timeout')) source_dest_check = module.params.get('source_dest_check') termination_protection = module.params.get('termination_protection') changed = False instance_dict_array = [] if not isinstance(instance_ids, list) or len(instance_ids) < 1: # Fail unless the user defined instance tags if not instance_tags: module.fail_json(msg='instance_ids should be a list of instances, aborting') # To make an EC2 tag filter, we need to prepend 'tag:' to each key. 
# An empty filter does no filtering, so it's safe to pass it to the # get_all_instances method even if the user did not specify instance_tags filters = {} if instance_tags: for key, value in instance_tags.items(): filters["tag:" + key] = value # Check that our instances are not in the state we want to take # Check (and eventually change) instances attributes and instances state existing_instances_array = [] for res in ec2.get_all_instances(instance_ids, filters=filters): for inst in res.instances: # Check "source_dest_check" attribute try: if inst.vpc_id is not None and inst.get_attribute('sourceDestCheck')['sourceDestCheck'] != source_dest_check: inst.modify_attribute('sourceDestCheck', source_dest_check) changed = True except boto.exception.EC2ResponseError as exc: # instances with more than one Elastic Network Interface will # fail, because they have the sourceDestCheck attribute defined # per-interface if exc.code == 'InvalidInstanceID': for interface in inst.interfaces: if interface.source_dest_check != source_dest_check: ec2.modify_network_interface_attribute(interface.id, "sourceDestCheck", source_dest_check) changed = True else: module.fail_json(msg='Failed to handle source_dest_check state for instance {0}, error: {1}'.format(inst.id, exc), exception=traceback.format_exc(exc)) # Check "termination_protection" attribute if (inst.get_attribute('disableApiTermination')['disableApiTermination'] != termination_protection and termination_protection is not None): inst.modify_attribute('disableApiTermination', termination_protection) changed = True # Check instance state if inst.state != state: instance_dict_array.append(get_instance_info(inst)) try: if state == 'running': inst.start() else: inst.stop() except EC2ResponseError as e: module.fail_json(msg='Unable to change state for instance {0}, error: {1}'.format(inst.id, e)) changed = True existing_instances_array.append(inst.id) instance_ids = list(set(existing_instances_array + (instance_ids or []))) ## Wait for all the instances to finish starting or stopping wait_timeout = time.time() + wait_timeout while wait and wait_timeout > time.time(): instance_dict_array = [] matched_instances = [] for res in ec2.get_all_instances(instance_ids): for i in res.instances: if i.state == state: instance_dict_array.append(get_instance_info(i)) matched_instances.append(i) if len(matched_instances) < len(instance_ids): time.sleep(5) else: break if wait and wait_timeout <= time.time(): # waiting took too long module.fail_json(msg = "wait for instances running timeout on %s" % time.asctime()) return (changed, instance_dict_array, instance_ids) def restart_instances(module, ec2, instance_ids, state, instance_tags): """ Restarts a list of existing instances module: Ansible module object ec2: authenticated ec2 connection object instance_ids: The list of instances to start in the form of [ {id: <inst-id>}, ..] instance_tags: A dict of tag keys and values in the form of {key: value, ... } state: Intended state ("restarted") Returns a dictionary of instance information about the instances. If the instance was not able to change state, "changed" will be set to False. Wait will not apply here as this is a OS level operation. Note that if instance_ids and instance_tags are both non-empty, this method will process the intersection of the two. 
""" source_dest_check = module.params.get('source_dest_check') termination_protection = module.params.get('termination_protection') changed = False instance_dict_array = [] if not isinstance(instance_ids, list) or len(instance_ids) < 1: # Fail unless the user defined instance tags if not instance_tags: module.fail_json(msg='instance_ids should be a list of instances, aborting') # To make an EC2 tag filter, we need to prepend 'tag:' to each key. # An empty filter does no filtering, so it's safe to pass it to the # get_all_instances method even if the user did not specify instance_tags filters = {} if instance_tags: for key, value in instance_tags.items(): filters["tag:" + key] = value # Check that our instances are not in the state we want to take # Check (and eventually change) instances attributes and instances state for res in ec2.get_all_instances(instance_ids, filters=filters): for inst in res.instances: # Check "source_dest_check" attribute try: if inst.vpc_id is not None and inst.get_attribute('sourceDestCheck')['sourceDestCheck'] != source_dest_check: inst.modify_attribute('sourceDestCheck', source_dest_check) changed = True except boto.exception.EC2ResponseError as exc: # instances with more than one Elastic Network Interface will # fail, because they have the sourceDestCheck attribute defined # per-interface if exc.code == 'InvalidInstanceID': for interface in inst.interfaces: if interface.source_dest_check != source_dest_check: ec2.modify_network_interface_attribute(interface.id, "sourceDestCheck", source_dest_check) changed = True else: module.fail_json(msg='Failed to handle source_dest_check state for instance {0}, error: {1}'.format(inst.id, exc), exception=traceback.format_exc(exc)) # Check "termination_protection" attribute if (inst.get_attribute('disableApiTermination')['disableApiTermination'] != termination_protection and termination_protection is not None): inst.modify_attribute('disableApiTermination', termination_protection) changed = True # Check instance state if inst.state != state: instance_dict_array.append(get_instance_info(inst)) try: inst.reboot() except EC2ResponseError as e: module.fail_json(msg='Unable to change state for instance {0}, error: {1}'.format(inst.id, e)) changed = True return (changed, instance_dict_array, instance_ids) def main(): argument_spec = ec2_argument_spec() argument_spec.update(dict( key_name = dict(aliases = ['keypair']), id = dict(), group = dict(type='list', aliases=['groups']), group_id = dict(type='list'), zone = dict(aliases=['aws_zone', 'ec2_zone']), instance_type = dict(aliases=['type']), spot_price = dict(), spot_type = dict(default='one-time', choices=["one-time", "persistent"]), spot_launch_group = dict(), image = dict(), kernel = dict(), count = dict(type='int', default='1'), monitoring = dict(type='bool', default=False), ramdisk = dict(), wait = dict(type='bool', default=False), wait_timeout = dict(default=300), spot_wait_timeout = dict(default=600), placement_group = dict(), user_data = dict(), instance_tags = dict(type='dict'), vpc_subnet_id = dict(), assign_public_ip = dict(type='bool', default=False), private_ip = dict(), instance_profile_name = dict(), instance_ids = dict(type='list', aliases=['instance_id']), source_dest_check = dict(type='bool', default=True), termination_protection = dict(type='bool', default=None), state = dict(default='present', choices=['present', 'absent', 'running', 'restarted', 'stopped']), instance_initiated_shutdown_behavior=dict(default=None, choices=['stop', 'terminate']), exact_count = 
dict(type='int', default=None), count_tag = dict(), volumes = dict(type='list'), ebs_optimized = dict(type='bool', default=False), tenancy = dict(default='default'), network_interfaces = dict(type='list', aliases=['network_interface']) ) ) module = AnsibleModule( argument_spec=argument_spec, mutually_exclusive = [ ['exact_count', 'count'], ['exact_count', 'state'], ['exact_count', 'instance_ids'], ['network_interfaces', 'assign_public_ip'], ['network_interfaces', 'group'], ['network_interfaces', 'group_id'], ['network_interfaces', 'private_ip'], ['network_interfaces', 'vpc_subnet_id'], ], ) if not HAS_BOTO: module.fail_json(msg='boto required for this module') ec2 = ec2_connect(module) region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module) if region: try: vpc = connect_to_aws(boto.vpc, region, **aws_connect_kwargs) except boto.exception.NoAuthHandlerFound as e: module.fail_json(msg = str(e)) else: vpc = None tagged_instances = [] state = module.params['state'] if state == 'absent': instance_ids = module.params['instance_ids'] if not instance_ids: module.fail_json(msg='instance_ids list is required for absent state') (changed, instance_dict_array, new_instance_ids) = terminate_instances(module, ec2, instance_ids) elif state in ('running', 'stopped'): instance_ids = module.params.get('instance_ids') instance_tags = module.params.get('instance_tags') if not (isinstance(instance_ids, list) or isinstance(instance_tags, dict)): module.fail_json(msg='running list needs to be a list of instances or set of tags to run: %s' % instance_ids) (changed, instance_dict_array, new_instance_ids) = startstop_instances(module, ec2, instance_ids, state, instance_tags) elif state in ('restarted'): instance_ids = module.params.get('instance_ids') instance_tags = module.params.get('instance_tags') if not (isinstance(instance_ids, list) or isinstance(instance_tags, dict)): module.fail_json(msg='running list needs to be a list of instances or set of tags to run: %s' % instance_ids) (changed, instance_dict_array, new_instance_ids) = restart_instances(module, ec2, instance_ids, state, instance_tags) elif state == 'present': # Changed is always set to true when provisioning new instances if not module.params.get('image'): module.fail_json(msg='image parameter is required for new instance') if module.params.get('exact_count') is None: (instance_dict_array, new_instance_ids, changed) = create_instances(module, ec2, vpc) else: (tagged_instances, instance_dict_array, new_instance_ids, changed) = enforce_count(module, ec2, vpc) module.exit_json(changed=changed, instance_ids=new_instance_ids, instances=instance_dict_array, tagged_instances=tagged_instances) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.ec2 import * main()
wimnat/ansible-modules-core
cloud/amazon/ec2.py
Python
gpl-3.0
61,472
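One detail worth flagging in the main() dispatch of the ec2.py module above: the branch "elif state in ('restarted'):" compares against a parenthesised string rather than a tuple, so Python performs substring matching instead of membership testing. Because state is validated against a fixed choices list it cannot misfire in practice here, but the construct is fragile; a one-element tuple or a plain equality check expresses the intent. A standalone illustration, not part of the module:

state = 'restart'                  # not a valid module state
print(state in ('restarted'))      # True  -- ('restarted') is just a string, so this is substring matching
print(state in ('restarted',))     # False -- tuple membership only matches the exact value

state = 'restarted'
print(state in ('restarted',))     # True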
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra # # This file is part of Essentia # # Essentia is free software: you can redistribute it and/or modify it under # the terms of the GNU Affero General Public License as published by the Free # Software Foundation (FSF), either version 3 of the License, or (at your # option) any later version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the Affero GNU General Public License # version 3 along with this program. If not, see http://www.gnu.org/licenses/ #! /usr/bin/env python import sys, os import essentia, essentia.standard, essentia.streaming from essentia.streaming import * from numpy import argmax, log10, mean, tanh dynamicFrameSize = 88200 dynamicHopSize = 44100 analysisSampleRate = 44100.0 # expects the audio source to already be equal-loudness filtered class LevelExtractor(essentia.streaming.CompositeBase): #"""describes the dynamics of an audio signal""" def __init__(self, frameSize=dynamicFrameSize, hopSize=dynamicHopSize): super(LevelExtractor, self).__init__() fc = FrameCutter(frameSize=frameSize, hopSize=hopSize, startFromZero=True, silentFrames='noise') dy = Loudness() fc.frame >> dy.signal # define inputs: self.inputs['signal'] = fc.signal # define outputs: self.outputs['loudness'] = dy.loudness def squeezeRange(x, x1, x2): return 0.5 + 0.5 * tanh(-1.0 + 2.0 * (x - x1) / (x2 - x1)) def levelAverage(pool, namespace=''): epsilon = 1e-4 threshold = 1e-4 # -80dB if namespace: namespace += '.lowlevel.' else: namespace = 'lowlevel.' loudness = pool[namespace + 'loudness'] pool.remove(namespace + 'loudness') maxValue = loudness[argmax(loudness)] if maxValue <= epsilon: maxValue = epsilon # normalization of the maximum: def f(x): x /= float(maxValue) if x <= threshold : return threshold return x loudness = map(f, loudness) # average level: levelAverage = 10.0*log10(mean(loudness)) # Re-scaling and range-control # This yields in numbers between # 0 for signals with large dynamic variace and thus low dynamic average # 1 for signal with little dynamic range and thus # a dynamic average close to the maximum x1 = -5.0 x2 = -2.0 levelAverageSqueezed = squeezeRange(levelAverage, x1, x2) pool.set(namespace + 'average_loudness', levelAverageSqueezed) usage = 'level.py [options] <inputfilename> <outputfilename>' def parse_args(): import numpy essentia_version = '%s\n'\ 'python version: %s\n'\ 'numpy version: %s' % (essentia.__version__, # full version sys.version.split()[0], # python major version numpy.__version__) # numpy version from optparse import OptionParser parser = OptionParser(usage=usage, version=essentia_version) parser.add_option("-c","--cpp", action="store_true", dest="generate_cpp", help="generate cpp code from CompositeBase algorithm") parser.add_option("-d", "--dot", action="store_true", dest="generate_dot", help="generate dot and cpp code from CompositeBase algorithm") (options, args) = parser.parse_args() return options, args if __name__ == '__main__': opts, args = parse_args() if len(args) != 2: cmd = './'+os.path.basename(sys.argv[0])+ ' -h' os.system(cmd) sys.exit(1) if opts.generate_dot: essentia.translate(LevelExtractor, 'streaming_extractorlevel', dot_graph=True) elif opts.generate_cpp: essentia.translate(LevelExtractor, 'streaming_extractorlevel', dot_graph=False) 
# find out replay gain: loader = EqloudLoader(filename=args[0], sampleRate=analysisSampleRate, downmix='mix') rgain = ReplayGain(applyEqloud=False) pool = essentia.Pool() loader.audio >> rgain.signal rgain.replayGain >> (pool, 'replay_gain') essentia.run(loader) # get average level: loader = EqloudLoader(filename=args[0], replayGain=pool['replay_gain'], sampleRate=analysisSampleRate, downmix='mix') levelExtractor = LevelExtractor() loader.audio >> levelExtractor.signal levelExtractor.loudness >> (pool, 'lowlevel.loudness') essentia.run(loader) levelAverage(pool) essentia.standard.YamlOutput(filename=args[1])(pool)
GiantSteps/essentia
src/examples/python/streaming_extractor/level.py
Python
agpl-3.0
4,852
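The level.py extractor above is Python 2-era code (optparse, a list-returning map()), but the squeezeRange() mapping that levelAverage() relies on is easy to sanity-check in isolation. A minimal standalone restatement of that formula, using the x1=-5, x2=-2 window the script applies to the average level in dB:

from math import tanh

def squeeze_range(x, x1, x2):
    # same mapping as squeezeRange() in level.py: a tanh squashed into (0, 1)
    return 0.5 + 0.5 * tanh(-1.0 + 2.0 * (x - x1) / (x2 - x1))

print(round(squeeze_range(-5.0, -5.0, -2.0), 3))   # 0.119 -> large dynamic variance, low average level
print(round(squeeze_range(-3.5, -5.0, -2.0), 3))   # 0.5   -> midpoint of the window
print(round(squeeze_range(-2.0, -5.0, -2.0), 3))   # 0.881 -> average loudness close to the maximum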
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_serialization import jsonutils import six from nova import block_device from nova import context from nova import exception from nova import objects from nova.objects import fields from nova import test from nova.tests.unit import fake_block_device from nova.tests.unit import fake_instance from nova.tests.unit import matchers from nova.virt import block_device as driver_block_device from nova.virt import driver from nova.volume import cinder from nova.volume import encryptors class TestDriverBlockDevice(test.NoDBTestCase): driver_classes = { 'swap': driver_block_device.DriverSwapBlockDevice, 'ephemeral': driver_block_device.DriverEphemeralBlockDevice, 'volume': driver_block_device.DriverVolumeBlockDevice, 'snapshot': driver_block_device.DriverSnapshotBlockDevice, 'image': driver_block_device.DriverImageBlockDevice, 'blank': driver_block_device.DriverBlankBlockDevice } swap_bdm_dict = block_device.BlockDeviceDict( {'id': 1, 'instance_uuid': 'fake-instance', 'device_name': '/dev/sdb1', 'source_type': 'blank', 'destination_type': 'local', 'delete_on_termination': True, 'guest_format': 'swap', 'disk_bus': 'scsi', 'volume_size': 2, 'boot_index': -1}) swap_driver_bdm = { 'device_name': '/dev/sdb1', 'swap_size': 2, 'disk_bus': 'scsi'} swap_legacy_driver_bdm = { 'device_name': '/dev/sdb1', 'swap_size': 2} ephemeral_bdm_dict = block_device.BlockDeviceDict( {'id': 2, 'instance_uuid': 'fake-instance', 'device_name': '/dev/sdc1', 'source_type': 'blank', 'destination_type': 'local', 'disk_bus': 'scsi', 'device_type': 'disk', 'volume_size': 4, 'guest_format': 'ext4', 'delete_on_termination': True, 'boot_index': -1}) ephemeral_driver_bdm = { 'device_name': '/dev/sdc1', 'size': 4, 'device_type': 'disk', 'guest_format': 'ext4', 'disk_bus': 'scsi'} ephemeral_legacy_driver_bdm = { 'device_name': '/dev/sdc1', 'size': 4, 'virtual_name': 'ephemeral0', 'num': 0} volume_bdm_dict = block_device.BlockDeviceDict( {'id': 3, 'instance_uuid': 'fake-instance', 'device_name': '/dev/sda1', 'source_type': 'volume', 'disk_bus': 'scsi', 'device_type': 'disk', 'volume_size': 8, 'destination_type': 'volume', 'volume_id': 'fake-volume-id-1', 'guest_format': 'ext4', 'connection_info': '{"fake": "connection_info"}', 'delete_on_termination': False, 'boot_index': 0}) volume_driver_bdm = { 'mount_device': '/dev/sda1', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': False, 'disk_bus': 'scsi', 'device_type': 'disk', 'guest_format': 'ext4', 'boot_index': 0} volume_legacy_driver_bdm = { 'mount_device': '/dev/sda1', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': False} snapshot_bdm_dict = block_device.BlockDeviceDict( {'id': 4, 'instance_uuid': 'fake-instance', 'device_name': '/dev/sda2', 'delete_on_termination': True, 'volume_size': 3, 'disk_bus': 'scsi', 'device_type': 'disk', 'source_type': 'snapshot', 'destination_type': 'volume', 'connection_info': '{"fake": "connection_info"}', 'snapshot_id': 
'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2', 'boot_index': -1}) snapshot_driver_bdm = { 'mount_device': '/dev/sda2', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': True, 'disk_bus': 'scsi', 'device_type': 'disk', 'guest_format': None, 'boot_index': -1} snapshot_legacy_driver_bdm = { 'mount_device': '/dev/sda2', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': True} image_bdm_dict = block_device.BlockDeviceDict( {'id': 5, 'instance_uuid': 'fake-instance', 'device_name': '/dev/sda2', 'delete_on_termination': True, 'volume_size': 1, 'disk_bus': 'scsi', 'device_type': 'disk', 'source_type': 'image', 'destination_type': 'volume', 'connection_info': '{"fake": "connection_info"}', 'image_id': 'fake-image-id-1', 'volume_id': 'fake-volume-id-2', 'boot_index': -1}) image_driver_bdm = { 'mount_device': '/dev/sda2', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': True, 'disk_bus': 'scsi', 'device_type': 'disk', 'guest_format': None, 'boot_index': -1} image_legacy_driver_bdm = { 'mount_device': '/dev/sda2', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': True} blank_bdm_dict = block_device.BlockDeviceDict( {'id': 6, 'instance_uuid': 'fake-instance', 'device_name': '/dev/sda2', 'delete_on_termination': True, 'volume_size': 3, 'disk_bus': 'scsi', 'device_type': 'disk', 'source_type': 'blank', 'destination_type': 'volume', 'connection_info': '{"fake": "connection_info"}', 'snapshot_id': 'fake-snapshot-id-1', 'volume_id': 'fake-volume-id-2', 'boot_index': -1}) blank_driver_bdm = { 'mount_device': '/dev/sda2', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': True, 'disk_bus': 'scsi', 'device_type': 'disk', 'guest_format': None, 'boot_index': -1} blank_legacy_driver_bdm = { 'mount_device': '/dev/sda2', 'connection_info': {"fake": "connection_info"}, 'delete_on_termination': True} def setUp(self): super(TestDriverBlockDevice, self).setUp() self.volume_api = self.mox.CreateMock(cinder.API) self.virt_driver = self.mox.CreateMock(driver.ComputeDriver) self.context = context.RequestContext('fake_user', 'fake_project') # create bdm objects for testing self.swap_bdm = fake_block_device.fake_bdm_object( self.context, self.swap_bdm_dict) self.ephemeral_bdm = fake_block_device.fake_bdm_object( self.context, self.ephemeral_bdm_dict) self.volume_bdm = fake_block_device.fake_bdm_object( self.context, self.volume_bdm_dict) self.snapshot_bdm = fake_block_device.fake_bdm_object( self.context, self.snapshot_bdm_dict) self.image_bdm = fake_block_device.fake_bdm_object( self.context, self.image_bdm_dict) self.blank_bdm = fake_block_device.fake_bdm_object( self.context, self.blank_bdm_dict) def test_no_device_raises(self): for name, cls in self.driver_classes.items(): bdm = fake_block_device.fake_bdm_object( self.context, {'no_device': True}) self.assertRaises(driver_block_device._NotTransformable, cls, bdm) def _test_driver_device(self, name): db_bdm = getattr(self, "%s_bdm" % name) test_bdm = self.driver_classes[name](db_bdm) self.assertThat(test_bdm, matchers.DictMatches( getattr(self, "%s_driver_bdm" % name))) for k, v in six.iteritems(db_bdm): field_val = getattr(test_bdm._bdm_obj, k) if isinstance(field_val, bool): v = bool(v) self.assertEqual(field_val, v) self.assertThat(test_bdm.legacy(), matchers.DictMatches( getattr(self, "%s_legacy_driver_bdm" % name))) # Test passthru attributes for passthru in test_bdm._proxy_as_attr: self.assertEqual(getattr(test_bdm, passthru), 
getattr(test_bdm._bdm_obj, passthru)) # Make sure that all others raise _invalidType for other_name, cls in six.iteritems(self.driver_classes): if other_name == name: continue self.assertRaises(driver_block_device._InvalidType, cls, getattr(self, '%s_bdm' % name)) # Test the save method with mock.patch.object(test_bdm._bdm_obj, 'save') as save_mock: for fld, alias in six.iteritems(test_bdm._update_on_save): # We can't set fake values on enums, like device_type, # so skip those. if not isinstance(test_bdm._bdm_obj.fields[fld], fields.BaseEnumField): test_bdm[alias or fld] = 'fake_changed_value' test_bdm.save() for fld, alias in six.iteritems(test_bdm._update_on_save): self.assertEqual(test_bdm[alias or fld], getattr(test_bdm._bdm_obj, fld)) save_mock.assert_called_once_with() def check_save(): self.assertEqual(set([]), test_bdm._bdm_obj.obj_what_changed()) # Test that nothing is set on the object if there are no actual changes test_bdm._bdm_obj.obj_reset_changes() with mock.patch.object(test_bdm._bdm_obj, 'save') as save_mock: save_mock.side_effect = check_save test_bdm.save() def _test_driver_default_size(self, name): size = 'swap_size' if name == 'swap' else 'size' no_size_bdm = getattr(self, "%s_bdm_dict" % name).copy() no_size_bdm['volume_size'] = None driver_bdm = self.driver_classes[name]( fake_block_device.fake_bdm_object(self.context, no_size_bdm)) self.assertEqual(driver_bdm[size], 0) del no_size_bdm['volume_size'] driver_bdm = self.driver_classes[name]( fake_block_device.fake_bdm_object(self.context, no_size_bdm)) self.assertEqual(driver_bdm[size], 0) def test_driver_swap_block_device(self): self._test_driver_device("swap") def test_driver_swap_default_size(self): self._test_driver_default_size('swap') def test_driver_ephemeral_block_device(self): self._test_driver_device("ephemeral") def test_driver_ephemeral_default_size(self): self._test_driver_default_size('ephemeral') def test_driver_volume_block_device(self): self._test_driver_device("volume") test_bdm = self.driver_classes['volume']( self.volume_bdm) self.assertEqual(test_bdm['connection_info'], jsonutils.loads(test_bdm._bdm_obj.connection_info)) self.assertEqual(test_bdm._bdm_obj.id, 3) self.assertEqual(test_bdm.volume_id, 'fake-volume-id-1') self.assertEqual(test_bdm.volume_size, 8) def test_driver_snapshot_block_device(self): self._test_driver_device("snapshot") test_bdm = self.driver_classes['snapshot']( self.snapshot_bdm) self.assertEqual(test_bdm._bdm_obj.id, 4) self.assertEqual(test_bdm.snapshot_id, 'fake-snapshot-id-1') self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2') self.assertEqual(test_bdm.volume_size, 3) def test_driver_image_block_device(self): self._test_driver_device('image') test_bdm = self.driver_classes['image']( self.image_bdm) self.assertEqual(test_bdm._bdm_obj.id, 5) self.assertEqual(test_bdm.image_id, 'fake-image-id-1') self.assertEqual(test_bdm.volume_size, 1) def test_driver_image_block_device_destination_local(self): self._test_driver_device('image') bdm = self.image_bdm_dict.copy() bdm['destination_type'] = 'local' self.assertRaises(driver_block_device._InvalidType, self.driver_classes['image'], fake_block_device.fake_bdm_object(self.context, bdm)) def test_driver_blank_block_device(self): self._test_driver_device('blank') test_bdm = self.driver_classes['blank']( self.blank_bdm) self.assertEqual(6, test_bdm._bdm_obj.id) self.assertEqual('fake-volume-id-2', test_bdm.volume_id) self.assertEqual(3, test_bdm.volume_size) def _test_call_wait_func(self, delete_on_termination, delete_fail=False): 
test_bdm = self.driver_classes['volume'](self.volume_bdm) test_bdm['delete_on_termination'] = delete_on_termination with mock.patch.object(self.volume_api, 'delete') as vol_delete: wait_func = mock.MagicMock() mock_exception = exception.VolumeNotCreated(volume_id='fake-id', seconds=1, attempts=1, volume_status='error') wait_func.side_effect = mock_exception if delete_on_termination and delete_fail: vol_delete.side_effect = Exception() self.assertRaises(exception.VolumeNotCreated, test_bdm._call_wait_func, context=self.context, wait_func=wait_func, volume_api=self.volume_api, volume_id='fake-id') self.assertEqual(delete_on_termination, vol_delete.called) def test_call_wait_delete_volume(self): self._test_call_wait_func(True) def test_call_wait_delete_volume_fail(self): self._test_call_wait_func(True, True) def test_call_wait_no_delete_volume(self): self._test_call_wait_func(False) def _test_volume_attach(self, driver_bdm, bdm_dict, fake_volume, check_attach=True, fail_check_attach=False, driver_attach=False, fail_driver_attach=False, volume_attach=True, fail_volume_attach=False, access_mode='rw', availability_zone=None): elevated_context = self.context.elevated() self.stubs.Set(self.context, 'elevated', lambda: elevated_context) self.mox.StubOutWithMock(driver_bdm._bdm_obj, 'save') self.mox.StubOutWithMock(encryptors, 'get_encryption_metadata') instance_detail = {'id': '123', 'uuid': 'fake_uuid', 'availability_zone': availability_zone} instance = fake_instance.fake_instance_obj(self.context, **instance_detail) connector = {'ip': 'fake_ip', 'host': 'fake_host'} connection_info = {'data': {'access_mode': access_mode}} expected_conn_info = {'data': {'access_mode': access_mode}, 'serial': fake_volume['id']} enc_data = {'fake': 'enc_data'} self.volume_api.get(self.context, fake_volume['id']).AndReturn(fake_volume) if check_attach: if not fail_check_attach: self.volume_api.check_attach(self.context, fake_volume, instance=instance).AndReturn(None) else: self.volume_api.check_attach(self.context, fake_volume, instance=instance).AndRaise( test.TestingException) driver_bdm._bdm_obj.save().AndReturn(None) return instance, expected_conn_info self.virt_driver.get_volume_connector(instance).AndReturn(connector) self.volume_api.initialize_connection( elevated_context, fake_volume['id'], connector).AndReturn(connection_info) if driver_attach: encryptors.get_encryption_metadata( elevated_context, self.volume_api, fake_volume['id'], connection_info).AndReturn(enc_data) if not fail_driver_attach: self.virt_driver.attach_volume( elevated_context, expected_conn_info, instance, bdm_dict['device_name'], disk_bus=bdm_dict['disk_bus'], device_type=bdm_dict['device_type'], encryption=enc_data).AndReturn(None) else: self.virt_driver.attach_volume( elevated_context, expected_conn_info, instance, bdm_dict['device_name'], disk_bus=bdm_dict['disk_bus'], device_type=bdm_dict['device_type'], encryption=enc_data).AndRaise(test.TestingException) self.volume_api.terminate_connection( elevated_context, fake_volume['id'], connector).AndReturn(None) driver_bdm._bdm_obj.save().AndReturn(None) return instance, expected_conn_info if volume_attach: driver_bdm._bdm_obj.save().AndReturn(None) if not fail_volume_attach: self.volume_api.attach(elevated_context, fake_volume['id'], 'fake_uuid', bdm_dict['device_name'], mode=access_mode).AndReturn(None) else: self.volume_api.attach(elevated_context, fake_volume['id'], 'fake_uuid', bdm_dict['device_name'], mode=access_mode).AndRaise( test.TestingException) if driver_attach: 
self.virt_driver.detach_volume( expected_conn_info, instance, bdm_dict['device_name'], encryption=enc_data).AndReturn(None) self.volume_api.terminate_connection( elevated_context, fake_volume['id'], connector).AndReturn(None) self.volume_api.detach(elevated_context, fake_volume['id']).AndReturn(None) driver_bdm._bdm_obj.save().AndReturn(None) return instance, expected_conn_info def test_volume_attach(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} instance, expected_conn_info = self._test_volume_attach( test_bdm, self.volume_bdm, volume) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) self.assertThat(test_bdm['connection_info'], matchers.DictMatches(expected_conn_info)) def test_volume_attach_ro(self): test_bdm = self.driver_classes['volume'](self.volume_bdm) volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} instance, expected_conn_info = self._test_volume_attach( test_bdm, self.volume_bdm, volume, access_mode='ro') self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) self.assertThat(test_bdm['connection_info'], matchers.DictMatches(expected_conn_info)) def test_volume_attach_update_size(self): test_bdm = self.driver_classes['volume'](self.volume_bdm) test_bdm.volume_size = None volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached', 'size': 42} instance, expected_conn_info = self._test_volume_attach( test_bdm, self.volume_bdm, volume) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) self.assertEqual(expected_conn_info, test_bdm['connection_info']) self.assertEqual(42, test_bdm.volume_size) def test_volume_attach_check_attach_fails(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1'} instance, _ = self._test_volume_attach( test_bdm, self.volume_bdm, volume, fail_check_attach=True) self.mox.ReplayAll() self.assertRaises(test.TestingException, test_bdm.attach, self.context, instance, self.volume_api, self.virt_driver) def test_volume_no_volume_attach(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} instance, expected_conn_info = self._test_volume_attach( test_bdm, self.volume_bdm, volume, check_attach=False, driver_attach=False) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver, do_check_attach=False, do_driver_attach=False) self.assertThat(test_bdm['connection_info'], matchers.DictMatches(expected_conn_info)) def test_volume_attach_no_check_driver_attach(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} instance, expected_conn_info = self._test_volume_attach( test_bdm, self.volume_bdm, volume, check_attach=False, driver_attach=True) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver, do_check_attach=False, do_driver_attach=True) self.assertThat(test_bdm['connection_info'], matchers.DictMatches(expected_conn_info)) def test_volume_attach_driver_attach_fails(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1'} instance, _ = self._test_volume_attach( test_bdm, self.volume_bdm, volume, driver_attach=True, fail_driver_attach=True) self.mox.ReplayAll() self.assertRaises(test.TestingException, test_bdm.attach, 
self.context, instance, self.volume_api, self.virt_driver, do_driver_attach=True) def test_volume_attach_volume_attach_fails(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} instance, _ = self._test_volume_attach( test_bdm, self.volume_bdm, volume, driver_attach=True, fail_volume_attach=True) self.mox.ReplayAll() self.assertRaises(test.TestingException, test_bdm.attach, self.context, instance, self.volume_api, self.virt_driver, do_driver_attach=True) def test_volume_attach_no_driver_attach_volume_attach_fails(self): test_bdm = self.driver_classes['volume']( self.volume_bdm) volume = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} instance, _ = self._test_volume_attach( test_bdm, self.volume_bdm, volume, fail_volume_attach=True) self.mox.ReplayAll() self.assertRaises(test.TestingException, test_bdm.attach, self.context, instance, self.volume_api, self.virt_driver, do_driver_attach=False) def test_refresh_connection(self): test_bdm = self.driver_classes['snapshot']( self.snapshot_bdm) instance = {'id': 'fake_id', 'uuid': 'fake_uuid'} connector = {'ip': 'fake_ip', 'host': 'fake_host'} connection_info = {'data': {'multipath_id': 'fake_multipath_id'}} expected_conn_info = {'data': {'multipath_id': 'fake_multipath_id'}, 'serial': 'fake-volume-id-2'} self.mox.StubOutWithMock(test_bdm._bdm_obj, 'save') self.virt_driver.get_volume_connector(instance).AndReturn(connector) self.volume_api.initialize_connection( self.context, test_bdm.volume_id, connector).AndReturn(connection_info) test_bdm._bdm_obj.save().AndReturn(None) self.mox.ReplayAll() test_bdm.refresh_connection_info(self.context, instance, self.volume_api, self.virt_driver) self.assertThat(test_bdm['connection_info'], matchers.DictMatches(expected_conn_info)) def test_snapshot_attach_no_volume(self): no_volume_snapshot = self.snapshot_bdm_dict.copy() no_volume_snapshot['volume_id'] = None test_bdm = self.driver_classes['snapshot']( fake_block_device.fake_bdm_object( self.context, no_volume_snapshot)) snapshot = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} volume = {'id': 'fake-volume-id-2', 'attach_status': 'detached'} wait_func = self.mox.CreateMockAnything() self.volume_api.get_snapshot(self.context, 'fake-snapshot-id-1').AndReturn(snapshot) self.volume_api.create(self.context, 3, '', '', snapshot, availability_zone=None).AndReturn(volume) wait_func(self.context, 'fake-volume-id-2').AndReturn(None) instance, expected_conn_info = self._test_volume_attach( test_bdm, no_volume_snapshot, volume) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver, wait_func) self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2') def test_snapshot_attach_no_volume_cinder_cross_az_attach_false(self): # Tests that the volume created from the snapshot has the same AZ as # the instance. 
self.flags(cross_az_attach=False, group='cinder') no_volume_snapshot = self.snapshot_bdm_dict.copy() no_volume_snapshot['volume_id'] = None test_bdm = self.driver_classes['snapshot']( fake_block_device.fake_bdm_object( self.context, no_volume_snapshot)) snapshot = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} volume = {'id': 'fake-volume-id-2', 'attach_status': 'detached'} wait_func = self.mox.CreateMockAnything() self.volume_api.get_snapshot(self.context, 'fake-snapshot-id-1').AndReturn(snapshot) self.volume_api.create(self.context, 3, '', '', snapshot, availability_zone='test-az').AndReturn(volume) wait_func(self.context, 'fake-volume-id-2').AndReturn(None) instance, expected_conn_info = self._test_volume_attach( test_bdm, no_volume_snapshot, volume, availability_zone='test-az') self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver, wait_func) self.assertEqual('fake-volume-id-2', test_bdm.volume_id) def test_snapshot_attach_fail_volume(self): fail_volume_snapshot = self.snapshot_bdm_dict.copy() fail_volume_snapshot['volume_id'] = None test_bdm = self.driver_classes['snapshot']( fake_block_device.fake_bdm_object( self.context, fail_volume_snapshot)) snapshot = {'id': 'fake-volume-id-1', 'attach_status': 'detached'} volume = {'id': 'fake-volume-id-2', 'attach_status': 'detached'} instance = fake_instance.fake_instance_obj(mock.sentinel.ctx, **{'uuid': 'fake-uuid'}) with test.nested( mock.patch.object(self.volume_api, 'get_snapshot', return_value=snapshot), mock.patch.object(self.volume_api, 'create', return_value=volume), mock.patch.object(self.volume_api, 'delete'), ) as (vol_get_snap, vol_create, vol_delete): wait_func = mock.MagicMock() mock_exception = exception.VolumeNotCreated(volume_id=volume['id'], seconds=1, attempts=1, volume_status='error') wait_func.side_effect = mock_exception self.assertRaises(exception.VolumeNotCreated, test_bdm.attach, context=self.context, instance=instance, volume_api=self.volume_api, virt_driver=self.virt_driver, wait_func=wait_func) vol_get_snap.assert_called_once_with( self.context, 'fake-snapshot-id-1') vol_create.assert_called_once_with( self.context, 3, '', '', snapshot, availability_zone=None) vol_delete.assert_called_once_with(self.context, volume['id']) def test_snapshot_attach_volume(self): test_bdm = self.driver_classes['snapshot']( self.snapshot_bdm) instance = {'id': 'fake_id', 'uuid': 'fake_uuid'} volume_class = self.driver_classes['volume'] self.mox.StubOutWithMock(volume_class, 'attach') # Make sure theses are not called self.mox.StubOutWithMock(self.volume_api, 'get_snapshot') self.mox.StubOutWithMock(self.volume_api, 'create') volume_class.attach(self.context, instance, self.volume_api, self.virt_driver, do_check_attach=True ).AndReturn(None) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2') def test_image_attach_no_volume(self): no_volume_image = self.image_bdm_dict.copy() no_volume_image['volume_id'] = None test_bdm = self.driver_classes['image']( fake_block_device.fake_bdm_object( self.context, no_volume_image)) image = {'id': 'fake-image-id-1'} volume = {'id': 'fake-volume-id-2', 'attach_status': 'detached'} wait_func = self.mox.CreateMockAnything() self.volume_api.create(self.context, 1, '', '', image_id=image['id'], availability_zone=None).AndReturn(volume) wait_func(self.context, 'fake-volume-id-2').AndReturn(None) instance, expected_conn_info = self._test_volume_attach( 
test_bdm, no_volume_image, volume) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver, wait_func) self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2') def test_image_attach_no_volume_cinder_cross_az_attach_false(self): # Tests that the volume created from the image has the same AZ as the # instance. self.flags(cross_az_attach=False, group='cinder') no_volume_image = self.image_bdm_dict.copy() no_volume_image['volume_id'] = None test_bdm = self.driver_classes['image']( fake_block_device.fake_bdm_object( self.context, no_volume_image)) image = {'id': 'fake-image-id-1'} volume = {'id': 'fake-volume-id-2', 'attach_status': 'detached'} wait_func = self.mox.CreateMockAnything() self.volume_api.create(self.context, 1, '', '', image_id=image['id'], availability_zone='test-az').AndReturn(volume) wait_func(self.context, 'fake-volume-id-2').AndReturn(None) instance, expected_conn_info = self._test_volume_attach( test_bdm, no_volume_image, volume, availability_zone='test-az') self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver, wait_func) self.assertEqual('fake-volume-id-2', test_bdm.volume_id) def test_image_attach_fail_volume(self): fail_volume_image = self.image_bdm_dict.copy() fail_volume_image['volume_id'] = None test_bdm = self.driver_classes['image']( fake_block_device.fake_bdm_object( self.context, fail_volume_image)) image = {'id': 'fake-image-id-1'} volume = {'id': 'fake-volume-id-2', 'attach_status': 'detached'} instance = fake_instance.fake_instance_obj(mock.sentinel.ctx, **{'uuid': 'fake-uuid'}) with test.nested( mock.patch.object(self.volume_api, 'create', return_value=volume), mock.patch.object(self.volume_api, 'delete'), ) as (vol_create, vol_delete): wait_func = mock.MagicMock() mock_exception = exception.VolumeNotCreated(volume_id=volume['id'], seconds=1, attempts=1, volume_status='error') wait_func.side_effect = mock_exception self.assertRaises(exception.VolumeNotCreated, test_bdm.attach, context=self.context, instance=instance, volume_api=self.volume_api, virt_driver=self.virt_driver, wait_func=wait_func) vol_create.assert_called_once_with( self.context, 1, '', '', image_id=image['id'], availability_zone=None) vol_delete.assert_called_once_with(self.context, volume['id']) def test_image_attach_volume(self): test_bdm = self.driver_classes['image']( self.image_bdm) instance = {'id': 'fake_id', 'uuid': 'fake_uuid'} volume_class = self.driver_classes['volume'] self.mox.StubOutWithMock(volume_class, 'attach') # Make sure theses are not called self.mox.StubOutWithMock(self.volume_api, 'get_snapshot') self.mox.StubOutWithMock(self.volume_api, 'create') volume_class.attach(self.context, instance, self.volume_api, self.virt_driver, do_check_attach=True ).AndReturn(None) self.mox.ReplayAll() test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) self.assertEqual(test_bdm.volume_id, 'fake-volume-id-2') def test_blank_attach_fail_volume(self): no_blank_volume = self.blank_bdm_dict.copy() no_blank_volume['volume_id'] = None test_bdm = self.driver_classes['blank']( fake_block_device.fake_bdm_object( self.context, no_blank_volume)) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx, **{'uuid': 'fake-uuid'}) volume = {'id': 'fake-volume-id-2', 'display_name': 'fake-uuid-blank-vol'} with test.nested( mock.patch.object(self.volume_api, 'create', return_value=volume), mock.patch.object(self.volume_api, 'delete'), ) as (vol_create, vol_delete): wait_func = mock.MagicMock() 
mock_exception = exception.VolumeNotCreated(volume_id=volume['id'], seconds=1, attempts=1, volume_status='error') wait_func.side_effect = mock_exception self.assertRaises(exception.VolumeNotCreated, test_bdm.attach, context=self.context, instance=instance, volume_api=self.volume_api, virt_driver=self.virt_driver, wait_func=wait_func) vol_create.assert_called_once_with( self.context, test_bdm.volume_size, 'fake-uuid-blank-vol', '', availability_zone=None) vol_delete.assert_called_once_with( self.context, volume['id']) def test_blank_attach_volume(self): no_blank_volume = self.blank_bdm_dict.copy() no_blank_volume['volume_id'] = None test_bdm = self.driver_classes['blank']( fake_block_device.fake_bdm_object( self.context, no_blank_volume)) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx, **{'uuid': 'fake-uuid'}) volume_class = self.driver_classes['volume'] volume = {'id': 'fake-volume-id-2', 'display_name': 'fake-uuid-blank-vol'} with test.nested( mock.patch.object(self.volume_api, 'create', return_value=volume), mock.patch.object(volume_class, 'attach') ) as (vol_create, vol_attach): test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) vol_create.assert_called_once_with( self.context, test_bdm.volume_size, 'fake-uuid-blank-vol', '', availability_zone=None) vol_attach.assert_called_once_with(self.context, instance, self.volume_api, self.virt_driver, do_check_attach=True) self.assertEqual('fake-volume-id-2', test_bdm.volume_id) def test_blank_attach_volume_cinder_cross_az_attach_false(self): # Tests that the blank volume created is in the same availability zone # as the instance. self.flags(cross_az_attach=False, group='cinder') no_blank_volume = self.blank_bdm_dict.copy() no_blank_volume['volume_id'] = None test_bdm = self.driver_classes['blank']( fake_block_device.fake_bdm_object( self.context, no_blank_volume)) updates = {'uuid': 'fake-uuid', 'availability_zone': 'test-az'} instance = fake_instance.fake_instance_obj(mock.sentinel.ctx, **updates) volume_class = self.driver_classes['volume'] volume = {'id': 'fake-volume-id-2', 'display_name': 'fake-uuid-blank-vol'} with mock.patch.object(self.volume_api, 'create', return_value=volume) as vol_create: with mock.patch.object(volume_class, 'attach') as vol_attach: test_bdm.attach(self.context, instance, self.volume_api, self.virt_driver) vol_create.assert_called_once_with( self.context, test_bdm.volume_size, 'fake-uuid-blank-vol', '', availability_zone='test-az') vol_attach.assert_called_once_with(self.context, instance, self.volume_api, self.virt_driver, do_check_attach=True) self.assertEqual('fake-volume-id-2', test_bdm.volume_id) def test_convert_block_devices(self): bdms = objects.BlockDeviceMappingList( objects=[self.volume_bdm, self.ephemeral_bdm]) converted = driver_block_device._convert_block_devices( self.driver_classes['volume'], bdms) self.assertEqual(converted, [self.volume_driver_bdm]) def test_convert_all_volumes(self): converted = driver_block_device.convert_all_volumes() self.assertEqual([], converted) converted = driver_block_device.convert_all_volumes( self.volume_bdm, self.ephemeral_bdm, self.image_bdm, self.blank_bdm, self.snapshot_bdm) self.assertEqual(converted, [self.volume_driver_bdm, self.image_driver_bdm, self.blank_driver_bdm, self.snapshot_driver_bdm]) def test_convert_volume(self): self.assertIsNone(driver_block_device.convert_volume(self.swap_bdm)) self.assertEqual(self.volume_driver_bdm, driver_block_device.convert_volume(self.volume_bdm)) 
self.assertEqual(self.snapshot_driver_bdm, driver_block_device.convert_volume(self.snapshot_bdm)) def test_legacy_block_devices(self): test_snapshot = self.driver_classes['snapshot']( self.snapshot_bdm) block_device_mapping = [test_snapshot, test_snapshot] legacy_bdm = driver_block_device.legacy_block_devices( block_device_mapping) self.assertEqual(legacy_bdm, [self.snapshot_legacy_driver_bdm, self.snapshot_legacy_driver_bdm]) # Test that the ephemerals work as expected test_ephemerals = [self.driver_classes['ephemeral']( self.ephemeral_bdm) for _ in range(2)] expected = [self.ephemeral_legacy_driver_bdm.copy() for _ in range(2)] expected[0]['virtual_name'] = 'ephemeral0' expected[0]['num'] = 0 expected[1]['virtual_name'] = 'ephemeral1' expected[1]['num'] = 1 legacy_ephemerals = driver_block_device.legacy_block_devices( test_ephemerals) self.assertEqual(expected, legacy_ephemerals) def test_get_swap(self): swap = [self.swap_driver_bdm] legacy_swap = [self.swap_legacy_driver_bdm] no_swap = [self.volume_driver_bdm] self.assertEqual(swap[0], driver_block_device.get_swap(swap)) self.assertEqual(legacy_swap[0], driver_block_device.get_swap(legacy_swap)) self.assertIsNone(driver_block_device.get_swap(no_swap)) self.assertIsNone(driver_block_device.get_swap([])) def test_is_implemented(self): for bdm in (self.image_bdm, self.volume_bdm, self.swap_bdm, self.ephemeral_bdm, self.snapshot_bdm): self.assertTrue(driver_block_device.is_implemented(bdm)) local_image = self.image_bdm_dict.copy() local_image['destination_type'] = 'local' self.assertFalse(driver_block_device.is_implemented( fake_block_device.fake_bdm_object(self.context, local_image))) def test_is_block_device_mapping(self): test_swap = self.driver_classes['swap'](self.swap_bdm) test_ephemeral = self.driver_classes['ephemeral'](self.ephemeral_bdm) test_image = self.driver_classes['image'](self.image_bdm) test_snapshot = self.driver_classes['snapshot'](self.snapshot_bdm) test_volume = self.driver_classes['volume'](self.volume_bdm) test_blank = self.driver_classes['blank'](self.blank_bdm) for bdm in (test_image, test_snapshot, test_volume, test_blank): self.assertTrue(driver_block_device.is_block_device_mapping( bdm._bdm_obj)) for bdm in (test_swap, test_ephemeral): self.assertFalse(driver_block_device.is_block_device_mapping( bdm._bdm_obj)) def test_get_volume_create_az_cinder_cross_az_attach_true(self): # Tests that we get None back if cinder.cross_az_attach=True even if # the instance has an AZ assigned. Note that since cross_az_attach # defaults to True we don't need to set a flag explicitly for the test. updates = {'availability_zone': 'test-az'} instance = fake_instance.fake_instance_obj(self.context, **updates) self.assertIsNone( driver_block_device._get_volume_create_az_value(instance))
HybridF5/nova
nova/tests/unit/virt/test_block_device.py
Python
apache-2.0
45,741
# Copyright (c) 2014 VMware, Inc. # # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import mock from oslo_vmware import vim_util from nova import exception from nova import test from nova.tests.unit.virt.vmwareapi import fake from nova.tests.unit.virt.vmwareapi import stubs from nova.virt.vmwareapi import driver from nova.virt.vmwareapi import network_util from nova.virt.vmwareapi import vm_util ResultSet = collections.namedtuple('ResultSet', ['objects']) ObjectContent = collections.namedtuple('ObjectContent', ['obj', 'propSet']) DynamicProperty = collections.namedtuple('DynamicProperty', ['name', 'val']) class GetNetworkWithTheNameTestCase(test.NoDBTestCase): def setUp(self): super(GetNetworkWithTheNameTestCase, self).setUp() fake.reset() self.stub_out('nova.virt.vmwareapi.driver.VMwareAPISession.vim', stubs.fake_vim_prop) self.stub_out('nova.virt.vmwareapi.driver.' 'VMwareAPISession.is_vim_object', stubs.fake_is_vim_object) self._session = driver.VMwareAPISession() def _build_cluster_networks(self, networks): """Returns a set of results for a cluster network lookup. This is an example: (ObjectContent){ obj = (obj){ value = "domain-c7" _type = "ClusterComputeResource" } propSet[] = (DynamicProperty){ name = "network" val = (ArrayOfManagedObjectReference){ ManagedObjectReference[] = (ManagedObjectReference){ value = "network-54" _type = "Network" }, (ManagedObjectReference){ value = "dvportgroup-14" _type = "DistributedVirtualPortgroup" }, } }, }] """ objects = [] obj = ObjectContent(obj=vim_util.get_moref("domain-c7", "ClusterComputeResource"), propSet=[]) value = fake.DataObject() value.ManagedObjectReference = [] for network in networks: value.ManagedObjectReference.append(network) obj.propSet.append( DynamicProperty(name='network', val=value)) objects.append(obj) return ResultSet(objects=objects) def test_get_network_no_match(self): net_morefs = [vim_util.get_moref("dvportgroup-135", "DistributedVirtualPortgroup"), vim_util.get_moref("dvportgroup-136", "DistributedVirtualPortgroup")] networks = self._build_cluster_networks(net_morefs) self._continue_retrieval_called = False def mock_call_method(module, method, *args, **kwargs): if method == 'get_object_properties': return networks if method == 'get_object_property': result = fake.DataObject() result.name = 'no-match' return result if method == 'continue_retrieval': self._continue_retrieval_called = True with mock.patch.object(self._session, '_call_method', mock_call_method): res = network_util.get_network_with_the_name(self._session, 'fake_net', 'fake_cluster') self.assertTrue(self._continue_retrieval_called) self.assertIsNone(res) def _get_network_dvs_match(self, name, token=False): net_morefs = [vim_util.get_moref("dvportgroup-135", "DistributedVirtualPortgroup")] networks = self._build_cluster_networks(net_morefs) def mock_call_method(module, method, *args, **kwargs): if method == 'get_object_properties': return networks if method == 'get_object_property': result = fake.DataObject() if not token or 
self._continue_retrieval_called: result.name = name else: result.name = 'fake_name' result.key = 'fake_key' result.distributedVirtualSwitch = 'fake_dvs' return result if method == 'continue_retrieval': if token: self._continue_retrieval_called = True return networks if method == 'cancel_retrieval': self._cancel_retrieval_called = True with mock.patch.object(self._session, '_call_method', mock_call_method): res = network_util.get_network_with_the_name(self._session, 'fake_net', 'fake_cluster') self.assertIsNotNone(res) def test_get_network_dvs_exact_match(self): self._cancel_retrieval_called = False self._get_network_dvs_match('fake_net') self.assertTrue(self._cancel_retrieval_called) def test_get_network_dvs_match(self): self._cancel_retrieval_called = False self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net') self.assertTrue(self._cancel_retrieval_called) def test_get_network_dvs_match_with_token(self): self._continue_retrieval_called = False self._cancel_retrieval_called = False self._get_network_dvs_match('dvs_7-virtualwire-7-fake_net', token=True) self.assertTrue(self._continue_retrieval_called) self.assertTrue(self._cancel_retrieval_called) def test_get_network_network_match(self): net_morefs = [vim_util.get_moref("network-54", "Network")] networks = self._build_cluster_networks(net_morefs) def mock_call_method(module, method, *args, **kwargs): if method == 'get_object_properties': return networks if method == 'get_object_property': return 'fake_net' with mock.patch.object(self._session, '_call_method', mock_call_method): res = network_util.get_network_with_the_name(self._session, 'fake_net', 'fake_cluster') self.assertIsNotNone(res) class GetVlanIdAndVswitchForPortgroupTestCase(test.NoDBTestCase): @mock.patch.object(vm_util, 'get_host_ref') def test_no_port_groups(self, mock_get_host_ref): session = mock.Mock() session._call_method.return_value = None self.assertRaises( exception.NovaException, network_util.get_vlanid_and_vswitch_for_portgroup, session, 'port_group_name', 'fake_cluster' ) @mock.patch.object(vm_util, 'get_host_ref') def test_valid_port_group(self, mock_get_host_ref): session = mock.Mock() session._call_method.return_value = self._fake_port_groups() vlanid, vswitch = network_util.get_vlanid_and_vswitch_for_portgroup( session, 'port_group_name', 'fake_cluster' ) self.assertEqual(vlanid, 100) self.assertEqual(vswitch, 'vswitch_name') @mock.patch.object(vm_util, 'get_host_ref') def test_unknown_port_group(self, mock_get_host_ref): session = mock.Mock() session._call_method.return_value = self._fake_port_groups() vlanid, vswitch = network_util.get_vlanid_and_vswitch_for_portgroup( session, 'unknown_port_group', 'fake_cluster' ) self.assertIsNone(vlanid) self.assertIsNone(vswitch) def _fake_port_groups(self): port_group_spec = fake.DataObject() port_group_spec.name = 'port_group_name' port_group_spec.vlanId = 100 port_group = fake.DataObject() port_group.vswitch = 'vswitch_name' port_group.spec = port_group_spec response = fake.DataObject() response.HostPortGroup = [port_group] return response
zhimin711/nova
nova/tests/unit/virt/vmwareapi/test_network_util.py
Python
apache-2.0
9,191
#!/usr/bin/env python """ @package ion.agents.platform.rsn.simulator.oms_values @file ion/agents/platform/rsn/simulator/oms_values.py @author Carlos Rueda @brief Platform attribute value generators for the RSN OMS simulator. """ __author__ = 'Carlos Rueda' __license__ = 'Apache 2.0' import time import ntplib import math # time begins a few secs ago from now for purposes of reporting _START_TIME = ntplib.system_to_ntp_time(time.time() - 30) # maximum value array size for a single generation call _MAX_RESULT_SIZE = 1000 # next value for generators created by _create_simple_generator _next_value = 990000 def _create_simple_generator(gen_period): """ Returns a simple generator that reports incremental values every given time period. @param gen_period discretize the time axis by this period in secs @retval A function to be called with parameters (from_time, to_time) where from_time and to_time are the lower and upper limits (both inclusive) of desired time window (NTP). """ def _gen(from_time, to_time): global _next_value if from_time < _START_TIME: from_time = _START_TIME # t: initial abscissa coordinate within the time window l_from_time = long(from_time - 2*gen_period) t = float((l_from_time / gen_period) * gen_period) while t < from_time: t += gen_period values = [] while t <= to_time: val = _next_value _next_value += 1 timestamp = t values.append((val, timestamp)) t += gen_period if len(values) == _MAX_RESULT_SIZE: break return values return _gen def _create_sine_generator(sine_period, gen_period, min_val, max_val): """ Returns a sine stream fluctuating between min_val and max_val. @param sine_period Sine period in secs @param gen_period discretize the time axis by this period in secs @param min_val min value @param max_val max value @retval A function to be called with parameters (from_time, to_time) where from_time and to_time are the lower and upper limits (both inclusive) of desired time window (NTP). """ twopi = 2 * math.pi def _gen(from_time, to_time): if from_time < _START_TIME: from_time = _START_TIME # t: initial abscissa coordinate within the time window l_from_time = long(from_time - 2*gen_period) t = float((l_from_time / gen_period) * gen_period) while t < from_time: t += gen_period range2 = (max_val - min_val) / 2 values = [] while t <= to_time: s = math.sin(t / sine_period * twopi) val = s * range2 + (max_val + min_val) / 2 timestamp = t values.append((val, timestamp)) t += gen_period if len(values) == _MAX_RESULT_SIZE: break return values return _gen # generators per platform-ID/attribute-name: _plat_attr_generators = { # we used to have a couple here, but now none for the moment. # An example would be: # ('LJ01D', 'input_voltage'): _create_sine_generator(sine_period=30, # gen_period=2.5, # min_val=-500, # max_val=+500), } # generators per attribute name: _attribute_generators = { 'input_voltage': _create_sine_generator(sine_period=30, gen_period=2.5, min_val=-500, max_val=+500), 'input_bus_current': _create_sine_generator(sine_period=50, gen_period=5, min_val=-300, max_val=+300), 'MVPC_temperature': _create_sine_generator(sine_period=20, gen_period=4, min_val=-200, max_val=+200), 'MVPC_pressure_1': _create_sine_generator(sine_period=20, gen_period=4, min_val=-100, max_val=+100), } _default_generator = _create_simple_generator(gen_period=5) def generate_values(platform_id, attr_id, from_time, to_time): """ Generates synthetic values within a given time window (both ends are inclusive). Times are NTP. @param platform_id Platform ID @param attr_id Attribute ID. 
Only the name part is considered. See OOIION-1551. @param from_time lower limit of desired time window @param to_time upper limit of desired time window """ # get the attribute name from the given ID: separator = attr_id.rfind('|') attr_name = attr_id[:separator] if separator >= 0 else attr_id # try by platform/attribute: if (platform_id, attr_name) in _plat_attr_generators: gen = _plat_attr_generators[(platform_id, attr_name)] # else: try by the attribute only: elif attr_name in _attribute_generators: gen = _attribute_generators[attr_name] else: gen = _default_generator return gen(from_time, to_time) if __name__ == "__main__": # pragma: no cover # do not restrict the absolute from_time for this demo program: _START_TIME = 0 import sys if len(sys.argv) < 5: print(""" USAGE: oms_values.py platform_id attr_id delta_from delta_to Generates values in window [curr_time + delta_from, curr_time + delta_to] Example: oms_values.py Node1A input_voltage -35 0 """) exit() cur_time = ntplib.system_to_ntp_time(time.time()) platform_id = sys.argv[1] attr_id = sys.argv[2] delta_from = float(sys.argv[3]) delta_to = float(sys.argv[4]) from_time = cur_time + delta_from to_time = cur_time + delta_to values = generate_values(platform_id, attr_id, from_time, to_time) print("Generated %d values in time window [%s, %s]:" % ( len(values), from_time, to_time)) for n, (val, t) in enumerate(values): print("\t%2d: %5.2f -> %+4.3f" % (n, t, val)) """ $ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A other_attr -35 0 Generated 7 values in time window [3561992754.4, 3561992789.4]: 0: 3561992755.00 -> +990000.000 1: 3561992760.00 -> +990001.000 2: 3561992765.00 -> +990002.000 3: 3561992770.00 -> +990003.000 4: 3561992775.00 -> +990004.000 5: 3561992780.00 -> +990005.000 6: 3561992785.00 -> +990006.000 $ bin/python ion/agents/platform/rsn/simulator/oms_values.py Node1A input_voltage -35 0 Generated 7 values in time window [3561992757.86, 3561992792.86]: 0: 3561992760.00 -> -0.000 1: 3561992765.00 -> +433.013 2: 3561992770.00 -> +433.013 3: 3561992775.00 -> +0.000 4: 3561992780.00 -> -433.013 5: 3561992785.00 -> -433.013 6: 3561992790.00 -> -0.000 """
janeen666/mi-instrument
mi/platform/rsn/simulator/oms_values.py
Python
bsd-2-clause
7,128
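The oms_values.py simulator above is likewise Python 2 code (it calls long()), but the waveform it synthesises is straightforward. A minimal standalone restatement of the sine-generator formula, using the 30-second period and the -500..+500 range that the simulator configures for input_voltage; the parameter values below mirror those settings rather than anything new:

import math

def sine_sample(t, sine_period, min_val, max_val):
    # same formula as _create_sine_generator(): a sine of the given period,
    # rescaled so it oscillates between min_val and max_val
    s = math.sin(t / sine_period * 2 * math.pi)
    half_range = (max_val - min_val) / 2
    return s * half_range + (max_val + min_val) / 2

print(sine_sample(7.5, 30, -500, 500))              # 500.0 at a quarter period (the maximum)
print(round(sine_sample(15.0, 30, -500, 500), 6))   # 0.0 at a half period (back at the midpoint)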
# Copyright 2019 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import io
import unittest

from json5.host import Host


class HostTest(unittest.TestCase):
    maxDiff = None

    def test_directory_and_file_operations(self):
        h = Host()
        orig_cwd = h.getcwd()
        try:
            d = h.mkdtemp()
            h.chdir(d)
            h.write_text_file('foo', 'bar')
            contents = h.read_text_file('foo')
            self.assertEqual(contents, 'bar')
            h.chdir('..')
            h.rmtree(d)
        finally:
            h.chdir(orig_cwd)

    def test_print(self):
        s = io.StringIO()
        h = Host()
        h.print_('hello, world', stream=s)
        self.assertEqual('hello, world\n', s.getvalue())


if __name__ == '__main__':  # pragma: no cover
    unittest.main()
scheib/chromium
third_party/pyjson5/src/tests/host_test.py
Python
bsd-3-clause
1,346
from django.db.backends.ddl_references import Statement, Table
from django.db.models import F, Q
from django.db.models.constraints import BaseConstraint
from django.db.models.sql import Query

__all__ = ['ExclusionConstraint']


class ExclusionConstraint(BaseConstraint):
    template = 'CONSTRAINT %(name)s EXCLUDE USING %(index_type)s (%(expressions)s)%(where)s'

    def __init__(self, *, name, expressions, index_type=None, condition=None):
        if index_type and index_type.lower() not in {'gist', 'spgist'}:
            raise ValueError(
                'Exclusion constraints only support GiST or SP-GiST indexes.'
            )
        if not expressions:
            raise ValueError(
                'At least one expression is required to define an exclusion '
                'constraint.'
            )
        if not all(
            isinstance(expr, (list, tuple)) and len(expr) == 2
            for expr in expressions
        ):
            raise ValueError('The expressions must be a list of 2-tuples.')
        if not isinstance(condition, (type(None), Q)):
            raise ValueError(
                'ExclusionConstraint.condition must be a Q instance.'
            )
        self.expressions = expressions
        self.index_type = index_type or 'GIST'
        self.condition = condition
        super().__init__(name=name)

    def _get_expression_sql(self, compiler, connection, query):
        expressions = []
        for expression, operator in self.expressions:
            if isinstance(expression, str):
                expression = F(expression)
            expression = expression.resolve_expression(query=query)
            sql, params = expression.as_sql(compiler, connection)
            expressions.append('%s WITH %s' % (sql % params, operator))
        return expressions

    def _get_condition_sql(self, compiler, schema_editor, query):
        if self.condition is None:
            return None
        where = query.build_where(self.condition)
        sql, params = where.as_sql(compiler, schema_editor.connection)
        return sql % tuple(schema_editor.quote_value(p) for p in params)

    def constraint_sql(self, model, schema_editor):
        query = Query(model, alias_cols=False)
        compiler = query.get_compiler(connection=schema_editor.connection)
        expressions = self._get_expression_sql(compiler, schema_editor.connection, query)
        condition = self._get_condition_sql(compiler, schema_editor, query)
        return self.template % {
            'name': schema_editor.quote_name(self.name),
            'index_type': self.index_type,
            'expressions': ', '.join(expressions),
            'where': ' WHERE (%s)' % condition if condition else '',
        }

    def create_sql(self, model, schema_editor):
        return Statement(
            'ALTER TABLE %(table)s ADD %(constraint)s',
            table=Table(model._meta.db_table, schema_editor.quote_name),
            constraint=self.constraint_sql(model, schema_editor),
        )

    def remove_sql(self, model, schema_editor):
        return schema_editor._delete_constraint_sql(
            schema_editor.sql_delete_check,
            model,
            schema_editor.quote_name(self.name),
        )

    def deconstruct(self):
        path, args, kwargs = super().deconstruct()
        kwargs['expressions'] = self.expressions
        if self.condition is not None:
            kwargs['condition'] = self.condition
        if self.index_type.lower() != 'gist':
            kwargs['index_type'] = self.index_type
        return path, args, kwargs

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return (
                self.name == other.name and
                self.index_type == other.index_type and
                self.expressions == other.expressions and
                self.condition == other.condition
            )
        return super().__eq__(other)

    def __repr__(self):
        return '<%s: index_type=%s, expressions=%s%s>' % (
            self.__class__.__qualname__,
            self.index_type,
            self.expressions,
            '' if self.condition is None else ', condition=%s' % self.condition,
        )
kaedroho/django
django/contrib/postgres/constraints.py
Python
bsd-3-clause
4,221
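A hedged usage sketch for the constraint class above: the Reservation model, its fields, and the constraint name are invented for illustration, and only the ExclusionConstraint signature (name, expressions as 2-tuples of field name and operator, optional index_type and condition) comes from the code. Note that combining a range operator with plain equality on a scalar column under a GiST index typically requires PostgreSQL's btree_gist extension.

from django.contrib.postgres.constraints import ExclusionConstraint
from django.contrib.postgres.fields import DateTimeRangeField
from django.db import models
from django.db.models import Q


class Reservation(models.Model):
    # Hypothetical model, defined only to illustrate the constraint.
    room_number = models.IntegerField()
    timespan = DateTimeRangeField()
    cancelled = models.BooleanField(default=False)

    class Meta:
        constraints = [
            # No two active reservations for the same room may overlap in time.
            ExclusionConstraint(
                name='exclude_overlapping_reservations',
                expressions=[
                    ('timespan', '&&'),    # range "overlaps" operator
                    ('room_number', '='),  # equality typically needs btree_gist
                ],
                condition=Q(cancelled=False),
            ),
        ]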
from __future__ import absolute_import import time class KafkaMetric(object): # NOTE java constructor takes a lock instance def __init__(self, metric_name, measurable, config): if not metric_name: raise ValueError('metric_name must be non-empty') if not measurable: raise ValueError('measurable must be non-empty') self._metric_name = metric_name self._measurable = measurable self._config = config @property def metric_name(self): return self._metric_name @property def measurable(self): return self._measurable @property def config(self): return self._config @config.setter def config(self, config): self._config = config def value(self, time_ms=None): if time_ms is None: time_ms = time.time() * 1000 return self.measurable.measure(self.config, time_ms)
OpenBankProject/OBP-Kafka-Python
lib/kafka/metrics/kafka_metric.py
Python
agpl-3.0
933
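A hedged sketch of the duck-typed interface KafkaMetric expects: whatever is passed as measurable only needs a measure(config, time_ms) method, and the config object is simply stored and handed back to it. The Gauge and NullConfig classes below are invented for illustration, and the import path assumes lib/ is on sys.path, matching this repository's layout.

# Hypothetical sketch: 'Gauge' and 'NullConfig' exist only to show the interface
# KafkaMetric.value() calls into.
import time

from kafka.metrics.kafka_metric import KafkaMetric


class NullConfig(object):
    """Stand-in for a metric config; KafkaMetric only passes it through."""


class Gauge(object):
    """Minimal measurable: returns the last recorded value."""
    def __init__(self):
        self._value = 0.0

    def record(self, value):
        self._value = value

    def measure(self, config, time_ms):
        # config and time_ms are ignored by this toy gauge
        return self._value


gauge = Gauge()
metric = KafkaMetric('queue-depth', gauge, NullConfig())

gauge.record(42.0)
print(metric.value())                        # uses time.time() * 1000 internally
print(metric.value(time_ms=time.time() * 1000))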
""" Help has buttons and menu items to open help, blog and forum pages in your primary browser. """ from __future__ import absolute_import #Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module. import __init__ from fabmetheus_utilities import archive from fabmetheus_utilities import settings from skeinforge_application.skeinforge_utilities import skeinforge_profile __author__ = 'Enrique Perez ([email protected])' __date__ = '$Date: 2008/21/04 $' __license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html' def getNewRepository(): 'Get new repository.' return HelpRepository() class HelpRepository: "A class to handle the help settings." def __init__(self): "Set the default settings, execute title & settings fileName." skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_utilities.skeinforge_help.html', self) announcementsText = '- Announcements - ' announcementsLabel = settings.LabelDisplay().getFromName(announcementsText, self ) announcementsLabel.columnspan = 6 settings.LabelDisplay().getFromName('Fabmetheus Blog, Announcements & Questions:', self ) settings.HelpPage().getFromNameAfterHTTP('fabmetheus.blogspot.com/', 'Fabmetheus Blog', self ) settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Documentation -', self ) settings.LabelDisplay().getFromName('Local Documentation Table of Contents: ', self ) settings.HelpPage().getFromNameSubName('Contents', self, 'contents.html') settings.LabelDisplay().getFromName('Wiki Manual with Pictures & Charts: ', self ) settings.HelpPage().getFromNameAfterHTTP('fabmetheus.crsndoo.com/wiki/index.php/Skeinforge', 'Wiki Manual', self ) settings.LabelDisplay().getFromName('Skeinforge Overview: ', self ) settings.HelpPage().getFromNameSubName('Skeinforge Overview', self, 'skeinforge_application.skeinforge.html') settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Search -', self ) settings.LabelDisplay().getFromName('Reprap Search:', self ) settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_reprap.html', 'Reprap Search', self ) settings.LabelDisplay().getFromName('Skeinforge Search:', self ) settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_skeinforge.html', 'Skeinforge Search', self ) settings.LabelDisplay().getFromName('Web Search:', self ) settings.HelpPage().getFromNameAfterHTTP('members.axion.net/~enrique/search_web.html', 'Web Search', self ) settings.LabelSeparator().getFromRepository(self) settings.LabelDisplay().getFromName('- Troubleshooting -', self ) settings.LabelDisplay().getFromName('Skeinforge Forum:', self) settings.HelpPage().getFromNameAfterHTTP('forums.reprap.org/list.php?154', ' Skeinforge Forum ', self ) settings.LabelSeparator().getFromRepository(self) self.version = settings.LabelDisplay().getFromName('Version: ' + archive.getFileText(archive.getVersionFileName()), self) self.wikiManualPrimary = settings.BooleanSetting().getFromValue('Wiki Manual Primary', self, True ) self.wikiManualPrimary.setUpdateFunction( self.save ) def save(self): "Write the entities." settings.writeSettingsPrintMessage(self)
nophead/Skeinforge50plus
skeinforge_application/skeinforge_utilities/skeinforge_help.py
Python
agpl-3.0
3,508
#!/usr/bin/env python # Copyright (c) 2006-2007 XenSource, Inc. # # Permission to use, copy, modify, and distribute this software for any # purpose with or without fee is hereby granted, provided that the above # copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # Simple example using the asynchronous version of the VM start method # Assumes the presence of a VM called 'new' import pprint, time, sys import XenAPI def main(session): print "Listing all VM references:" vms = session.xenapi.VM.get_all() pprint.pprint(vms) print "Dumping all VM records:" for vm in vms: pprint.pprint(session.xenapi.VM.get_record(vm)) print "Attempting to start a VM called 'new' (if it doesn't exist this will throw an exception)" vm = session.xenapi.VM.get_by_name_label('new')[0] session.xenapi.VM.start(vm, False, True) print "Attempting to start the VM asynchronously" task = session.xenapi.Async.VM.start(vm, False, True) task_record = session.xenapi.task.get_record(task) print "The initial contents of the task record:" pprint.pprint(task_record) print "Waiting for the task to complete" while session.xenapi.task.get_status(task) == "pending": time.sleep(1) task_record = session.xenapi.task.get_record(task) print "The final contents of the task record:" pprint.pprint(task_record) if __name__ == "__main__": if len(sys.argv) <> 4: print "Usage:" print sys.argv[0], " <url> <username> <password>" sys.exit(1) url = sys.argv[1] username = sys.argv[2] password = sys.argv[3] # First acquire a valid session by logging in: session = XenAPI.Session(url) session.xenapi.login_with_password(username, password, "1.0", "xen-api-scripts-vm-start-async.py") main(session)
anoobs/xen-api
scripts/examples/python/vm_start_async.py
Python
lgpl-2.1
2,335
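The example above polls the pending task once per second with no upper bound, which can hang indefinitely if the host never finishes the start. A hedged variant of that loop with an explicit deadline is sketched below; it uses only the task calls already shown (get_status, get_record), and the timeout and poll_interval parameters are invented here.

# Hedged variant of the polling loop above (not part of the original script).
import time

def wait_for_task(session, task, timeout=300.0, poll_interval=1.0):
    """Poll an async task until it leaves the 'pending' state or times out."""
    deadline = time.time() + timeout
    while session.xenapi.task.get_status(task) == "pending":
        if time.time() > deadline:
            raise RuntimeError("task did not complete within %.0f seconds" % timeout)
        time.sleep(poll_interval)
    return session.xenapi.task.get_record(task)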
#!/usr/bin/python # Copyright (c)2011-2014 the Boeing Company. # See the LICENSE file included in this distribution. # # author: Jeff Ahrenholz <[email protected]> # ''' wlanemanetests.py - This script tests the performance of the WLAN device in CORE by measuring various metrics: - delay experienced when pinging end-to-end - maximum TCP throughput achieved using iperf end-to-end - the CPU used and loss experienced when running an MGEN flow of UDP traffic All MANET nodes are arranged in a row, so that any given node can only communicate with the node to its right or to its left. Performance is measured using traffic that travels across each hop in the network. Static /32 routing is used instead of any dynamic routing protocol. Various underlying network types are tested: - bridged (the CORE default, uses ebtables) - bridged with netem (add link effects to the bridge using tc queues) - EMANE bypass - the bypass model just forwards traffic - EMANE RF-PIPE - the bandwidth (bitrate) is set very high / no restrictions - EMANE RF-PIPE - bandwidth is set similar to netem case - EMANE RF-PIPE - default connectivity is off and pathloss events are generated to connect the nodes in a line Results are printed/logged in CSV format. ''' import os, sys, time, optparse, datetime, math from string import Template try: from core import pycore except ImportError: # hack for Fedora autoconf that uses the following pythondir: if "/usr/lib/python2.6/site-packages" in sys.path: sys.path.append("/usr/local/lib/python2.6/site-packages") if "/usr/lib64/python2.6/site-packages" in sys.path: sys.path.append("/usr/local/lib64/python2.6/site-packages") if "/usr/lib/python2.7/site-packages" in sys.path: sys.path.append("/usr/local/lib/python2.7/site-packages") if "/usr/lib64/python2.7/site-packages" in sys.path: sys.path.append("/usr/local/lib64/python2.7/site-packages") from core import pycore from core.misc import ipaddr from core.misc.utils import mutecall from core.constants import QUAGGA_STATE_DIR from core.emane.emane import Emane from core.emane.bypass import EmaneBypassModel from core.emane.rfpipe import EmaneRfPipeModel try: import emaneeventservice import emaneeventpathloss except Exception, e: try: from emanesh.events import EventService from emanesh.events import PathlossEvent except Exception, e2: raise ImportError, "failed to import EMANE Python bindings:\n%s\n%s" % \ (e, e2) # global Experiment object (for interaction with 'python -i') exp = None # move these to core.misc.utils def readstat(): f = open("/proc/stat", "r") lines = f.readlines() f.close() return lines def numcpus(): lines = readstat() n = 0 for l in lines[1:]: if l[:3] != "cpu": break n += 1 return n def getcputimes(line): # return (user, nice, sys, idle) from a /proc/stat cpu line # assume columns are: # cpu# user nice sys idle iowait irq softirq steal guest (man 5 proc) items = line.split() (user, nice, sys, idle) = map(lambda(x): int(x), items[1:5]) return [user, nice, sys, idle] def calculatecpu(timesa, timesb): for i in range(len(timesa)): timesb[i] -= timesa[i] total = sum(timesb) if total == 0: return 0.0 else: # subtract % time spent in idle time return 100 - ((100.0 * timesb[-1]) / total) # end move these to core.misc.utils class Cmd(object): ''' Helper class for running a command on a node and parsing the result. 
''' args = "" def __init__(self, node, verbose=False): ''' Initialize with a CoreNode (LxcNode) ''' self.id = None self.stdin = None self.out = None self.node = node self.verbose = verbose def info(self, msg): ''' Utility method for writing output to stdout.''' print msg sys.stdout.flush() def warn(self, msg): ''' Utility method for writing output to stderr. ''' print >> sys.stderr, "XXX %s:" % self.node.name, msg sys.stderr.flush() def run(self): ''' This is the primary method used for running this command. ''' self.open() status = self.id.wait() r = self.parse() self.cleanup() return r def open(self): ''' Exceute call to node.popen(). ''' self.id, self.stdin, self.out, self.err = \ self.node.popen((self.args)) def parse(self): ''' This method is overloaded by child classes and should return some result. ''' return None def cleanup(self): ''' Close the Popen channels.''' self.stdin.close() self.out.close() self.err.close() tmp = self.id.wait() if tmp: self.warn("nonzero exit status:", tmp) class ClientServerCmd(Cmd): ''' Helper class for running a command on a node and parsing the result. ''' args = "" client_args = "" def __init__(self, node, client_node, verbose=False): ''' Initialize with two CoreNodes, node is the server ''' Cmd.__init__(self, node, verbose) self.client_node = client_node def run(self): ''' Run the server command, then the client command, then kill the server ''' self.open() # server self.client_open() # client status = self.client_id.wait() self.node.cmdresult(['killall', self.args[0]]) # stop the server r = self.parse() self.cleanup() return r def client_open(self): ''' Exceute call to client_node.popen(). ''' self.client_id, self.client_stdin, self.client_out, self.client_err = \ self.client_node.popen((self.client_args)) def parse(self): ''' This method is overloaded by child classes and should return some result. ''' return None def cleanup(self): ''' Close the Popen channels.''' self.stdin.close() self.out.close() self.err.close() tmp = self.id.wait() if tmp: self.warn("nonzero exit status: %s" % tmp) self.warn("command was: %s" % ((self.args, ))) class PingCmd(Cmd): ''' Test latency using ping. ''' def __init__(self, node, verbose=False, addr=None, count=50, interval=0.1, ): Cmd.__init__(self, node, verbose) self.addr = addr self.count = count self.interval = interval self.args = ['ping', '-q', '-c', '%s' % count, '-i', '%s' % interval, addr] def run(self): if self.verbose: self.info("%s initial test ping (max 1 second)..." % self.node.name) (status, result) = self.node.cmdresult(["ping", "-q", "-c", "1", "-w", "1", self.addr]) if status != 0: self.warn("initial ping from %s to %s failed! result:\n%s" % \ (self.node.name, self.addr, result)) return (0.0, 0.0) if self.verbose: self.info("%s pinging %s (%d seconds)..." % \ (self.node.name, self.addr, self.count * self.interval)) return Cmd.run(self) def parse(self): lines = self.out.readlines() avg_latency = 0 mdev = 0 try: stats_str = lines[-1].split('=')[1] stats = stats_str.split('/') avg_latency = float(stats[1]) mdev = float(stats[3].split(' ')[0]) except Exception, e: self.warn("ping parsing exception: %s" % e) return (avg_latency, mdev) class IperfCmd(ClientServerCmd): ''' Test throughput using iperf. 
''' def __init__(self, node, client_node, verbose=False, addr=None, time=10): # node is the server ClientServerCmd.__init__(self, node, client_node, verbose) self.addr = addr self.time = time # -s server, -y c CSV report output self.args = ["iperf", "-s", "-y", "c"] self.client_args = ["iperf", "-c", self.addr, "-t", "%s" % self.time] def run(self): if self.verbose: self.info("Launching the iperf server on %s..." % self.node.name) self.info("Running the iperf client on %s (%s seconds)..." % \ (self.client_node.name, self.time)) return ClientServerCmd.run(self) def parse(self): lines = self.out.readlines() try: bps = int(lines[-1].split(',')[-1].strip('\n')) except Exception, e: self.warn("iperf parsing exception: %s" % e) bps = 0 return bps class MgenCmd(ClientServerCmd): ''' Run a test traffic flow using an MGEN sender and receiver. ''' def __init__(self, node, client_node, verbose=False, addr=None, time=10, rate=512): ClientServerCmd.__init__(self, node, client_node, verbose) self.addr = addr self.time = time self.args = ['mgen', 'event', 'listen udp 5000', 'output', '/var/log/mgen.log'] self.rate = rate sendevent = "ON 1 UDP DST %s/5000 PERIODIC [%s]" % \ (addr, self.mgenrate(self.rate)) stopevent = "%s OFF 1" % time self.client_args = ['mgen', 'event', sendevent, 'event', stopevent, 'output', '/var/log/mgen.log'] @staticmethod def mgenrate(kbps): ''' Return a MGEN periodic rate string for the given kilobits-per-sec. Assume 1500 byte MTU, 20-byte IP + 8-byte UDP headers, leaving 1472 bytes for data. ''' bps = (kbps / 8) * 1000.0 maxdata = 1472 pps = math.ceil(bps / maxdata) return "%s %s" % (pps, maxdata) def run(self): if self.verbose: self.info("Launching the MGEN receiver on %s..." % self.node.name) self.info("Running the MGEN sender on %s (%s seconds)..." % \ (self.client_node.name, self.time)) return ClientServerCmd.run(self) def cleanup(self): ''' Close the Popen channels.''' self.stdin.close() self.out.close() self.err.close() tmp = self.id.wait() # non-zero mgen exit status OK def parse(self): ''' Check MGEN receiver's log file for packet sequence numbers, and return the percentage of lost packets. ''' logfile = os.path.join(self.node.nodedir, 'var.log/mgen.log') f = open(logfile, 'r') numlost = 0 lastseq = 0 for line in f.readlines(): fields = line.split() if fields[1] != 'RECV': continue try: seq = int(fields[4].split('>')[1]) except: self.info("Unexpected MGEN line:\n%s" % fields) if seq > (lastseq + 1): numlost += seq - (lastseq + 1) lastseq = seq f.close() if lastseq > 0: loss = 100.0 * numlost / lastseq else: loss = 0 if self.verbose: self.info("Receiver log shows %d of %d packets lost" % \ (numlost, lastseq)) return loss class Experiment(object): ''' Experiment object to organize tests. ''' def __init__(self, opt, start): ''' Initialize with opt and start time. ''' self.session = None # node list self.nodes = [] # WLAN network self.net = None self.verbose = opt.verbose # dict from OptionParser self.opt = opt self.start = start self.numping = opt.numping self.numiperf = opt.numiperf self.nummgen = opt.nummgen self.logbegin() def info(self, msg): ''' Utility method for writing output to stdout. ''' print msg sys.stdout.flush() self.log(msg) def warn(self, msg): ''' Utility method for writing output to stderr. ''' print >> sys.stderr, msg sys.stderr.flush() self.log(msg) def logbegin(self): ''' Start logging. 
''' self.logfp = None if not self.opt.logfile: return self.logfp = open(self.opt.logfile, "w") self.log("%s begin: %s\n" % (sys.argv[0], self.start.ctime())) self.log("%s args: %s\n" % (sys.argv[0], sys.argv[1:])) (sysname, rel, ver, machine, nodename) = os.uname() self.log("%s %s %s %s on %s" % (sysname, rel, ver, machine, nodename)) def logend(self): ''' End logging. ''' if not self.logfp: return end = datetime.datetime.now() self.log("%s end: %s (%s)\n" % \ (sys.argv[0], end.ctime(), end - self.start)) self.logfp.flush() self.logfp.close() self.logfp = None def log(self, msg): ''' Write to the log file, if any. ''' if not self.logfp: return print >> self.logfp, msg def reset(self): ''' Prepare for another experiment run. ''' if self.session: self.session.shutdown() del self.session self.session = None self.nodes = [] self.net = None def createbridgedsession(self, numnodes, verbose = False): ''' Build a topology consisting of the given number of LxcNodes connected to a WLAN. ''' # IP subnet prefix = ipaddr.IPv4Prefix("10.0.0.0/16") self.session = pycore.Session() # emulated network self.net = self.session.addobj(cls = pycore.nodes.WlanNode, name = "wlan1") prev = None for i in xrange(1, numnodes + 1): addr = "%s/%s" % (prefix.addr(i), 32) tmp = self.session.addobj(cls = pycore.nodes.CoreNode, objid = i, name = "n%d" % i) tmp.newnetif(self.net, [addr]) self.nodes.append(tmp) self.session.services.addservicestonode(tmp, "router", "IPForward", self.verbose) self.session.services.bootnodeservices(tmp) self.staticroutes(i, prefix, numnodes) # link each node in a chain, with the previous node if prev: self.net.link(prev.netif(0), tmp.netif(0)) prev = tmp def createemanesession(self, numnodes, verbose = False, cls = None, values = None): ''' Build a topology consisting of the given number of LxcNodes connected to an EMANE WLAN. ''' prefix = ipaddr.IPv4Prefix("10.0.0.0/16") self.session = pycore.Session() self.session.node_count = str(numnodes + 1) self.session.master = True self.session.location.setrefgeo(47.57917,-122.13232,2.00000) self.session.location.refscale = 150.0 self.session.cfg['emane_models'] = "RfPipe, Ieee80211abg, Bypass" self.session.emane.loadmodels() self.net = self.session.addobj(cls = pycore.nodes.EmaneNode, objid = numnodes + 1, name = "wlan1") self.net.verbose = verbose #self.session.emane.addobj(self.net) for i in xrange(1, numnodes + 1): addr = "%s/%s" % (prefix.addr(i), 32) tmp = self.session.addobj(cls = pycore.nodes.CoreNode, objid = i, name = "n%d" % i) #tmp.setposition(i * 20, 50, None) tmp.setposition(50, 50, None) tmp.newnetif(self.net, [addr]) self.nodes.append(tmp) self.session.services.addservicestonode(tmp, "router", "IPForward", self.verbose) if values is None: values = cls.getdefaultvalues() self.session.emane.setconfig(self.net.objid, cls._name, values) self.session.instantiate() self.info("waiting %s sec (TAP bring-up)" % 2) time.sleep(2) for i in xrange(1, numnodes + 1): tmp = self.nodes[i-1] self.session.services.bootnodeservices(tmp) self.staticroutes(i, prefix, numnodes) def setnodes(self): ''' Set the sender and receiver nodes for use in this experiment, along with the address of the receiver to be used. ''' self.firstnode = self.nodes[0] self.lastnode = self.nodes[-1] self.lastaddr = self.lastnode.netif(0).addrlist[0].split('/')[0] def staticroutes(self, i, prefix, numnodes): ''' Add static routes on node number i to the other nodes in the chain. 
''' routecmd = ["/sbin/ip", "route", "add"] node = self.nodes[i-1] neigh_left = "" neigh_right = "" # add direct interface routes first if i > 1: neigh_left = "%s" % prefix.addr(i - 1) cmd = routecmd + [neigh_left, "dev", node.netif(0).name] (status, result) = node.cmdresult(cmd) if status != 0: self.warn("failed to add interface route: %s" % cmd) if i < numnodes: neigh_right = "%s" % prefix.addr(i + 1) cmd = routecmd + [neigh_right, "dev", node.netif(0).name] (status, result) = node.cmdresult(cmd) if status != 0: self.warn("failed to add interface route: %s" % cmd) # add static routes to all other nodes via left/right neighbors for j in xrange(1, numnodes + 1): if abs(j - i) < 2: continue addr = "%s" % prefix.addr(j) if j < i: gw = neigh_left else: gw = neigh_right cmd = routecmd + [addr, "via", gw] (status, result) = node.cmdresult(cmd) if status != 0: self.warn("failed to add route: %s" % cmd) def setpathloss(self, numnodes): ''' Send EMANE pathloss events to connect all NEMs in a chain. ''' if self.session.emane.version < self.session.emane.EMANE091: service = emaneeventservice.EventService() e = emaneeventpathloss.EventPathloss(1) old = True else: if self.session.emane.version == self.session.emane.EMANE091: dev = 'lo' else: dev = self.session.obj('ctrlnet').brname service = EventService(eventchannel=("224.1.2.8", 45703, dev), otachannel=None) old = False for i in xrange(1, numnodes + 1): rxnem = i # inform rxnem that it can hear node to the left with 10dB noise txnem = rxnem - 1 if txnem > 0: if old: e.set(0, txnem, 10.0, 10.0) service.publish(emaneeventpathloss.EVENT_ID, emaneeventservice.PLATFORMID_ANY, rxnem, emaneeventservice.COMPONENTID_ANY, e.export()) else: e = PathlossEvent() e.append(txnem, forward=10.0, reverse=10.0) service.publish(rxnem, e) # inform rxnem that it can hear node to the right with 10dB noise txnem = rxnem + 1 if txnem > numnodes: continue if old: e.set(0, txnem, 10.0, 10.0) service.publish(emaneeventpathloss.EVENT_ID, emaneeventservice.PLATFORMID_ANY, rxnem, emaneeventservice.COMPONENTID_ANY, e.export()) else: e = PathlossEvent() e.append(txnem, forward=10.0, reverse=10.0) service.publish(rxnem, e) def setneteffects(self, bw = None, delay = None): ''' Set link effects for all interfaces attached to the network node. ''' if not self.net: self.warn("failed to set effects: no network node") return for netif in self.net.netifs(): self.net.linkconfig(netif, bw = bw, delay = delay) def runalltests(self, title=""): ''' Convenience helper to run all defined experiment tests. If tests are run multiple times, this returns the average of those runs. 
''' duration = self.opt.duration rate = self.opt.rate if len(title) > 0: self.info("----- running %s tests (duration=%s, rate=%s) -----" % \ (title, duration, rate)) (latency, mdev, throughput, cpu, loss) = (0,0,0,0,0) self.info("number of runs: ping=%d, iperf=%d, mgen=%d" % \ (self.numping, self.numiperf, self.nummgen)) if self.numping > 0: (latency, mdev) = self.pingtest(count=self.numping) if self.numiperf > 0: throughputs = [] for i in range(1, self.numiperf + 1): throughput = self.iperftest(time=duration) if self.numiperf > 1: throughputs += throughput time.sleep(1) # iperf is very CPU intensive if self.numiperf > 1: throughput = sum(throughputs) / len(throughputs) self.info("throughputs=%s" % ["%.2f" % v for v in throughputs]) if self.nummgen > 0: cpus = [] losses = [] for i in range(1, self.nummgen + 1): (cpu, loss) = self.cputest(time=duration, rate=rate) if self.nummgen > 1: cpus += cpu, losses += loss, if self.nummgen > 1: cpu = sum(cpus) / len(cpus) loss = sum(losses) / len(losses) self.info("cpus=%s" % ["%.2f" % v for v in cpus]) self.info("losses=%s" % ["%.2f" % v for v in losses]) return (latency, mdev, throughput, cpu, loss) def pingtest(self, count=50): ''' Ping through a chain of nodes and report the average latency. ''' p = PingCmd(node=self.firstnode, verbose=self.verbose, addr = self.lastaddr, count=count, interval=0.1).run() (latency, mdev) = p self.info("latency (ms): %.03f, %.03f" % (latency, mdev)) return p def iperftest(self, time=10): ''' Run iperf through a chain of nodes and report the maximum throughput. ''' bps = IperfCmd(node=self.lastnode, client_node=self.firstnode, verbose=False, addr=self.lastaddr, time=time).run() self.info("throughput (bps): %s" % bps) return bps def cputest(self, time=10, rate=512): ''' Run MGEN through a chain of nodes and report the CPU usage and percent of lost packets. Rate is in kbps. ''' if self.verbose: self.info("%s initial test ping (max 1 second)..." % \ self.firstnode.name) (status, result) = self.firstnode.cmdresult(["ping", "-q", "-c", "1", "-w", "1", self.lastaddr]) if status != 0: self.warn("initial ping from %s to %s failed! result:\n%s" % \ (self.firstnode.name, self.lastaddr, result)) return (0.0, 0.0) lines = readstat() cpustart = getcputimes(lines[0]) loss = MgenCmd(node=self.lastnode, client_node=self.firstnode, verbose=False, addr=self.lastaddr, time=time, rate=rate).run() lines = readstat() cpuend = getcputimes(lines[0]) percent = calculatecpu(cpustart, cpuend) self.info("CPU usage (%%): %.02f, %.02f loss" % (percent, loss)) return percent, loss def main(): ''' Main routine when running from command-line. 
''' usagestr = "usage: %prog [-h] [options] [args]" parser = optparse.OptionParser(usage = usagestr) parser.set_defaults(numnodes = 10, delay = 3, duration = 10, rate = 512, verbose = False, numping = 50, numiperf = 1, nummgen = 1) parser.add_option("-d", "--delay", dest = "delay", type = float, help = "wait time before testing") parser.add_option("-l", "--logfile", dest = "logfile", type = str, help = "log detailed output to the specified file") parser.add_option("-n", "--numnodes", dest = "numnodes", type = int, help = "number of nodes") parser.add_option("-r", "--rate", dest = "rate", type = float, help = "kbps rate to use for MGEN CPU tests") parser.add_option("--numping", dest = "numping", type = int, help = "number of ping latency test runs") parser.add_option("--numiperf", dest = "numiperf", type = int, help = "number of iperf throughput test runs") parser.add_option("--nummgen", dest = "nummgen", type = int, help = "number of MGEN CPU tests runs") parser.add_option("-t", "--time", dest = "duration", type = int, help = "duration in seconds of throughput and CPU tests") parser.add_option("-v", "--verbose", dest = "verbose", action = "store_true", help = "be more verbose") def usage(msg = None, err = 0): sys.stdout.write("\n") if msg: sys.stdout.write(msg + "\n\n") parser.print_help() sys.exit(err) # parse command line opt (opt, args) = parser.parse_args() if opt.numnodes < 2: usage("invalid numnodes: %s" % opt.numnodes) if opt.delay < 0.0: usage("invalid delay: %s" % opt.delay) if opt.rate < 0.0: usage("invalid rate: %s" % opt.rate) for a in args: sys.stderr.write("ignoring command line argument: '%s'\n" % a) results = {} starttime = datetime.datetime.now() exp = Experiment(opt = opt, start=starttime) exp.info("Starting wlanemanetests.py tests %s" % starttime.ctime()) # system sanity checks here emanever, emaneverstr = Emane.detectversionfromcmd() if opt.verbose: exp.info("Detected EMANE version %s" % (emaneverstr,)) # bridged exp.info("setting up bridged tests 1/2 no link effects") exp.info("creating topology: numnodes = %s" % \ (opt.numnodes, )) exp.createbridgedsession(numnodes=opt.numnodes, verbose=opt.verbose) exp.setnodes() exp.info("waiting %s sec (node/route bring-up)" % opt.delay) time.sleep(opt.delay) results['0 bridged'] = exp.runalltests("bridged") exp.info("done; elapsed time: %s" % (datetime.datetime.now() - exp.start)) # bridged with netem exp.info("setting up bridged tests 2/2 with netem") exp.setneteffects(bw=54000000, delay=0) exp.info("waiting %s sec (queue bring-up)" % opt.delay) results['1.0 netem'] = exp.runalltests("netem") exp.info("shutting down bridged session") # bridged with netem (1 Mbps,200ms) exp.info("setting up bridged tests 3/2 with netem") exp.setneteffects(bw=1000000, delay=20000) exp.info("waiting %s sec (queue bring-up)" % opt.delay) results['1.2 netem_1M'] = exp.runalltests("netem_1M") exp.info("shutting down bridged session") # bridged with netem (54 kbps,500ms) exp.info("setting up bridged tests 3/2 with netem") exp.setneteffects(bw=54000, delay=100000) exp.info("waiting %s sec (queue bring-up)" % opt.delay) results['1.4 netem_54K'] = exp.runalltests("netem_54K") exp.info("shutting down bridged session") exp.reset() # EMANE bypass model exp.info("setting up EMANE tests 1/2 with bypass model") exp.createemanesession(numnodes=opt.numnodes, verbose=opt.verbose, cls=EmaneBypassModel, values=None) exp.setnodes() exp.info("waiting %s sec (node/route bring-up)" % opt.delay) time.sleep(opt.delay) results['2.0 bypass'] = exp.runalltests("bypass") 
exp.info("shutting down bypass session") exp.reset() exp.info("waiting %s sec (between EMANE tests)" % opt.delay) time.sleep(opt.delay) # EMANE RF-PIPE model: no restrictions (max datarate) exp.info("setting up EMANE tests 2/4 with RF-PIPE model") rfpipevals = list(EmaneRfPipeModel.getdefaultvalues()) rfpnames = EmaneRfPipeModel.getnames() rfpipevals[ rfpnames.index('datarate') ] = '4294967295' # max value if emanever < Emane.EMANE091: rfpipevals[ rfpnames.index('pathlossmode') ] = '2ray' rfpipevals[ rfpnames.index('defaultconnectivitymode') ] = '1' else: rfpipevals[ rfpnames.index('propagationmodel') ] = '2ray' exp.createemanesession(numnodes=opt.numnodes, verbose=opt.verbose, cls=EmaneRfPipeModel, values=rfpipevals) exp.setnodes() exp.info("waiting %s sec (node/route bring-up)" % opt.delay) time.sleep(opt.delay) results['3.0 rfpipe'] = exp.runalltests("rfpipe") exp.info("shutting down RF-PIPE session") exp.reset() # EMANE RF-PIPE model: 54M datarate exp.info("setting up EMANE tests 3/4 with RF-PIPE model 54M") rfpipevals = list(EmaneRfPipeModel.getdefaultvalues()) rfpnames = EmaneRfPipeModel.getnames() rfpipevals[ rfpnames.index('datarate') ] = '54000000' # TX delay != propagation delay #rfpipevals[ rfpnames.index('delay') ] = '5000' if emanever < Emane.EMANE091: rfpipevals[ rfpnames.index('pathlossmode') ] = '2ray' rfpipevals[ rfpnames.index('defaultconnectivitymode') ] = '1' else: rfpipevals[ rfpnames.index('propagationmodel') ] = '2ray' exp.createemanesession(numnodes=opt.numnodes, verbose=opt.verbose, cls=EmaneRfPipeModel, values=rfpipevals) exp.setnodes() exp.info("waiting %s sec (node/route bring-up)" % opt.delay) time.sleep(opt.delay) results['4.0 rfpipe54m'] = exp.runalltests("rfpipe54m") exp.info("shutting down RF-PIPE session") exp.reset() # EMANE RF-PIPE model: 54K datarate exp.info("setting up EMANE tests 4/4 with RF-PIPE model pathloss") rfpipevals = list(EmaneRfPipeModel.getdefaultvalues()) rfpnames = EmaneRfPipeModel.getnames() rfpipevals[ rfpnames.index('datarate') ] = '54000' if emanever < Emane.EMANE091: rfpipevals[ rfpnames.index('pathlossmode') ] = 'pathloss' rfpipevals[ rfpnames.index('defaultconnectivitymode') ] = '0' else: rfpipevals[ rfpnames.index('propagationmodel') ] = 'precomputed' exp.createemanesession(numnodes=opt.numnodes, verbose=opt.verbose, cls=EmaneRfPipeModel, values=rfpipevals) exp.setnodes() exp.info("waiting %s sec (node/route bring-up)" % opt.delay) time.sleep(opt.delay) exp.info("sending pathloss events to govern connectivity") exp.setpathloss(opt.numnodes) results['5.0 pathloss'] = exp.runalltests("pathloss") exp.info("shutting down RF-PIPE session") exp.reset() # EMANE RF-PIPE model (512K, 200ms) exp.info("setting up EMANE tests 4/4 with RF-PIPE model pathloss") rfpipevals = list(EmaneRfPipeModel.getdefaultvalues()) rfpnames = EmaneRfPipeModel.getnames() rfpipevals[ rfpnames.index('datarate') ] = '512000' rfpipevals[ rfpnames.index('delay') ] = '200' rfpipevals[ rfpnames.index('pathlossmode') ] = 'pathloss' rfpipevals[ rfpnames.index('defaultconnectivitymode') ] = '0' exp.createemanesession(numnodes=opt.numnodes, verbose=opt.verbose, cls=EmaneRfPipeModel, values=rfpipevals) exp.setnodes() exp.info("waiting %s sec (node/route bring-up)" % opt.delay) time.sleep(opt.delay) exp.info("sending pathloss events to govern connectivity") exp.setpathloss(opt.numnodes) results['5.1 pathloss'] = exp.runalltests("pathloss") exp.info("shutting down RF-PIPE session") exp.reset() # summary of results in CSV format exp.info("----- summary of results (%s nodes, 
rate=%s, duration=%s) -----" \ % (opt.numnodes, opt.rate, opt.duration)) exp.info("netname:latency,mdev,throughput,cpu,loss") for test in sorted(results.keys()): (latency, mdev, throughput, cpu, loss) = results[test] exp.info("%s:%.03f,%.03f,%d,%.02f,%.02f" % \ (test, latency, mdev, throughput, cpu,loss)) exp.logend() return exp if __name__ == "__main__": exp = main()
D3f0/coreemu
daemon/examples/netns/wlanemanetests.py
Python
bsd-2-clause
33,033
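One piece of arithmetic in the script above that is easy to misread is MgenCmd.mgenrate(), which turns a kilobits-per-second figure into an MGEN "packets-per-second payload-size" string for 1472-byte payloads (1500-byte MTU minus 20-byte IP and 8-byte UDP headers). Below is a standalone re-derivation of the default 512 kbps case, written so it also runs under Python 3; the '//' stands in for the Python 2 integer division of the original when kbps is an int.

import math

def mgenrate(kbps):
    # Mirrors MgenCmd.mgenrate() from wlanemanetests.py above.
    bps = (kbps // 8) * 1000.0
    maxdata = 1472
    pps = math.ceil(bps / maxdata)
    return "%s %s" % (pps, maxdata)

# 512 kbps -> 64000 bytes/sec -> ceil(64000 / 1472) = ceil(43.48) = 44 packets/sec
print(mgenrate(512))   # "44 1472" (the Python 2 original formats this as "44.0 1472")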
"""Testing the pytest fixtures themselves which are declared in conftest.py.""" import pytest import responses import requests from requests.exceptions import ConnectionError from olympia.access.models import Group def test_admin_group(admin_group): assert Group.objects.count() == 1 admin_group = Group.objects.get() assert admin_group.name == 'Admins' assert admin_group.rules == '*:*' def test_mozilla_user(mozilla_user): admin_group = mozilla_user.groups.get() assert admin_group.name == 'Admins' assert admin_group.rules == '*:*' @pytest.mark.allow_external_http_requests def test_external_requests_enabled(): with pytest.raises(ConnectionError): requests.get('http://example.invalid') assert len(responses.calls) == 0 def test_external_requests_disabled_by_default(): with pytest.raises(ConnectionError): requests.get('http://example.invalid') assert len(responses.calls) == 1
aviarypl/mozilla-l10n-addons-server
src/olympia/amo/tests/test_pytest_fixtures.py
Python
bsd-3-clause
955
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (C) 2017 Google # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # ---------------------------------------------------------------------------- # # *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** # # ---------------------------------------------------------------------------- # # This file is automatically generated by Magic Modules and manual # changes will be clobbered when the file is regenerated. # # Please read more about how to change this file at # https://www.github.com/GoogleCloudPlatform/magic-modules # # ---------------------------------------------------------------------------- from __future__ import absolute_import, division, print_function __metaclass__ = type ################################################################################ # Documentation ################################################################################ ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: gcp_sql_user_info description: - Gather info for GCP User short_description: Gather info for GCP User version_added: '2.8' author: Google Inc. (@googlecloudplatform) requirements: - python >= 2.6 - requests >= 2.18.4 - google-auth >= 1.3.0 options: instance: description: - The name of the Cloud SQL instance. This does not include the project ID. - 'This field represents a link to a Instance resource in GCP. It can be specified in two ways. First, you can place a dictionary with key ''name'' and value of your resource''s name Alternatively, you can add `register: name-of-resource` to a gcp_sql_instance task and then set this instance field to "{{ name-of-resource }}"' required: true type: dict project: description: - The Google Cloud Platform project to use. type: str auth_kind: description: - The type of credential used. type: str required: true choices: - application - machineaccount - serviceaccount service_account_contents: description: - The contents of a Service Account JSON file, either in a dictionary or as a JSON string that represents it. type: jsonarg service_account_file: description: - The path of a Service Account JSON file if serviceaccount is selected as type. type: path service_account_email: description: - An optional service account email address if machineaccount is selected and the user does not wish to use the default email. type: str scopes: description: - Array of scopes to be used type: list env_type: description: - Specifies which Ansible environment you're running this module within. - This should not be set unless you know what you're doing. - This only alters the User Agent string for any API requests. type: str notes: - for authentication, you can set service_account_file using the C(gcp_service_account_file) env variable. - for authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS) env variable. - For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL) env variable. - For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable. - For authentication, you can set scopes using the C(GCP_SCOPES) env variable. - Environment variables values will only be used if the playbook values are not set. - The I(service_account_email) and I(service_account_file) options are mutually exclusive. 
''' EXAMPLES = ''' - name: get info on a user gcp_sql_user_info: instance: "{{ instance }}" project: test_project auth_kind: serviceaccount service_account_file: "/tmp/auth.pem" ''' RETURN = ''' resources: description: List of resources returned: always type: complex contains: host: description: - The host name from which the user can connect. For insert operations, host defaults to an empty string. For update operations, host is specified as part of the request URL. The host name cannot be updated after insertion. returned: success type: str name: description: - The name of the user in the Cloud SQL instance. returned: success type: str instance: description: - The name of the Cloud SQL instance. This does not include the project ID. returned: success type: dict password: description: - The password for the user. returned: success type: str ''' ################################################################################ # Imports ################################################################################ from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict import json ################################################################################ # Main ################################################################################ def main(): module = GcpModule(argument_spec=dict(instance=dict(required=True, type='dict'))) if not module.params['scopes']: module.params['scopes'] = ['https://www.googleapis.com/auth/sqlservice.admin'] return_value = {'resources': fetch_list(module, collection(module))} module.exit_json(**return_value) def collection(module): res = {'project': module.params['project'], 'instance': replace_resource_dict(module.params['instance'], 'name')} return "https://www.googleapis.com/sql/v1beta4/projects/{project}/instances/{instance}/users".format(**res) def fetch_list(module, link): auth = GcpSession(module, 'sql') return auth.list(link, return_if_object, array_name='items') def return_if_object(module, response): # If not found, return nothing. if response.status_code == 404: return None # If no content, return nothing. if response.status_code == 204: return None try: module.raise_for_status(response) result = response.json() except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst: module.fail_json(msg="Invalid JSON response with error: %s" % inst) if navigate_hash(result, ['error', 'errors']): module.fail_json(msg=navigate_hash(result, ['error', 'errors'])) return result if __name__ == "__main__": main()
anryko/ansible
lib/ansible/modules/cloud/google/gcp_sql_user_info.py
Python
gpl-3.0
6,527
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # (c) 2017 Red Hat Inc. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # from contextlib import contextmanager from xml.etree.ElementTree import Element, SubElement, fromstring, tostring from ansible.module_utils.connection import exec_command NS_MAP = {'nc': "urn:ietf:params:xml:ns:netconf:base:1.0"} def send_request(module, obj, check_rc=True): request = tostring(obj) rc, out, err = exec_command(module, request) if rc != 0 and check_rc: error_root = fromstring(err) fake_parent = Element('root') fake_parent.append(error_root) error_list = fake_parent.findall('.//nc:rpc-error', NS_MAP) if not error_list: module.fail_json(msg=str(err)) warnings = [] for rpc_error in error_list: message = rpc_error.find('./nc:error-message', NS_MAP).text severity = rpc_error.find('./nc:error-severity', NS_MAP).text if severity == 'warning': warnings.append(message) else: module.fail_json(msg=str(err)) return warnings return fromstring(out) def children(root, iterable): for item in iterable: try: ele = SubElement(ele, item) except NameError: ele = SubElement(root, item) def lock(module, target='candidate'): obj = Element('lock') children(obj, ('target', target)) return send_request(module, obj) def unlock(module, target='candidate'): obj = Element('unlock') children(obj, ('target', target)) return send_request(module, obj) def commit(module): return send_request(module, Element('commit')) def discard_changes(module): return send_request(module, Element('discard-changes')) def validate(module): obj = Element('validate') children(obj, ('source', 'candidate')) return send_request(module, obj) def get_config(module, source='running', filter=None): obj = Element('get-config') children(obj, ('source', source)) children(obj, ('filter', filter)) return send_request(module, obj) @contextmanager def locked_config(module): try: lock(module) yield finally: unlock(module)
tux-00/ansible
lib/ansible/module_utils/netconf.py
Python
gpl-3.0
3,772
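The children() helper above is terse: on the first iteration ele is unbound, so the NameError branch attaches the element to root, and every subsequent name nests under the element created before it. A standalone sketch (duplicating the helper so it runs on its own) showing the document it builds for the get_config case:

from xml.etree.ElementTree import Element, SubElement, tostring

def children(root, iterable):
    # Same helper as in the module above: nest each name under the previous one.
    for item in iterable:
        try:
            ele = SubElement(ele, item)
        except NameError:
            ele = SubElement(root, item)

obj = Element('get-config')
children(obj, ('source', 'running'))
print(tostring(obj))
# roughly: <get-config><source><running /></source></get-config>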
# -*- coding: utf-8 -*- ################################################################################## # # Copyright (c) 2005-2006 Axelor SARL. (http://www.axelor.com) # and 2004-2010 Tiny SPRL (<http://tiny.be>). # # $Id: hr.py 4656 2006-11-24 09:58:42Z Cyp $ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import datetime import math import time from operator import attrgetter from openerp.exceptions import Warning from openerp import tools from openerp.osv import fields, osv from openerp.tools.translate import _ class hr_holidays_status(osv.osv): _name = "hr.holidays.status" _description = "Leave Type" def get_days(self, cr, uid, ids, employee_id, context=None): result = dict((id, dict(max_leaves=0, leaves_taken=0, remaining_leaves=0, virtual_remaining_leaves=0)) for id in ids) holiday_ids = self.pool['hr.holidays'].search(cr, uid, [('employee_id', '=', employee_id), ('state', 'in', ['confirm', 'validate1', 'validate']), ('holiday_status_id', 'in', ids) ], context=context) for holiday in self.pool['hr.holidays'].browse(cr, uid, holiday_ids, context=context): status_dict = result[holiday.holiday_status_id.id] if holiday.type == 'add': status_dict['virtual_remaining_leaves'] += holiday.number_of_days_temp if holiday.state == 'validate': status_dict['max_leaves'] += holiday.number_of_days_temp status_dict['remaining_leaves'] += holiday.number_of_days_temp elif holiday.type == 'remove': # number of days is negative status_dict['virtual_remaining_leaves'] -= holiday.number_of_days_temp if holiday.state == 'validate': status_dict['leaves_taken'] += holiday.number_of_days_temp status_dict['remaining_leaves'] -= holiday.number_of_days_temp return result def _user_left_days(self, cr, uid, ids, name, args, context=None): employee_id = False if context and 'employee_id' in context: employee_id = context['employee_id'] else: employee_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context) if employee_ids: employee_id = employee_ids[0] if employee_id: res = self.get_days(cr, uid, ids, employee_id, context=context) else: res = dict((res_id, {'leaves_taken': 0, 'remaining_leaves': 0, 'max_leaves': 0}) for res_id in ids) return res _columns = { 'name': fields.char('Leave Type', size=64, required=True, translate=True), 'categ_id': fields.many2one('calendar.event.type', 'Meeting Type', help='Once a leave is validated, Odoo will create a corresponding meeting of this type in the calendar.'), 'color_name': fields.selection([('red', 'Red'),('blue','Blue'), ('lightgreen', 'Light Green'), ('lightblue','Light Blue'), ('lightyellow', 'Light Yellow'), ('magenta', 'Magenta'),('lightcyan', 'Light Cyan'),('black', 'Black'),('lightpink', 'Light Pink'),('brown', 'Brown'),('violet', 'Violet'),('lightcoral', 'Light Coral'),('lightsalmon', 'Light Salmon'),('lavender', 'Lavender'),('wheat', 
'Wheat'),('ivory', 'Ivory')],'Color in Report', required=True, help='This color will be used in the leaves summary located in Reporting\Leaves by Department.'), 'limit': fields.boolean('Allow to Override Limit', help='If you select this check box, the system allows the employees to take more leaves than the available ones for this type and will not take them into account for the "Remaining Legal Leaves" defined on the employee form.'), 'active': fields.boolean('Active', help="If the active field is set to false, it will allow you to hide the leave type without removing it."), 'max_leaves': fields.function(_user_left_days, string='Maximum Allowed', help='This value is given by the sum of all holidays requests with a positive value.', multi='user_left_days'), 'leaves_taken': fields.function(_user_left_days, string='Leaves Already Taken', help='This value is given by the sum of all holidays requests with a negative value.', multi='user_left_days'), 'remaining_leaves': fields.function(_user_left_days, string='Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken', multi='user_left_days'), 'virtual_remaining_leaves': fields.function(_user_left_days, string='Virtual Remaining Leaves', help='Maximum Leaves Allowed - Leaves Already Taken - Leaves Waiting Approval', multi='user_left_days'), 'double_validation': fields.boolean('Apply Double Validation', help="When selected, the Allocation/Leave Requests for this type require a second validation to be approved."), } _defaults = { 'color_name': 'red', 'active': True, } def name_get(self, cr, uid, ids, context=None): if context is None: context = {} if not context.get('employee_id',False): # leave counts is based on employee_id, would be inaccurate if not based on correct employee return super(hr_holidays_status, self).name_get(cr, uid, ids, context=context) res = [] for record in self.browse(cr, uid, ids, context=context): name = record.name if not record.limit: name = name + (' (%g/%g)' % (record.leaves_taken or 0.0, record.max_leaves or 0.0)) res.append((record.id, name)) return res class hr_holidays(osv.osv): _name = "hr.holidays" _description = "Leave" _order = "type desc, date_from asc" _inherit = ['mail.thread', 'ir.needaction_mixin'] _track = { 'state': { 'hr_holidays.mt_holidays_approved': lambda self, cr, uid, obj, ctx=None: obj.state == 'validate', 'hr_holidays.mt_holidays_refused': lambda self, cr, uid, obj, ctx=None: obj.state == 'refuse', 'hr_holidays.mt_holidays_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state == 'confirm', }, } def _employee_get(self, cr, uid, context=None): emp_id = context.get('default_employee_id', False) if emp_id: return emp_id ids = self.pool.get('hr.employee').search(cr, uid, [('user_id', '=', uid)], context=context) if ids: return ids[0] return False def _compute_number_of_days(self, cr, uid, ids, name, args, context=None): result = {} for hol in self.browse(cr, uid, ids, context=context): if hol.type=='remove': result[hol.id] = -hol.number_of_days_temp else: result[hol.id] = hol.number_of_days_temp return result def _get_can_reset(self, cr, uid, ids, name, arg, context=None): """User can reset a leave request if it is its own leave request or if he is an Hr Manager. 
""" user = self.pool['res.users'].browse(cr, uid, uid, context=context) group_hr_manager_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'group_hr_manager')[1] if group_hr_manager_id in [g.id for g in user.groups_id]: return dict.fromkeys(ids, True) result = dict.fromkeys(ids, False) for holiday in self.browse(cr, uid, ids, context=context): if holiday.employee_id and holiday.employee_id.user_id and holiday.employee_id.user_id.id == uid: result[holiday.id] = True return result def _check_date(self, cr, uid, ids, context=None): for holiday in self.browse(cr, uid, ids, context=context): domain = [ ('date_from', '<=', holiday.date_to), ('date_to', '>=', holiday.date_from), ('employee_id', '=', holiday.employee_id.id), ('id', '!=', holiday.id), ('state', 'not in', ['cancel', 'refuse']), ] nholidays = self.search_count(cr, uid, domain, context=context) if nholidays: return False return True _check_holidays = lambda self, cr, uid, ids, context=None: self.check_holidays(cr, uid, ids, context=context) _columns = { 'name': fields.char('Description', size=64), 'state': fields.selection([('draft', 'To Submit'), ('cancel', 'Cancelled'),('confirm', 'To Approve'), ('refuse', 'Refused'), ('validate1', 'Second Approval'), ('validate', 'Approved')], 'Status', readonly=True, track_visibility='onchange', copy=False, help='The status is set to \'To Submit\', when a holiday request is created.\ \nThe status is \'To Approve\', when holiday request is confirmed by user.\ \nThe status is \'Refused\', when holiday request is refused by manager.\ \nThe status is \'Approved\', when holiday request is approved by manager.'), 'user_id':fields.related('employee_id', 'user_id', type='many2one', relation='res.users', string='User', store=True), 'date_from': fields.datetime('Start Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, select=True, copy=False), 'date_to': fields.datetime('End Date', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False), 'holiday_status_id': fields.many2one("hr.holidays.status", "Leave Type", required=True,readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'employee_id': fields.many2one('hr.employee', "Employee", select=True, invisible=False, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'manager_id': fields.many2one('hr.employee', 'First Approval', invisible=False, readonly=True, copy=False, help='This area is automatically filled by the user who validate the leave'), 'notes': fields.text('Reasons',readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'number_of_days_temp': fields.float('Allocation', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, copy=False), 'number_of_days': fields.function(_compute_number_of_days, string='Number of Days', store=True), 'meeting_id': fields.many2one('calendar.event', 'Meeting'), 'type': fields.selection([('remove','Leave Request'),('add','Allocation Request')], 'Request Type', required=True, readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help="Choose 'Leave Request' if someone wants to take an off-day. 
\nChoose 'Allocation Request' if you want to increase the number of leaves available for someone", select=True), 'parent_id': fields.many2one('hr.holidays', 'Parent'), 'linked_request_ids': fields.one2many('hr.holidays', 'parent_id', 'Linked Requests',), 'department_id':fields.related('employee_id', 'department_id', string='Department', type='many2one', relation='hr.department', readonly=True, store=True), 'category_id': fields.many2one('hr.employee.category', "Employee Tag", help='Category of Employee', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}), 'holiday_type': fields.selection([('employee','By Employee'),('category','By Employee Tag')], 'Allocation Mode', readonly=True, states={'draft':[('readonly',False)], 'confirm':[('readonly',False)]}, help='By Employee: Allocation/Request for individual Employee, By Employee Tag: Allocation/Request for group of employees in category', required=True), 'manager_id2': fields.many2one('hr.employee', 'Second Approval', readonly=True, copy=False, help='This area is automaticly filled by the user who validate the leave with second level (If Leave type need second validation)'), 'double_validation': fields.related('holiday_status_id', 'double_validation', type='boolean', relation='hr.holidays.status', string='Apply Double Validation'), 'can_reset': fields.function( _get_can_reset, type='boolean'), } _defaults = { 'employee_id': _employee_get, 'state': 'confirm', 'type': 'remove', 'user_id': lambda obj, cr, uid, context: uid, 'holiday_type': 'employee' } _constraints = [ (_check_date, 'You can not have 2 leaves that overlaps on same day!', ['date_from','date_to']), (_check_holidays, 'The number of remaining leaves is not sufficient for this leave type', ['state','number_of_days_temp']) ] _sql_constraints = [ ('type_value', "CHECK( (holiday_type='employee' AND employee_id IS NOT NULL) or (holiday_type='category' AND category_id IS NOT NULL))", "The employee or employee category of this request is missing. 
Please make sure that your user login is linked to an employee."), ('date_check2', "CHECK ( (type='add') OR (date_from <= date_to))", "The start date must be anterior to the end date."), ('date_check', "CHECK ( number_of_days_temp >= 0 )", "The number of days must be greater than 0."), ] def _create_resource_leave(self, cr, uid, leaves, context=None): '''This method will create entry in resource calendar leave object at the time of holidays validated ''' obj_res_leave = self.pool.get('resource.calendar.leaves') for leave in leaves: vals = { 'name': leave.name, 'date_from': leave.date_from, 'holiday_id': leave.id, 'date_to': leave.date_to, 'resource_id': leave.employee_id.resource_id.id, 'calendar_id': leave.employee_id.resource_id.calendar_id.id } obj_res_leave.create(cr, uid, vals, context=context) return True def _remove_resource_leave(self, cr, uid, ids, context=None): '''This method will create entry in resource calendar leave object at the time of holidays cancel/removed''' obj_res_leave = self.pool.get('resource.calendar.leaves') leave_ids = obj_res_leave.search(cr, uid, [('holiday_id', 'in', ids)], context=context) return obj_res_leave.unlink(cr, uid, leave_ids, context=context) def onchange_type(self, cr, uid, ids, holiday_type, employee_id=False, context=None): result = {} if holiday_type == 'employee' and not employee_id: ids_employee = self.pool.get('hr.employee').search(cr, uid, [('user_id','=', uid)]) if ids_employee: result['value'] = { 'employee_id': ids_employee[0] } elif holiday_type != 'employee': result['value'] = { 'employee_id': False } return result def onchange_employee(self, cr, uid, ids, employee_id): result = {'value': {'department_id': False}} if employee_id: employee = self.pool.get('hr.employee').browse(cr, uid, employee_id) result['value'] = {'department_id': employee.department_id.id} return result # TODO: can be improved using resource calendar method def _get_number_of_days(self, date_from, date_to): """Returns a float equals to the timedelta between two dates given as string.""" DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S" from_dt = datetime.datetime.strptime(date_from, DATETIME_FORMAT) to_dt = datetime.datetime.strptime(date_to, DATETIME_FORMAT) timedelta = to_dt - from_dt diff_day = timedelta.days + float(timedelta.seconds) / 86400 return diff_day def unlink(self, cr, uid, ids, context=None): for rec in self.browse(cr, uid, ids, context=context): if rec.state not in ['draft', 'cancel', 'confirm']: raise osv.except_osv(_('Warning!'),_('You cannot delete a leave which is in %s state.')%(rec.state)) return super(hr_holidays, self).unlink(cr, uid, ids, context) def onchange_date_from(self, cr, uid, ids, date_to, date_from): """ If there are no date set for date_to, automatically set one 8 hours later than the date_from. Also update the number_of_days. 
""" # date_to has to be greater than date_from if (date_from and date_to) and (date_from > date_to): raise osv.except_osv(_('Warning!'),_('The start date must be anterior to the end date.')) result = {'value': {}} # No date_to set so far: automatically compute one 8 hours later if date_from and not date_to: date_to_with_delta = datetime.datetime.strptime(date_from, tools.DEFAULT_SERVER_DATETIME_FORMAT) + datetime.timedelta(hours=8) result['value']['date_to'] = str(date_to_with_delta) # Compute and update the number of days if (date_to and date_from) and (date_from <= date_to): diff_day = self._get_number_of_days(date_from, date_to) result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1 else: result['value']['number_of_days_temp'] = 0 return result def onchange_date_to(self, cr, uid, ids, date_to, date_from): """ Update the number_of_days. """ # date_to has to be greater than date_from if (date_from and date_to) and (date_from > date_to): raise osv.except_osv(_('Warning!'),_('The start date must be anterior to the end date.')) result = {'value': {}} # Compute and update the number of days if (date_to and date_from) and (date_from <= date_to): diff_day = self._get_number_of_days(date_from, date_to) result['value']['number_of_days_temp'] = round(math.floor(diff_day))+1 else: result['value']['number_of_days_temp'] = 0 return result def add_follower(self, cr, uid, ids, employee_id, context=None): employee = self.pool['hr.employee'].browse(cr, uid, employee_id, context=context) if employee.user_id: self.message_subscribe(cr, uid, ids, [employee.user_id.partner_id.id], context=context) def create(self, cr, uid, values, context=None): """ Override to avoid automatic logging of creation """ if context is None: context = {} employee_id = values.get('employee_id', False) context = dict(context, mail_create_nolog=True, mail_create_nosubscribe=True) if values.get('state') and values['state'] not in ['draft', 'confirm', 'cancel'] and not self.pool['res.users'].has_group(cr, uid, 'base.group_hr_user'): raise osv.except_osv(_('Warning!'), _('You cannot set a leave request as \'%s\'. Contact a human resource manager.') % values.get('state')) hr_holiday_id = super(hr_holidays, self).create(cr, uid, values, context=context) self.add_follower(cr, uid, [hr_holiday_id], employee_id, context=context) return hr_holiday_id def write(self, cr, uid, ids, vals, context=None): employee_id = vals.get('employee_id', False) if vals.get('state') and vals['state'] not in ['draft', 'confirm', 'cancel'] and not self.pool['res.users'].has_group(cr, uid, 'base.group_hr_user'): raise osv.except_osv(_('Warning!'), _('You cannot set a leave request as \'%s\'. 
Contact a human resource manager.') % vals.get('state')) hr_holiday_id = super(hr_holidays, self).write(cr, uid, ids, vals, context=context) self.add_follower(cr, uid, ids, employee_id, context=context) return hr_holiday_id def holidays_reset(self, cr, uid, ids, context=None): self.write(cr, uid, ids, { 'state': 'draft', 'manager_id': False, 'manager_id2': False, }) to_unlink = [] for record in self.browse(cr, uid, ids, context=context): for record2 in record.linked_request_ids: self.holidays_reset(cr, uid, [record2.id], context=context) to_unlink.append(record2.id) if to_unlink: self.unlink(cr, uid, to_unlink, context=context) return True def holidays_first_validate(self, cr, uid, ids, context=None): obj_emp = self.pool.get('hr.employee') ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)]) manager = ids2 and ids2[0] or False self.holidays_first_validate_notificate(cr, uid, ids, context=context) return self.write(cr, uid, ids, {'state':'validate1', 'manager_id': manager}) def holidays_validate(self, cr, uid, ids, context=None): obj_emp = self.pool.get('hr.employee') ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)]) manager = ids2 and ids2[0] or False self.write(cr, uid, ids, {'state':'validate'}) data_holiday = self.browse(cr, uid, ids) for record in data_holiday: if record.double_validation: self.write(cr, uid, [record.id], {'manager_id2': manager}) else: self.write(cr, uid, [record.id], {'manager_id': manager}) if record.holiday_type == 'employee' and record.type == 'remove': meeting_obj = self.pool.get('calendar.event') meeting_vals = { 'name': record.name or _('Leave Request'), 'categ_ids': record.holiday_status_id.categ_id and [(6,0,[record.holiday_status_id.categ_id.id])] or [], 'duration': record.number_of_days_temp * 8, 'description': record.notes, 'user_id': record.user_id.id, 'start': record.date_from, 'stop': record.date_to, 'allday': False, 'state': 'open', # to block that meeting date in the calendar 'class': 'confidential' } #Add the partner_id (if exist) as an attendee if record.user_id and record.user_id.partner_id: meeting_vals['partner_ids'] = [(4,record.user_id.partner_id.id)] ctx_no_email = dict(context or {}, no_email=True) meeting_id = meeting_obj.create(cr, uid, meeting_vals, context=ctx_no_email) self._create_resource_leave(cr, uid, [record], context=context) self.write(cr, uid, ids, {'meeting_id': meeting_id}) elif record.holiday_type == 'category': emp_ids = obj_emp.search(cr, uid, [('category_ids', 'child_of', [record.category_id.id])]) leave_ids = [] batch_context = dict(context, mail_notify_force_send=False) for emp in obj_emp.browse(cr, uid, emp_ids, context=context): vals = { 'name': record.name, 'type': record.type, 'holiday_type': 'employee', 'holiday_status_id': record.holiday_status_id.id, 'date_from': record.date_from, 'date_to': record.date_to, 'notes': record.notes, 'number_of_days_temp': record.number_of_days_temp, 'parent_id': record.id, 'employee_id': emp.id } leave_ids.append(self.create(cr, uid, vals, context=batch_context)) for leave_id in leave_ids: # TODO is it necessary to interleave the calls? 
for sig in ('confirm', 'validate', 'second_validate'): self.signal_workflow(cr, uid, [leave_id], sig) return True def holidays_confirm(self, cr, uid, ids, context=None): for record in self.browse(cr, uid, ids, context=context): if record.employee_id and record.employee_id.parent_id and record.employee_id.parent_id.user_id: self.message_subscribe_users(cr, uid, [record.id], user_ids=[record.employee_id.parent_id.user_id.id], context=context) return self.write(cr, uid, ids, {'state': 'confirm'}) def holidays_refuse(self, cr, uid, ids, context=None): obj_emp = self.pool.get('hr.employee') ids2 = obj_emp.search(cr, uid, [('user_id', '=', uid)]) manager = ids2 and ids2[0] or False for holiday in self.browse(cr, uid, ids, context=context): if holiday.state == 'validate1': self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id': manager}) else: self.write(cr, uid, [holiday.id], {'state': 'refuse', 'manager_id2': manager}) self.holidays_cancel(cr, uid, ids, context=context) return True def holidays_cancel(self, cr, uid, ids, context=None): for record in self.browse(cr, uid, ids, context=context): # Delete the meeting if record.meeting_id: record.meeting_id.unlink() # If a category that created several holidays, cancel all related self.signal_workflow(cr, uid, map(attrgetter('id'), record.linked_request_ids or []), 'refuse') self._remove_resource_leave(cr, uid, ids, context=context) return True def check_holidays(self, cr, uid, ids, context=None): for record in self.browse(cr, uid, ids, context=context): if record.holiday_type != 'employee' or record.type != 'remove' or not record.employee_id or record.holiday_status_id.limit: continue leave_days = self.pool.get('hr.holidays.status').get_days(cr, uid, [record.holiday_status_id.id], record.employee_id.id, context=context)[record.holiday_status_id.id] if leave_days['remaining_leaves'] < 0 or leave_days['virtual_remaining_leaves'] < 0: # Raising a warning gives a more user-friendly feedback than the default constraint error raise Warning(_('The number of remaining leaves is not sufficient for this leave type.\n' 'Please verify also the leaves waiting for validation.')) return True # ----------------------------- # OpenChatter and notifications # ----------------------------- def _needaction_domain_get(self, cr, uid, context=None): emp_obj = self.pool.get('hr.employee') empids = emp_obj.search(cr, uid, [('parent_id.user_id', '=', uid)], context=context) dom = ['&', ('state', '=', 'confirm'), ('employee_id', 'in', empids)] # if this user is a hr.manager, he should do second validations if self.pool.get('res.users').has_group(cr, uid, 'base.group_hr_manager'): dom = ['|'] + dom + [('state', '=', 'validate1')] return dom def holidays_first_validate_notificate(self, cr, uid, ids, context=None): for obj in self.browse(cr, uid, ids, context=context): self.message_post(cr, uid, [obj.id], _("Request approved, waiting second validation."), context=context) class resource_calendar_leaves(osv.osv): _inherit = "resource.calendar.leaves" _description = "Leave Detail" _columns = { 'holiday_id': fields.many2one("hr.holidays", "Leave Request"), } class hr_employee(osv.osv): _inherit="hr.employee" def create(self, cr, uid, vals, context=None): # don't pass the value of remaining leave if it's 0 at the creation time, otherwise it will trigger the inverse # function _set_remaining_days and the system may not be configured for. Note that we don't have this problem on # the write because the clients only send the fields that have been modified. 
if 'remaining_leaves' in vals and not vals['remaining_leaves']: del(vals['remaining_leaves']) return super(hr_employee, self).create(cr, uid, vals, context=context) def _set_remaining_days(self, cr, uid, empl_id, name, value, arg, context=None): employee = self.browse(cr, uid, empl_id, context=context) diff = value - employee.remaining_leaves type_obj = self.pool.get('hr.holidays.status') holiday_obj = self.pool.get('hr.holidays') # Find for holidays status status_ids = type_obj.search(cr, uid, [('limit', '=', False)], context=context) if len(status_ids) != 1 : raise osv.except_osv(_('Warning!'),_("The feature behind the field 'Remaining Legal Leaves' can only be used when there is only one leave type with the option 'Allow to Override Limit' unchecked. (%s Found). Otherwise, the update is ambiguous as we cannot decide on which leave type the update has to be done. \nYou may prefer to use the classic menus 'Leave Requests' and 'Allocation Requests' located in 'Human Resources \ Leaves' to manage the leave days of the employees if the configuration does not allow to use this field.") % (len(status_ids))) status_id = status_ids and status_ids[0] or False if not status_id: return False if diff > 0: leave_id = holiday_obj.create(cr, uid, {'name': _('Allocation for %s') % employee.name, 'employee_id': employee.id, 'holiday_status_id': status_id, 'type': 'add', 'holiday_type': 'employee', 'number_of_days_temp': diff}, context=context) elif diff < 0: raise osv.except_osv(_('Warning!'), _('You cannot reduce validated allocation requests')) else: return False for sig in ('confirm', 'validate', 'second_validate'): holiday_obj.signal_workflow(cr, uid, [leave_id], sig) return True def _get_remaining_days(self, cr, uid, ids, name, args, context=None): cr.execute("""SELECT sum(h.number_of_days) as days, h.employee_id from hr_holidays h join hr_holidays_status s on (s.id=h.holiday_status_id) where h.state='validate' and s.limit=False and h.employee_id in %s group by h.employee_id""", (tuple(ids),)) res = cr.dictfetchall() remaining = {} for r in res: remaining[r['employee_id']] = r['days'] for employee_id in ids: if not remaining.get(employee_id): remaining[employee_id] = 0.0 return remaining def _get_leave_status(self, cr, uid, ids, name, args, context=None): holidays_obj = self.pool.get('hr.holidays') holidays_id = holidays_obj.search(cr, uid, [('employee_id', 'in', ids), ('date_from','<=',time.strftime('%Y-%m-%d %H:%M:%S')), ('date_to','>=',time.strftime('%Y-%m-%d 23:59:59')),('type','=','remove'),('state','not in',('cancel','refuse'))], context=context) result = {} for id in ids: result[id] = { 'current_leave_state': False, 'current_leave_id': False, 'leave_date_from':False, 'leave_date_to':False, } for holiday in self.pool.get('hr.holidays').browse(cr, uid, holidays_id, context=context): result[holiday.employee_id.id]['leave_date_from'] = holiday.date_from result[holiday.employee_id.id]['leave_date_to'] = holiday.date_to result[holiday.employee_id.id]['current_leave_state'] = holiday.state result[holiday.employee_id.id]['current_leave_id'] = holiday.holiday_status_id.id return result def _leaves_count(self, cr, uid, ids, field_name, arg, context=None): Holidays = self.pool['hr.holidays'] return { employee_id: Holidays.search_count(cr,uid, [('employee_id', '=', employee_id), ('type', '=', 'remove')], context=context) for employee_id in ids } _columns = { 'remaining_leaves': fields.function(_get_remaining_days, string='Remaining Legal Leaves', fnct_inv=_set_remaining_days, type="float", help='Total 
number of legal leaves allocated to this employee, change this value to create allocation/leave request. Total based on all the leave types without overriding limit.'), 'current_leave_state': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Status", type="selection", selection=[('draft', 'New'), ('confirm', 'Waiting Approval'), ('refuse', 'Refused'), ('validate1', 'Waiting Second Approval'), ('validate', 'Approved'), ('cancel', 'Cancelled')]), 'current_leave_id': fields.function(_get_leave_status, multi="leave_status", string="Current Leave Type",type='many2one', relation='hr.holidays.status'), 'leave_date_from': fields.function(_get_leave_status, multi='leave_status', type='date', string='From Date'), 'leave_date_to': fields.function(_get_leave_status, multi='leave_status', type='date', string='To Date'), 'leaves_count': fields.function(_leaves_count, type='integer', string='Leaves'), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
ncliam/serverpos
openerp/addons/hr_holidays/hr_holidays.py
Python
agpl-3.0
34,256
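A minimal, ORM-free sketch of the day-count arithmetic used by the hr_holidays module above. The helper names here are hypothetical; only the formulas mirror _get_number_of_days and the floor-plus-one rounding applied in onchange_date_from / onchange_date_to.

import datetime
import math

DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"

def number_of_days(date_from, date_to):
    """Float timedelta in days between two datetime strings (same formula as _get_number_of_days)."""
    from_dt = datetime.datetime.strptime(date_from, DATETIME_FORMAT)
    to_dt = datetime.datetime.strptime(date_to, DATETIME_FORMAT)
    delta = to_dt - from_dt
    return delta.days + float(delta.seconds) / 86400

def leave_days_shown(date_from, date_to):
    """Value the onchange handlers store in number_of_days_temp."""
    return round(math.floor(number_of_days(date_from, date_to))) + 1

# e.g. an 8-hour leave within one day counts as 1 day:
assert leave_days_shown("2014-06-02 09:00:00", "2014-06-02 17:00:00") == 1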
# Copyright (c) 2017 Keith Ito """ from https://github.com/keithito/tacotron """ ''' Defines the set of symbols used in text input to the model. The default is a set of ASCII characters that works well for English or text that has been run through Unidecode. For other data, you can modify _characters. See TRAINING_DATA.md for details. ''' from . import cmudict _pad = '_' _punctuation = '!\'(),.:;? ' _special = '-' _letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz' # Prepend "@" to ARPAbet symbols to ensure uniqueness (some are the same as uppercase letters): _arpabet = ['@' + s for s in cmudict.valid_symbols] # Export all symbols: symbols = [_pad] + list(_special) + list(_punctuation) + list(_letters) + _arpabet
mlperf/inference_results_v0.7
open/Inspur/code/rnnt/tensorrt/preprocessing/parts/text/symbols.py
Python
apache-2.0
749
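A hedged sketch of how such a symbols list is typically consumed: build id lookups and map text to integer ids. The cmudict-based ARPAbet entries are left out so the example stays self-contained, and text_to_sequence below is illustrative, not the repo's actual function.

# Plain-character subset of the symbol set defined above (no ARPAbet).
symbols = ['_', '-'] + list('!\'(),.:;? ') + list(
    'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz')

symbol_to_id = {s: i for i, s in enumerate(symbols)}
id_to_symbol = {i: s for i, s in enumerate(symbols)}

def text_to_sequence(text):
    """Map each known character to its symbol id, skipping unknown characters."""
    return [symbol_to_id[ch] for ch in text if ch in symbol_to_id]

print(text_to_sequence('Hello, world.'))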
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # pylint: disable=W0401,W0614 from telemetry import story from telemetry.page import page as page_module from telemetry.page import shared_page_state class SkiaBuildbotDesktopPage(page_module.Page): def __init__(self, url, page_set): super(SkiaBuildbotDesktopPage, self).__init__( url=url, name=url, page_set=page_set, shared_page_state_class=shared_page_state.SharedDesktopPageState) self.archive_data_file = 'data/skia_espn_desktop.json' def RunNavigateSteps(self, action_runner): action_runner.Navigate(self.url) action_runner.Wait(5) class SkiaEspnDesktopPageSet(story.StorySet): """ Pages designed to represent the median, not highly optimized web """ def __init__(self): super(SkiaEspnDesktopPageSet, self).__init__( archive_data_file='data/skia_espn_desktop.json') urls_list = [ # Why: #1 sports. 'http://espn.go.com', ] for url in urls_list: self.AddStory(SkiaBuildbotDesktopPage(url, self))
HalCanary/skia-hc
tools/skp/page_sets/skia_espn_desktop.py
Python
bsd-3-clause
1,170
import dataclasses from typing import ClassVar, Dict, List, Set, Tuple, Type from collections import OrderedDict @dataclasses.dataclass class A: a: List[int] = <error descr="Mutable default '[]' is not allowed. Use 'default_factory'">[]</error> b: List[int] = <error descr="Mutable default 'list()' is not allowed. Use 'default_factory'">list()</error> c: Set[int] = <error descr="Mutable default '{1}' is not allowed. Use 'default_factory'">{1}</error> d: Set[int] = <error descr="Mutable default 'set()' is not allowed. Use 'default_factory'">set()</error> e: Tuple[int, ...] = () f: Tuple[int, ...] = tuple() g: ClassVar[List[int]] = [] h: ClassVar = [] i: Dict[int, int] = <error descr="Mutable default '{1: 2}' is not allowed. Use 'default_factory'">{1: 2}</error> j: Dict[int, int] = <error descr="Mutable default 'dict()' is not allowed. Use 'default_factory'">dict()</error> k = [] l = list() m: Dict[int, int] = <error descr="Mutable default 'OrderedDict()' is not allowed. Use 'default_factory'">OrderedDict()</error> n: FrozenSet[int] = frozenset() a2: Type[List[int]] = list b2: Type[Set[int]] = set c2: Type[Tuple[int, ...]] = tuple
siosio/intellij-community
python/testData/inspections/PyDataclassInspection/defaultFieldValue.py
Python
apache-2.0
1,215
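The inspection fixture above flags mutable defaults and points at default_factory; a small runnable illustration of the corrected pattern, so each instance gets its own container instead of sharing one:

import dataclasses
from typing import Dict, List

@dataclasses.dataclass
class B:
    a: List[int] = dataclasses.field(default_factory=list)
    i: Dict[int, int] = dataclasses.field(default_factory=lambda: {1: 2})

x, y = B(), B()
x.a.append(1)
assert y.a == []          # y is unaffected by mutations of x
assert y.i == {1: 2}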
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page_module from telemetry.page import page_set as page_set_module class BigJsPageSet(page_set_module.PageSet): """ Sites which load and run big JavaScript files.""" def __init__(self): super(BigJsPageSet, self).__init__( archive_data_file='data/big_js.json', bucket=page_set_module.PARTNER_BUCKET, user_agent_type='desktop') # Page sets with only one page don't work well, since we end up reusing a # renderer all the time and it keeps its memory caches alive (see # crbug.com/403735). Add a dummy second page here. urls_list = [ 'http://beta.unity3d.com/jonas/DT2/', 'http://www.foo.com', ] for url in urls_list: self.AddUserStory(page_module.Page(url, self))
markYoungH/chromium.src
tools/perf/page_sets/big_js.py
Python
bsd-3-clause
935
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.pipeline import ClientRawResponse from .. import models class FormdataOperations(object): """FormdataOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. """ def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self.config = config def upload_file( self, file_content, file_name, custom_headers=None, raw=False, callback=None, **operation_config): """Upload file. :param file_content: File to upload. :type file_content: Generator :param file_name: File name to upload. Name has to be spelled exactly as written here. :type file_name: str :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param callback: When specified, will be called with each chunk of data that is streamed. The callback should take two arguments, the bytes of the current chunk of data and the response object. If the data is uploading, response will be None. :type callback: Callable[Bytes, response=None] :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :rtype: Generator :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true :raises: :class:`ErrorException<fixtures.acceptancetestsbodyformdata.models.ErrorException>` """ # Construct URL url = '/formdata/stream/uploadfile' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'multipart/form-data' if custom_headers: header_parameters.update(custom_headers) # Construct form data form_data_content = { 'fileContent': file_content, 'fileName': file_name, } # Construct and send request request = self._client.post(url, query_parameters) response = self._client.send_formdata( request, header_parameters, form_data_content, **operation_config) if response.status_code not in [200]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._client.stream_download(response, callback) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized def upload_file_via_body( self, file_content, custom_headers=None, raw=False, callback=None, **operation_config): """Upload file. :param file_content: File to upload. :type file_content: Generator :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param callback: When specified, will be called with each chunk of data that is streamed. The callback should take two arguments, the bytes of the current chunk of data and the response object. If the data is uploading, response will be None. 
:type callback: Callable[Bytes, response=None] :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :rtype: Generator :rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if raw=true :raises: :class:`ErrorException<fixtures.acceptancetestsbodyformdata.models.ErrorException>` """ # Construct URL url = '/formdata/stream/uploadfile' # Construct parameters query_parameters = {} # Construct headers header_parameters = {} header_parameters['Content-Type'] = 'application/octet-stream' if custom_headers: header_parameters.update(custom_headers) # Construct body body_content = self._client.stream_upload(file_content, callback) # Construct and send request request = self._client.put(url, query_parameters) response = self._client.send( request, header_parameters, body_content, **operation_config) if response.status_code not in [200]: raise models.ErrorException(self._deserialize, response) deserialized = None if response.status_code == 200: deserialized = self._client.stream_download(response, callback) if raw: client_raw_response = ClientRawResponse(deserialized, response) return client_raw_response return deserialized
annatisch/autorest
src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyFormData/autorestswaggerbatformdataservice/operations/formdata_operations.py
Python
mit
5,590
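A hedged caller-side sketch for the upload operations above: file_content is expected as a generator, and callback receives each chunk plus the response object (None while uploading), per the docstrings. The chunked reader and progress callback below are illustrative helpers, not part of the generated client.

def read_in_chunks(path, chunk_size=4096):
    """Yield a file in chunks so the client can stream the upload body."""
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(chunk_size)
            if not chunk:
                break
            yield chunk

def progress(chunk, response=None):
    # response is None while data is being uploaded, per the docstring above
    direction = 'sent' if response is None else 'received'
    print(direction, len(chunk), 'bytes')

# usage (client construction omitted; it depends on the generated service client):
# client.formdata.upload_file(read_in_chunks('report.bin'), 'report.bin',
#                             callback=progress)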
# testing/mock.py # Copyright (C) 2005-2016 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Import stub for mock library. NOTE: copied/adapted from SQLAlchemy master for backwards compatibility; this should be removable when Alembic targets SQLAlchemy 1.0.0 """ from __future__ import absolute_import from ..util.compat import py33 if py33: from unittest.mock import MagicMock, Mock, call, patch, ANY else: try: from mock import MagicMock, Mock, call, patch, ANY # noqa except ImportError: raise ImportError( "SQLAlchemy's test suite requires the " "'mock' library as of 0.8.2.")
pcu4dros/pandora-core
workspace/lib/python3.5/site-packages/alembic/testing/mock.py
Python
mit
791
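A hedged usage sketch for the import stub above: test code imports MagicMock, call and patch from that module and uses them as usual. The example imports from unittest.mock directly as a stand-in for `from alembic.testing.mock import ...`.

import os
from unittest.mock import MagicMock, call, patch

engine = MagicMock()
engine.execute("SELECT 1")
engine.execute.assert_called_once_with("SELECT 1")
assert engine.execute.call_args == call("SELECT 1")

with patch("os.getcwd", return_value="/tmp"):
    # inside the patch, the real function is replaced by a Mock
    assert os.getcwd() == "/tmp"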
# -*- coding: utf-8 -*- # # Copyright (C) 2007-2010 Edgewall Software # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. The terms # are also available at http://genshi.edgewall.org/wiki/License. # # This software consists of voluntary contributions made by many # individuals. For the exact contribution history, see the revision # history and logs, available at http://genshi.edgewall.org/log/. from datetime import datetime import doctest from gettext import NullTranslations import unittest from genshi.core import Attrs from genshi.template import MarkupTemplate, Context from genshi.filters.i18n import Translator, extract from genshi.input import HTML from genshi.compat import IS_PYTHON2, StringIO class DummyTranslations(NullTranslations): _domains = {} def __init__(self, catalog=()): NullTranslations.__init__(self) self._catalog = catalog or {} self.plural = lambda n: n != 1 def add_domain(self, domain, catalog): translation = DummyTranslations(catalog) translation.add_fallback(self) self._domains[domain] = translation def _domain_call(self, func, domain, *args, **kwargs): return getattr(self._domains.get(domain, self), func)(*args, **kwargs) if IS_PYTHON2: def ugettext(self, message): missing = object() tmsg = self._catalog.get(message, missing) if tmsg is missing: if self._fallback: return self._fallback.ugettext(message) return unicode(message) return tmsg else: def gettext(self, message): missing = object() tmsg = self._catalog.get(message, missing) if tmsg is missing: if self._fallback: return self._fallback.gettext(message) return unicode(message) return tmsg if IS_PYTHON2: def dugettext(self, domain, message): return self._domain_call('ugettext', domain, message) else: def dgettext(self, domain, message): return self._domain_call('gettext', domain, message) def ungettext(self, msgid1, msgid2, n): try: return self._catalog[(msgid1, self.plural(n))] except KeyError: if self._fallback: return self._fallback.ngettext(msgid1, msgid2, n) if n == 1: return msgid1 else: return msgid2 if not IS_PYTHON2: ngettext = ungettext del ungettext if IS_PYTHON2: def dungettext(self, domain, singular, plural, numeral): return self._domain_call('ungettext', domain, singular, plural, numeral) else: def dngettext(self, domain, singular, plural, numeral): return self._domain_call('ngettext', domain, singular, plural, numeral) class TranslatorTestCase(unittest.TestCase): def test_translate_included_attribute_text(self): """ Verify that translated attributes end up in a proper `Attrs` instance. 
""" html = HTML(u"""<html> <span title="Foo"></span> </html>""") translator = Translator(lambda s: u"Voh") stream = list(html.filter(translator)) kind, data, pos = stream[2] assert isinstance(data[1], Attrs) def test_extract_without_text(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <p title="Bar">Foo</p> ${ngettext("Singular", "Plural", num)} </html>""") translator = Translator(extract_text=False) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, 'ngettext', ('Singular', 'Plural', None), []), messages[0]) def test_extract_plural_form(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> ${ngettext("Singular", "Plural", num)} </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, 'ngettext', ('Singular', 'Plural', None), []), messages[0]) def test_extract_funky_plural_form(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> ${ngettext(len(items), *widget.display_names)} </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, 'ngettext', (None, None), []), messages[0]) def test_extract_gettext_with_unicode_string(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> ${gettext("Grüße")} </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, 'gettext', u'Gr\xfc\xdfe', []), messages[0]) def test_extract_included_attribute_text(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <span title="Foo"></span> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, None, 'Foo', []), messages[0]) def test_extract_attribute_expr(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <input type="submit" value="${_('Save')}" /> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, '_', 'Save', []), messages[0]) def test_extract_non_included_attribute_interpolated(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <a href="#anchor_${num}">Foo</a> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, None, 'Foo', []), messages[0]) def test_extract_text_from_sub(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <py:if test="foo">Foo</py:if> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, None, 'Foo', []), messages[0]) def test_ignore_tag_with_fixed_xml_lang(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <p xml:lang="en">(c) 2007 Edgewall Software</p> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(0, len(messages)) def test_extract_tag_with_variable_xml_lang(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <p xml:lang="${lang}">(c) 2007 Edgewall Software</p> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((2, None, '(c) 2007 
Edgewall Software', []), messages[0]) def test_ignore_attribute_with_expression(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"> <input type="submit" value="Reply" title="Reply to comment $num" /> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(0, len(messages)) def test_translate_with_translations_object(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" i18n:comment="As in foo bar">Foo</p> </html>""") translator = Translator(DummyTranslations({'Foo': 'Voh'})) translator.setup(tmpl) self.assertEqual("""<html> <p>Voh</p> </html>""", tmpl.generate().render()) class MsgDirectiveTestCase(unittest.TestCase): def test_extract_i18n_msg(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Please see <a href="help.html">Help</a> for details. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Please see [1:Help] for details.', messages[0][2]) def test_translate_i18n_msg(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Please see <a href="help.html">Help</a> for details. </p> </html>""") gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]." translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p>Für Details siehe bitte <a href="help.html">Hilfe</a>.</p> </html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8')) def test_extract_i18n_msg_nonewline(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="">Please see <a href="help.html">Help</a></p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Please see [1:Help]', messages[0][2]) def test_translate_i18n_msg_nonewline(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="">Please see <a href="help.html">Help</a></p> </html>""") gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]" translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p>Für Details siehe bitte <a href="help.html">Hilfe</a></p> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_elt_nonewline(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:msg>Please see <a href="help.html">Help</a></i18n:msg> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Please see [1:Help]', messages[0][2]) def test_translate_i18n_msg_elt_nonewline(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:msg>Please see <a href="help.html">Help</a></i18n:msg> </html>""") gettext = lambda s: u"Für Details siehe bitte [1:Hilfe]" translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> Für Details siehe bitte <a 
href="help.html">Hilfe</a> </html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8')) def test_extract_i18n_msg_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" title="A helpful paragraph"> Please see <a href="help.html" title="Click for help">Help</a> </p> </html>""") translator = Translator() translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(3, len(messages)) self.assertEqual('A helpful paragraph', messages[0][2]) self.assertEqual(3, messages[0][0]) self.assertEqual('Click for help', messages[1][2]) self.assertEqual(4, messages[1][0]) self.assertEqual('Please see [1:Help]', messages[2][2]) self.assertEqual(3, messages[2][0]) def test_translate_i18n_msg_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" title="A helpful paragraph"> Please see <a href="help.html" title="Click for help">Help</a> </p> </html>""") translator = Translator(lambda msgid: { 'A helpful paragraph': 'Ein hilfreicher Absatz', 'Click for help': u'Klicken für Hilfe', 'Please see [1:Help]': u'Siehe bitte [1:Hilfe]' }[msgid]) translator.setup(tmpl) self.assertEqual(u"""<html> <p title="Ein hilfreicher Absatz">Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a></p> </html>""", tmpl.generate().render(encoding=None)) def test_extract_i18n_msg_with_dynamic_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" title="${_('A helpful paragraph')}"> Please see <a href="help.html" title="${_('Click for help')}">Help</a> </p> </html>""") translator = Translator() translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(3, len(messages)) self.assertEqual('A helpful paragraph', messages[0][2]) self.assertEqual(3, messages[0][0]) self.assertEqual('Click for help', messages[1][2]) self.assertEqual(4, messages[1][0]) self.assertEqual('Please see [1:Help]', messages[2][2]) self.assertEqual(3, messages[2][0]) def test_translate_i18n_msg_with_dynamic_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" title="${_('A helpful paragraph')}"> Please see <a href="help.html" title="${_('Click for help')}">Help</a> </p> </html>""") translator = Translator(lambda msgid: { 'A helpful paragraph': 'Ein hilfreicher Absatz', 'Click for help': u'Klicken für Hilfe', 'Please see [1:Help]': u'Siehe bitte [1:Hilfe]' }[msgid]) translator.setup(tmpl) self.assertEqual(u"""<html> <p title="Ein hilfreicher Absatz">Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a></p> </html>""", tmpl.generate(_=translator.translate).render(encoding=None)) def test_extract_i18n_msg_as_element_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:msg params=""> Please see <a href="help.html" title="Click for help">Help</a> </i18n:msg> </html>""") translator = Translator() translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(2, len(messages)) self.assertEqual('Click for help', messages[0][2]) self.assertEqual(4, messages[0][0]) self.assertEqual('Please see [1:Help]', messages[1][2]) self.assertEqual(3, messages[1][0]) def 
test_translate_i18n_msg_as_element_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:msg params=""> Please see <a href="help.html" title="Click for help">Help</a> </i18n:msg> </html>""") translator = Translator(lambda msgid: { 'Click for help': u'Klicken für Hilfe', 'Please see [1:Help]': u'Siehe bitte [1:Hilfe]' }[msgid]) translator.setup(tmpl) self.assertEqual(u"""<html> Siehe bitte <a href="help.html" title="Klicken für Hilfe">Hilfe</a> </html>""", tmpl.generate().render(encoding=None)) def test_extract_i18n_msg_nested(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Please see <a href="help.html"><em>Help</em> page</a> for details. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Please see [1:[2:Help] page] for details.', messages[0][2]) def test_translate_i18n_msg_nested(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Please see <a href="help.html"><em>Help</em> page</a> for details. </p> </html>""") gettext = lambda s: u"Für Details siehe bitte [1:[2:Hilfeseite]]." translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p>Für Details siehe bitte <a href="help.html"><em>Hilfeseite</em></a>.</p> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_label_with_nested_input(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:msg=""> <label><input type="text" size="3" name="daysback" value="30" /> days back</label> </div> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('[1:[2:] days back]', messages[0][2]) def test_translate_i18n_msg_label_with_nested_input(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:msg=""> <label><input type="text" size="3" name="daysback" value="30" /> foo bar</label> </div> </html>""") gettext = lambda s: "[1:[2:] foo bar]" translator = Translator(gettext) translator.setup(tmpl) self.assertEqual("""<html> <div><label><input type="text" size="3" name="daysback" value="30"/> foo bar</label></div> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_empty(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Show me <input type="text" name="num" /> entries per page. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Show me [1:] entries per page.', messages[0][2]) def test_translate_i18n_msg_empty(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Show me <input type="text" name="num" /> entries per page. </p> </html>""") gettext = lambda s: u"[1:] Einträge pro Seite anzeigen." 
translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p><input type="text" name="num"/> Einträge pro Seite anzeigen.</p> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_multiple(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Please see <a href="help.html">Help</a> for <em>details</em>. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Please see [1:Help] for [2:details].', messages[0][2]) def test_translate_i18n_msg_multiple(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Please see <a href="help.html">Help</a> for <em>details</em>. </p> </html>""") gettext = lambda s: u"Für [2:Details] siehe bitte [1:Hilfe]." translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p>Für <em>Details</em> siehe bitte <a href="help.html">Hilfe</a>.</p> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_multiple_empty(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Show me <input type="text" name="num" /> entries per page, starting at page <input type="text" name="num" />. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Show me [1:] entries per page, starting at page [2:].', messages[0][2]) def test_translate_i18n_msg_multiple_empty(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Show me <input type="text" name="num" /> entries per page, starting at page <input type="text" name="num" />. </p> </html>""", encoding='utf-8') gettext = lambda s: u"[1:] Einträge pro Seite, beginnend auf Seite [2:]." translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p><input type="text" name="num"/> Eintr\u00E4ge pro Seite, beginnend auf Seite <input type="text" name="num"/>.</p> </html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8')) def test_extract_i18n_msg_with_param(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="name"> Hello, ${user.name}! </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Hello, %(name)s!', messages[0][2]) def test_translate_i18n_msg_with_param(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="name"> Hello, ${user.name}! </p> </html>""") gettext = lambda s: u"Hallo, %(name)s!" translator = Translator(gettext) translator.setup(tmpl) self.assertEqual("""<html> <p>Hallo, Jim!</p> </html>""", tmpl.generate(user=dict(name='Jim')).render()) def test_translate_i18n_msg_with_param_reordered(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="name"> Hello, ${user.name}! </p> </html>""") gettext = lambda s: u"%(name)s, sei gegrüßt!" 
translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p>Jim, sei gegrüßt!</p> </html>""", tmpl.generate(user=dict(name='Jim')).render()) def test_translate_i18n_msg_with_attribute_param(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Hello, <a href="#${anchor}">dude</a>! </p> </html>""") gettext = lambda s: u"Sei gegrüßt, [1:Alter]!" translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p>Sei gegrüßt, <a href="#42">Alter</a>!</p> </html>""", tmpl.generate(anchor='42').render()) def test_extract_i18n_msg_with_two_params(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="name, time"> Posted by ${post.author} at ${entry.time.strftime('%H:%m')} </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Posted by %(name)s at %(time)s', messages[0][2]) def test_translate_i18n_msg_with_two_params(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="name, time"> Written by ${entry.author} at ${entry.time.strftime('%H:%M')} </p> </html>""") gettext = lambda s: u"%(name)s schrieb dies um %(time)s" translator = Translator(gettext) translator.setup(tmpl) entry = { 'author': 'Jim', 'time': datetime(2008, 4, 1, 14, 30) } self.assertEqual("""<html> <p>Jim schrieb dies um 14:30</p> </html>""", tmpl.generate(entry=entry).render()) def test_extract_i18n_msg_with_directive(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Show me <input type="text" name="num" py:attrs="{'value': x}" /> entries per page. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual('Show me [1:] entries per page.', messages[0][2]) def test_translate_i18n_msg_with_directive(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""> Show me <input type="text" name="num" py:attrs="{'value': 'x'}" /> entries per page. </p> </html>""") gettext = lambda s: u"[1:] Einträge pro Seite anzeigen." 
translator = Translator(gettext) translator.setup(tmpl) self.assertEqual(u"""<html> <p><input type="text" name="num" value="x"/> Einträge pro Seite anzeigen.</p> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_with_comment(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:comment="As in foo bar" i18n:msg="">Foo</p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, None, 'Foo', ['As in foo bar']), messages[0]) tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" i18n:comment="As in foo bar">Foo</p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, None, 'Foo', ['As in foo bar']), messages[0]) def test_translate_i18n_msg_with_comment(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" i18n:comment="As in foo bar">Foo</p> </html>""") gettext = lambda s: u"Voh" translator = Translator(gettext) translator.setup(tmpl) self.assertEqual("""<html> <p>Voh</p> </html>""", tmpl.generate().render()) def test_extract_i18n_msg_with_attr(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" title="Foo bar">Foo</p> </html>""") translator = Translator() messages = list(translator.extract(tmpl.stream)) self.assertEqual(2, len(messages)) self.assertEqual((3, None, 'Foo bar', []), messages[0]) self.assertEqual((3, None, 'Foo', []), messages[1]) def test_translate_i18n_msg_with_attr(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" title="Foo bar">Foo</p> </html>""") gettext = lambda s: u"Voh" translator = Translator(DummyTranslations({ 'Foo': 'Voh', 'Foo bar': u'Voh bär' })) tmpl.filters.insert(0, translator) tmpl.add_directives(Translator.NAMESPACE, translator) self.assertEqual(u"""<html> <p title="Voh bär">Voh</p> </html>""", tmpl.generate().render()) def test_translate_i18n_msg_and_py_strip_directives(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" py:strip="">Foo</p> <p py:strip="" i18n:msg="">Foo</p> </html>""") translator = Translator(DummyTranslations({'Foo': 'Voh'})) translator.setup(tmpl) self.assertEqual("""<html> Voh Voh </html>""", tmpl.generate().render()) def test_i18n_msg_ticket_300_extract(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:msg params="date, author"> Changed ${ '10/12/2008' } ago by ${ 'me, the author' } </i18n:msg> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual( (3, None, 'Changed %(date)s ago by %(author)s', []), messages[0] ) def test_i18n_msg_ticket_300_translate(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:msg params="date, author"> Changed ${ date } ago by ${ author } 
</i18n:msg> </html>""") translations = DummyTranslations({ 'Changed %(date)s ago by %(author)s': u'Modificado à %(date)s por %(author)s' }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual(u"""<html> Modificado à um dia por Pedro </html>""".encode('utf-8'), tmpl.generate(date='um dia', author="Pedro").render(encoding='utf-8')) def test_i18n_msg_ticket_251_extract(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""><tt><b>Translation[&nbsp;0&nbsp;]</b>: <em>One coin</em></tt></p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual( (3, None, u'[1:[2:Translation\\[\xa00\xa0\\]]: [3:One coin]]', []), messages[0] ) def test_i18n_msg_ticket_251_translate(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg=""><tt><b>Translation[&nbsp;0&nbsp;]</b>: <em>One coin</em></tt></p> </html>""") translations = DummyTranslations({ u'[1:[2:Translation\\[\xa00\xa0\\]]: [3:One coin]]': u'[1:[2:Trandução\\[\xa00\xa0\\]]: [3:Uma moeda]]' }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual(u"""<html> <p><tt><b>Trandução[ 0 ]</b>: <em>Uma moeda</em></tt></p> </html>""".encode('utf-8'), tmpl.generate().render(encoding='utf-8')) def test_extract_i18n_msg_with_other_directives_nested(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" py:with="q = quote_plus(message[:80])">Before you do that, though, please first try <strong><a href="${trac.homepage}search?ticket=yes&amp;noquickjump=1&amp;q=$q">searching</a> for similar issues</strong>, as it is quite likely that this problem has been reported before. For questions about installation and configuration of Trac, please try the <a href="${trac.homepage}wiki/MailingList">mailing list</a> instead of filing a ticket. </p> </html>""") translator = Translator() translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual( 'Before you do that, though, please first try\n ' '[1:[2:searching]\n for similar issues], as it is ' 'quite likely that this problem\n has been reported ' 'before. For questions about installation\n and ' 'configuration of Trac, please try the\n ' '[3:mailing list]\n instead of filing a ticket.', messages[0][2] ) def test_translate_i18n_msg_with_other_directives_nested(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="">Before you do that, though, please first try <strong><a href="${trac.homepage}search?ticket=yes&amp;noquickjump=1&amp;q=q">searching</a> for similar issues</strong>, as it is quite likely that this problem has been reported before. For questions about installation and configuration of Trac, please try the <a href="${trac.homepage}wiki/MailingList">mailing list</a> instead of filing a ticket. </p> </html>""") translations = DummyTranslations({ 'Before you do that, though, please first try\n ' '[1:[2:searching]\n for similar issues], as it is ' 'quite likely that this problem\n has been reported ' 'before. 
For questions about installation\n and ' 'configuration of Trac, please try the\n ' '[3:mailing list]\n instead of filing a ticket.': u'Antes de o fazer, porém,\n ' u'[1:por favor tente [2:procurar]\n por problemas semelhantes], uma vez que ' u'é muito provável que este problema\n já tenha sido reportado ' u'anteriormente. Para questões relativas à instalação\n e ' u'configuração do Trac, por favor tente a\n ' u'[3:mailing list]\n em vez de criar um assunto.' }) translator = Translator(translations) translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) ctx = Context() ctx.push({'trac': {'homepage': 'http://trac.edgewall.org/'}}) self.assertEqual(u"""<html> <p>Antes de o fazer, porém, <strong>por favor tente <a href="http://trac.edgewall.org/search?ticket=yes&amp;noquickjump=1&amp;q=q">procurar</a> por problemas semelhantes</strong>, uma vez que é muito provável que este problema já tenha sido reportado anteriormente. Para questões relativas à instalação e configuração do Trac, por favor tente a <a href="http://trac.edgewall.org/wiki/MailingList">mailing list</a> em vez de criar um assunto.</p> </html>""", tmpl.generate(ctx).render()) def test_i18n_msg_with_other_nested_directives_with_reordered_content(self): # See: http://genshi.edgewall.org/ticket/300#comment:10 tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p py:if="not editable" class="hint" i18n:msg=""> <strong>Note:</strong> This repository is defined in <code><a href="${ 'href.wiki(TracIni)' }">trac.ini</a></code> and cannot be edited on this page. </p> </html>""") translations = DummyTranslations({ '[1:Note:] This repository is defined in\n ' '[2:[3:trac.ini]]\n and cannot be edited on this page.': u'[1:Nota:] Este repositório está definido em \n ' u'[2:[3:trac.ini]]\n e não pode ser editado nesta página.', }) translator = Translator(translations) translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual( '[1:Note:] This repository is defined in\n ' '[2:[3:trac.ini]]\n and cannot be edited on this page.', messages[0][2] ) self.assertEqual(u"""<html> <p class="hint"><strong>Nota:</strong> Este repositório está definido em <code><a href="href.wiki(TracIni)">trac.ini</a></code> e não pode ser editado nesta página.</p> </html>""".encode('utf-8'), tmpl.generate(editable=False).render(encoding='utf-8')) def test_extract_i18n_msg_with_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" py:strip=""> Please see <a href="help.html">Help</a> for details. </p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, None, 'Please see [1:Help] for details.', []), messages[0]) def test_extract_i18n_msg_with_py_strip_and_comment(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" py:strip="" i18n:comment="Foo"> Please see <a href="help.html">Help</a> for details. 
</p> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, None, 'Please see [1:Help] for details.', ['Foo']), messages[0]) def test_translate_i18n_msg_and_comment_with_py_strip_directives(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" i18n:comment="As in foo bar" py:strip="">Foo</p> <p py:strip="" i18n:msg="" i18n:comment="As in foo bar">Foo</p> </html>""") translator = Translator(DummyTranslations({'Foo': 'Voh'})) translator.setup(tmpl) self.assertEqual("""<html> Voh Voh </html>""", tmpl.generate().render()) def test_translate_i18n_msg_ticket_404(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="first,second"> $first <span>$second</span> KEPT <span>Inside a tag</span> tail </p></html>""") translator = Translator(DummyTranslations()) translator.setup(tmpl) self.assertEqual("""<html> <p>FIRST <span>SECOND</span> KEPT <span>Inside a tag</span> tail""" """</p></html>""", tmpl.generate(first="FIRST", second="SECOND").render()) class ChooseDirectiveTestCase(unittest.TestCase): def test_translate_i18n_choose_as_attribute(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="one"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </div> <div i18n:choose="two"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </div> </html>""") translations = DummyTranslations() translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>FooBar</p> </div> <div> <p>FooBars</p> </div> </html>""", tmpl.generate(one=1, two=2).render()) def test_translate_i18n_choose_as_directive(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </i18n:choose> <i18n:choose numeral="one"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </i18n:choose> </html>""") translations = DummyTranslations() translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>FooBars</p> <p>FooBar</p> </html>""", tmpl.generate(one=1, two=2).render()) def test_translate_i18n_choose_as_directive_singular_and_plural_with_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two"> <p i18n:singular="" py:strip="">FooBar Singular with Strip</p> <p i18n:plural="">FooBars Plural without Strip</p> </i18n:choose> <i18n:choose numeral="two"> <p i18n:singular="">FooBar singular without strip</p> <p i18n:plural="" py:strip="">FooBars plural with strip</p> </i18n:choose> <i18n:choose numeral="one"> <p i18n:singular="">FooBar singular without strip</p> <p i18n:plural="" py:strip="">FooBars plural with strip</p> </i18n:choose> <i18n:choose numeral="one"> <p i18n:singular="" py:strip="">FooBar singular with strip</p> <p i18n:plural="">FooBars plural without strip</p> </i18n:choose> </html>""") translations = DummyTranslations() translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>FooBars Plural without Strip</p> FooBars plural with strip <p>FooBar singular without strip</p> FooBar 
singular with strip </html>""", tmpl.generate(one=1, two=2).render()) def test_translate_i18n_choose_plural_singular_as_directive(self): # Ticket 371 tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two"> <i18n:singular>FooBar</i18n:singular> <i18n:plural>FooBars</i18n:plural> </i18n:choose> <i18n:choose numeral="one"> <i18n:singular>FooBar</i18n:singular> <i18n:plural>FooBars</i18n:plural> </i18n:choose> </html>""") translations = DummyTranslations({ ('FooBar', 0): 'FuBar', ('FooBars', 1): 'FuBars', 'FooBar': 'FuBar', 'FooBars': 'FuBars', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> FuBars FuBar </html>""", tmpl.generate(one=1, two=2).render()) def test_translate_i18n_choose_as_attribute_with_params(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="two; fname, lname"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translations = DummyTranslations({ ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>Vohs John Doe</p> </div> </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render()) def test_translate_i18n_choose_as_attribute_with_params_and_domain_as_param(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo"> <div i18n:choose="two; fname, lname"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translations = DummyTranslations() translations.add_domain('foo', { ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>Vohs John Doe</p> </div> </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render()) def test_translate_i18n_choose_as_directive_with_params(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> <i18n:choose numeral="one" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </html>""") translations = DummyTranslations({ ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>Vohs John Doe</p> <p>Voh John Doe</p> </html>""", tmpl.generate(one=1, two=2, fname='John', lname='Doe').render()) def test_translate_i18n_choose_as_directive_with_params_and_domain_as_directive(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" 
xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:domain name="foo"> <i18n:choose numeral="two" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </i18n:domain> <i18n:choose numeral="one" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </html>""") translations = DummyTranslations() translations.add_domain('foo', { ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>Vohs John Doe</p> <p>Foo John Doe</p> </html>""", tmpl.generate(one=1, two=2, fname='John', lname='Doe').render()) def test_extract_i18n_choose_as_attribute(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="one"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </div> <div i18n:choose="two"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </div> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(2, len(messages)) self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0]) self.assertEqual((7, 'ngettext', ('FooBar', 'FooBars'), []), messages[1]) def test_extract_i18n_choose_as_directive(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </i18n:choose> <i18n:choose numeral="one"> <p i18n:singular="">FooBar</p> <p i18n:plural="">FooBars</p> </i18n:choose> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(2, len(messages)) self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0]) self.assertEqual((7, 'ngettext', ('FooBar', 'FooBars'), []), messages[1]) def test_extract_i18n_choose_as_attribute_with_params(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="two; fname, lname"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), []), messages[0]) def test_extract_i18n_choose_as_attribute_with_params_and_domain_as_param(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo"> <div i18n:choose="two; fname, lname"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((4, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), []), messages[0]) def 
test_extract_i18n_choose_as_directive_with_params(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> <i18n:choose numeral="one" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(2, len(messages)) self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), []), messages[0]) self.assertEqual((7, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), []), messages[1]) def test_extract_i18n_choose_as_directive_with_params_and_domain_as_directive(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:domain name="foo"> <i18n:choose numeral="two" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </i18n:domain> <i18n:choose numeral="one" params="fname, lname"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(2, len(messages)) self.assertEqual((4, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), []), messages[0]) self.assertEqual((9, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), []), messages[1]) def test_extract_i18n_choose_as_attribute_with_params_and_comment(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="two; fname, lname" i18n:comment="As in Foo Bar"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), ['As in Foo Bar']), messages[0]) def test_extract_i18n_choose_as_directive_with_params_and_comment(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="two" params="fname, lname" i18n:comment="As in Foo Bar"> <p i18n:singular="">Foo ${fname} ${lname}</p> <p i18n:plural="">Foos ${fname} ${lname}</p> </i18n:choose> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, 'ngettext', ('Foo %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s'), ['As in Foo Bar']), messages[0]) def test_extract_i18n_choose_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:choose="num; num" title="Things"> <i18n:singular> There is <a href="$link" title="View thing">${num} thing</a>. </i18n:singular> <i18n:plural> There are <a href="$link" title="View things">${num} things</a>. 
</i18n:plural> </p> </html>""") translator = Translator() translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(4, len(messages)) self.assertEqual((3, None, 'Things', []), messages[0]) self.assertEqual((5, None, 'View thing', []), messages[1]) self.assertEqual((8, None, 'View things', []), messages[2]) self.assertEqual( (3, 'ngettext', ('There is [1:%(num)s thing].', 'There are [1:%(num)s things].'), []), messages[3]) def test_translate_i18n_choose_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:choose="num; num" title="Things"> <i18n:singular> There is <a href="$link" title="View thing">${num} thing</a>. </i18n:singular> <i18n:plural> There are <a href="$link" title="View things">${num} things</a>. </i18n:plural> </p> </html>""") translations = DummyTranslations({ 'Things': 'Sachen', 'View thing': 'Sache betrachten', 'View things': 'Sachen betrachten', ('There is [1:%(num)s thing].', 0): 'Da ist [1:%(num)s Sache].', ('There is [1:%(num)s thing].', 1): 'Da sind [1:%(num)s Sachen].' }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual(u"""<html> <p title="Sachen"> Da ist <a href="/things" title="Sache betrachten">1 Sache</a>. </p> </html>""", tmpl.generate(link="/things", num=1).render(encoding=None)) self.assertEqual(u"""<html> <p title="Sachen"> Da sind <a href="/things" title="Sachen betrachten">3 Sachen</a>. </p> </html>""", tmpl.generate(link="/things", num=3).render(encoding=None)) def test_extract_i18n_choose_as_element_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="num" params="num"> <p i18n:singular="" title="Things"> There is <a href="$link" title="View thing">${num} thing</a>. </p> <p i18n:plural="" title="Things"> There are <a href="$link" title="View things">${num} things</a>. </p> </i18n:choose> </html>""") translator = Translator() translator.setup(tmpl) messages = list(translator.extract(tmpl.stream)) self.assertEqual(5, len(messages)) self.assertEqual((4, None, 'Things', []), messages[0]) self.assertEqual((5, None, 'View thing', []), messages[1]) self.assertEqual((7, None, 'Things', []), messages[2]) self.assertEqual((8, None, 'View things', []), messages[3]) self.assertEqual( (3, 'ngettext', ('There is [1:%(num)s thing].', 'There are [1:%(num)s things].'), []), messages[4]) def test_translate_i18n_choose_as_element_with_attributes(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:choose numeral="num" params="num"> <p i18n:singular="" title="Things"> There is <a href="$link" title="View thing">${num} thing</a>. </p> <p i18n:plural="" title="Things"> There are <a href="$link" title="View things">${num} things</a>. </p> </i18n:choose> </html>""") translations = DummyTranslations({ 'Things': 'Sachen', 'View thing': 'Sache betrachten', 'View things': 'Sachen betrachten', ('There is [1:%(num)s thing].', 0): 'Da ist [1:%(num)s Sache].', ('There is [1:%(num)s thing].', 1): 'Da sind [1:%(num)s Sachen].' 
}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual(u"""<html> <p title="Sachen">Da ist <a href="/things" title="Sache betrachten">1 Sache</a>.</p> </html>""", tmpl.generate(link="/things", num=1).render(encoding=None)) self.assertEqual(u"""<html> <p title="Sachen">Da sind <a href="/things" title="Sachen betrachten">3 Sachen</a>.</p> </html>""", tmpl.generate(link="/things", num=3).render(encoding=None)) def test_translate_i18n_choose_and_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="two; fname, lname"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translations = DummyTranslations({ ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>Vohs John Doe</p> </div> </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render()) def test_translate_i18n_choose_and_domain_and_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo"> <div i18n:choose="two; fname, lname"> <p i18n:singular="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translations = DummyTranslations() translations.add_domain('foo', { ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>Vohs John Doe</p> </div> </html>""", tmpl.generate(two=2, fname='John', lname='Doe').render()) def test_translate_i18n_choose_and_singular_with_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="two; fname, lname"> <p i18n:singular="" py:strip="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> <div i18n:choose="one; fname, lname"> <p i18n:singular="" py:strip="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translations = DummyTranslations({ ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s %(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>Vohs John Doe</p> </div> <div> Voh John Doe </div> </html>""", tmpl.generate( one=1, two=2, fname='John',lname='Doe').render()) def test_translate_i18n_choose_and_plural_with_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="two; fname, lname"> <p i18n:singular="" py:strip="">Foo $fname $lname</p> <p i18n:plural="">Foos $fname $lname</p> </div> </html>""") translations = DummyTranslations({ ('Foo %(fname)s %(lname)s', 0): 'Voh %(fname)s %(lname)s', ('Foo %(fname)s %(lname)s', 1): 'Vohs %(fname)s %(lname)s', 'Foo %(fname)s %(lname)s': 'Voh %(fname)s %(lname)s', 'Foos %(fname)s 
%(lname)s': 'Vohs %(fname)s %(lname)s', }) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> Voh John Doe </div> </html>""", tmpl.generate(two=1, fname='John', lname='Doe').render()) def test_extract_i18n_choose_as_attribute_and_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:choose="one" py:strip=""> <p i18n:singular="" py:strip="">FooBar</p> <p i18n:plural="" py:strip="">FooBars</p> </div> </html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(1, len(messages)) self.assertEqual((3, 'ngettext', ('FooBar', 'FooBars'), []), messages[0]) class DomainDirectiveTestCase(unittest.TestCase): def test_translate_i18n_domain_with_msg_directives(self): #"""translate with i18n:domain and nested i18n:msg directives """ tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <div i18n:domain="foo"> <p i18n:msg="">FooBar</p> <p i18n:msg="">Bar</p> </div> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <div> <p>BarFoo</p> <p>PT_Foo</p> </div> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_with_inline_directives(self): #"""translate with inlined i18n:domain and i18n:msg directives""" tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="" i18n:domain="foo">FooBar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>BarFoo</p> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_without_msg_directives(self): #"""translate domain call without i18n:msg directives still uses current domain""" tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="">Bar</p> <div i18n:domain="foo"> <p i18n:msg="">FooBar</p> <p i18n:msg="">Bar</p> <p>Bar</p> </div> <p>Bar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>Voh</p> <div> <p>BarFoo</p> <p>PT_Foo</p> <p>PT_Foo</p> </div> <p>Voh</p> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_as_directive_not_attribute(self): #"""translate with domain as directive""" tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <i18n:domain name="foo"> <p i18n:msg="">FooBar</p> <p i18n:msg="">Bar</p> <p>Bar</p> </i18n:domain> <p>Bar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'PT_Foo'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>BarFoo</p> <p>PT_Foo</p> <p>PT_Foo</p> <p>Voh</p> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_nested_directives(self): #"""translate with nested i18n:domain directives""" tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" 
xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="">Bar</p> <div i18n:domain="foo"> <p i18n:msg="">FooBar</p> <p i18n:domain="bar" i18n:msg="">Bar</p> <p>Bar</p> </div> <p>Bar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'foo_Bar'}) translations.add_domain('bar', {'Bar': 'bar_Bar'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>Voh</p> <div> <p>BarFoo</p> <p>bar_Bar</p> <p>foo_Bar</p> </div> <p>Voh</p> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_with_empty_nested_domain_directive(self): #"""translate with empty nested i18n:domain directive does not use dngettext""" tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n"> <p i18n:msg="">Bar</p> <div i18n:domain="foo"> <p i18n:msg="">FooBar</p> <p i18n:domain="" i18n:msg="">Bar</p> <p>Bar</p> </div> <p>Bar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo', 'Bar': 'foo_Bar'}) translations.add_domain('bar', {'Bar': 'bar_Bar'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>Voh</p> <div> <p>BarFoo</p> <p>Voh</p> <p>foo_Bar</p> </div> <p>Voh</p> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_with_inline_directive_on_START_NS(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo"> <p i18n:msg="">FooBar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual("""<html> <p>BarFoo</p> </html>""", tmpl.generate().render()) def test_translate_i18n_domain_with_inline_directive_on_START_NS_with_py_strip(self): tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo" py:strip=""> <p i18n:msg="">FooBar</p> </html>""") translations = DummyTranslations({'Bar': 'Voh'}) translations.add_domain('foo', {'FooBar': 'BarFoo'}) translator = Translator(translations) translator.setup(tmpl) self.assertEqual(""" <p>BarFoo</p> """, tmpl.generate().render()) def test_translate_i18n_domain_with_nested_includes(self): import os, shutil, tempfile from genshi.template.loader import TemplateLoader dirname = tempfile.mkdtemp(suffix='genshi_test') try: for idx in range(7): file1 = open(os.path.join(dirname, 'tmpl%d.html' % idx), 'w') try: file1.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" py:strip=""> <div>Included tmpl$idx</div> <p i18n:msg="idx">Bar $idx</p> <p i18n:domain="bar">Bar</p> <p i18n:msg="idx" i18n:domain="">Bar $idx</p> <p i18n:domain="" i18n:msg="idx">Bar $idx</p> <py:if test="idx &lt; 6"> <xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/> </py:if> </html>""") finally: file1.close() file2 = open(os.path.join(dirname, 'tmpl10.html'), 'w') try: file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo"> <xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/> </html>""") finally: file2.close() def callback(template): translations = DummyTranslations({'Bar %(idx)s': 'Voh %(idx)s'}) translations.add_domain('foo', {'Bar 
%(idx)s': 'foo_Bar %(idx)s'}) translations.add_domain('bar', {'Bar': 'bar_Bar'}) translator = Translator(translations) translator.setup(template) loader = TemplateLoader([dirname], callback=callback) tmpl = loader.load('tmpl10.html') self.assertEqual("""<html> <div>Included tmpl0</div> <p>foo_Bar 0</p> <p>bar_Bar</p> <p>Voh 0</p> <p>Voh 0</p> <div>Included tmpl1</div> <p>foo_Bar 1</p> <p>bar_Bar</p> <p>Voh 1</p> <p>Voh 1</p> <div>Included tmpl2</div> <p>foo_Bar 2</p> <p>bar_Bar</p> <p>Voh 2</p> <p>Voh 2</p> <div>Included tmpl3</div> <p>foo_Bar 3</p> <p>bar_Bar</p> <p>Voh 3</p> <p>Voh 3</p> <div>Included tmpl4</div> <p>foo_Bar 4</p> <p>bar_Bar</p> <p>Voh 4</p> <p>Voh 4</p> <div>Included tmpl5</div> <p>foo_Bar 5</p> <p>bar_Bar</p> <p>Voh 5</p> <p>Voh 5</p> <div>Included tmpl6</div> <p>foo_Bar 6</p> <p>bar_Bar</p> <p>Voh 6</p> <p>Voh 6</p> </html>""", tmpl.generate(idx=-1).render()) finally: shutil.rmtree(dirname) def test_translate_i18n_domain_with_nested_includes_with_translatable_attrs(self): import os, shutil, tempfile from genshi.template.loader import TemplateLoader dirname = tempfile.mkdtemp(suffix='genshi_test') try: for idx in range(4): file1 = open(os.path.join(dirname, 'tmpl%d.html' % idx), 'w') try: file1.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" py:strip=""> <div>Included tmpl$idx</div> <p title="${dg('foo', 'Bar %(idx)s') % dict(idx=idx)}" i18n:msg="idx">Bar $idx</p> <p title="Bar" i18n:domain="bar">Bar</p> <p title="Bar" i18n:msg="idx" i18n:domain="">Bar $idx</p> <p i18n:msg="idx" i18n:domain="" title="Bar">Bar $idx</p> <p i18n:domain="" i18n:msg="idx" title="Bar">Bar $idx</p> <py:if test="idx &lt; 3"> <xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/> </py:if> </html>""") finally: file1.close() file2 = open(os.path.join(dirname, 'tmpl10.html'), 'w') try: file2.write("""<html xmlns:xi="http://www.w3.org/2001/XInclude" xmlns:py="http://genshi.edgewall.org/" xmlns:i18n="http://genshi.edgewall.org/i18n" i18n:domain="foo"> <xi:include href="tmpl${idx}.html" py:with="idx = idx+1"/> </html>""") finally: file2.close() translations = DummyTranslations({'Bar %(idx)s': 'Voh %(idx)s', 'Bar': 'Voh'}) translations.add_domain('foo', {'Bar %(idx)s': 'foo_Bar %(idx)s'}) translations.add_domain('bar', {'Bar': 'bar_Bar'}) translator = Translator(translations) def callback(template): translator.setup(template) loader = TemplateLoader([dirname], callback=callback) tmpl = loader.load('tmpl10.html') if IS_PYTHON2: dgettext = translations.dugettext else: dgettext = translations.dgettext self.assertEqual("""<html> <div>Included tmpl0</div> <p title="foo_Bar 0">foo_Bar 0</p> <p title="bar_Bar">bar_Bar</p> <p title="Voh">Voh 0</p> <p title="Voh">Voh 0</p> <p title="Voh">Voh 0</p> <div>Included tmpl1</div> <p title="foo_Bar 1">foo_Bar 1</p> <p title="bar_Bar">bar_Bar</p> <p title="Voh">Voh 1</p> <p title="Voh">Voh 1</p> <p title="Voh">Voh 1</p> <div>Included tmpl2</div> <p title="foo_Bar 2">foo_Bar 2</p> <p title="bar_Bar">bar_Bar</p> <p title="Voh">Voh 2</p> <p title="Voh">Voh 2</p> <p title="Voh">Voh 2</p> <div>Included tmpl3</div> <p title="foo_Bar 3">foo_Bar 3</p> <p title="bar_Bar">bar_Bar</p> <p title="Voh">Voh 3</p> <p title="Voh">Voh 3</p> <p title="Voh">Voh 3</p> </html>""", tmpl.generate(idx=-1, dg=dgettext).render()) finally: shutil.rmtree(dirname) class ExtractTestCase(unittest.TestCase): def test_markup_template_extraction(self): buf = StringIO("""<html 
xmlns:py="http://genshi.edgewall.org/"> <head> <title>Example</title> </head> <body> <h1>Example</h1> <p>${_("Hello, %(name)s") % dict(name=username)}</p> <p>${ngettext("You have %d item", "You have %d items", num)}</p> </body> </html>""") results = list(extract(buf, ['_', 'ngettext'], [], {})) self.assertEqual([ (3, None, 'Example', []), (6, None, 'Example', []), (7, '_', 'Hello, %(name)s', []), (8, 'ngettext', ('You have %d item', 'You have %d items', None), []), ], results) def test_extraction_without_text(self): buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/"> <p title="Bar">Foo</p> ${ngettext("Singular", "Plural", num)} </html>""") results = list(extract(buf, ['_', 'ngettext'], [], { 'extract_text': 'no' })) self.assertEqual([ (3, 'ngettext', ('Singular', 'Plural', None), []), ], results) def test_text_template_extraction(self): buf = StringIO("""${_("Dear %(name)s") % {'name': name}}, ${ngettext("Your item:", "Your items", len(items))} #for item in items * $item #end All the best, Foobar""") results = list(extract(buf, ['_', 'ngettext'], [], { 'template_class': 'genshi.template:TextTemplate' })) self.assertEqual([ (1, '_', 'Dear %(name)s', []), (3, 'ngettext', ('Your item:', 'Your items', None), []), (7, None, 'All the best,\n Foobar', []) ], results) def test_extraction_with_keyword_arg(self): buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/"> ${gettext('Foobar', foo='bar')} </html>""") results = list(extract(buf, ['gettext'], [], {})) self.assertEqual([ (2, 'gettext', ('Foobar'), []), ], results) def test_extraction_with_nonstring_arg(self): buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/"> ${dgettext(curdomain, 'Foobar')} </html>""") results = list(extract(buf, ['dgettext'], [], {})) self.assertEqual([ (2, 'dgettext', (None, 'Foobar'), []), ], results) def test_extraction_inside_ignored_tags(self): buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/"> <script type="text/javascript"> $('#llist').tabs({ remote: true, spinner: "${_('Please wait...')}" }); </script> </html>""") results = list(extract(buf, ['_'], [], {})) self.assertEqual([ (5, '_', 'Please wait...', []), ], results) def test_extraction_inside_ignored_tags_with_directives(self): buf = StringIO("""<html xmlns:py="http://genshi.edgewall.org/"> <script type="text/javascript"> <py:if test="foobar"> alert("This shouldn't be extracted"); </py:if> </script> </html>""") self.assertEqual([], list(extract(buf, ['_'], [], {}))) def test_extract_py_def_directive_with_py_strip(self): # Failed extraction from Trac tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/" py:strip=""> <py:def function="diff_options_fields(diff)"> <label for="style">View differences</label> <select id="style" name="style"> <option selected="${diff.style == 'inline' or None}" value="inline">inline</option> <option selected="${diff.style == 'sidebyside' or None}" value="sidebyside">side by side</option> </select> <div class="field"> Show <input type="text" name="contextlines" id="contextlines" size="2" maxlength="3" value="${diff.options.contextlines &lt; 0 and 'all' or diff.options.contextlines}" /> <label for="contextlines">lines around each change</label> </div> <fieldset id="ignore" py:with="options = diff.options"> <legend>Ignore:</legend> <div class="field"> <input type="checkbox" id="ignoreblanklines" name="ignoreblanklines" checked="${options.ignoreblanklines or None}" /> <label for="ignoreblanklines">Blank lines</label> </div> <div class="field"> <input type="checkbox" 
id="ignorecase" name="ignorecase" checked="${options.ignorecase or None}" /> <label for="ignorecase">Case changes</label> </div> <div class="field"> <input type="checkbox" id="ignorewhitespace" name="ignorewhitespace" checked="${options.ignorewhitespace or None}" /> <label for="ignorewhitespace">White space changes</label> </div> </fieldset> <div class="buttons"> <input type="submit" name="update" value="${_('Update')}" /> </div> </py:def></html>""") translator = Translator() tmpl.add_directives(Translator.NAMESPACE, translator) messages = list(translator.extract(tmpl.stream)) self.assertEqual(10, len(messages)) self.assertEqual([ (3, None, 'View differences', []), (6, None, 'inline', []), (8, None, 'side by side', []), (10, None, 'Show', []), (13, None, 'lines around each change', []), (16, None, 'Ignore:', []), (20, None, 'Blank lines', []), (25, None, 'Case changes',[]), (30, None, 'White space changes', []), (34, '_', 'Update', [])], messages) def suite(): suite = unittest.TestSuite() suite.addTest(doctest.DocTestSuite(Translator.__module__)) suite.addTest(unittest.makeSuite(TranslatorTestCase, 'test')) suite.addTest(unittest.makeSuite(MsgDirectiveTestCase, 'test')) suite.addTest(unittest.makeSuite(ChooseDirectiveTestCase, 'test')) suite.addTest(unittest.makeSuite(DomainDirectiveTestCase, 'test')) suite.addTest(unittest.makeSuite(ExtractTestCase, 'test')) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
mitchellrj/genshi
genshi/filters/tests/i18n.py
Python
bsd-3-clause
89,114
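A quick orientation for the test file above: the extraction entry point it exercises is Translator.extract, which yields (lineno, function, message, comments) tuples. The minimal sketch below is not part of the test suite; it only reuses calls that already appear in that file, and the printed tuple is indicative rather than exact.

from genshi.template import MarkupTemplate
from genshi.filters.i18n import Translator

# A tiny template with a single translatable paragraph and one parameter.
tmpl = MarkupTemplate("""<html xmlns:py="http://genshi.edgewall.org/"
    xmlns:i18n="http://genshi.edgewall.org/i18n">
  <p i18n:msg="name">Hello, $name!</p>
</html>""")

translator = Translator()
tmpl.add_directives(Translator.NAMESPACE, translator)

for message in translator.extract(tmpl.stream):
    print(message)
    # e.g. (3, None, u'Hello, %(name)s!', [])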
# Import the SlideSet base class
import math
from ..slidesets import RemarkSlideSet

##
# A special set of slides for creating cover page and contents
class MergeCoverSet(RemarkSlideSet):

    ##
    # Extract the valid parameters for this object
    @staticmethod
    def validParams():
        params = RemarkSlideSet.validParams()
        params.addRequiredParam('slide_sets',
                                'A vector of slideset names to combine into a single contents')
        return params

    def __init__(self, name, params, **kwargs):
        RemarkSlideSet.__init__(self, name, params)

        # The SlideSetWarehouse
        self.__warehouse = self.getParam('_warehouse')

        # Build a list of sets to merge
        self.__merge_list = self.getParam('slide_sets').split()

    ##
    # Search through all the slides in the specified slide sets for table of contents content
    def _extractContents(self):
        # print len(self.__merge_list)
        # print self.__merge_list

        # Count the number of contents entries
        contents = []
        for obj in self.__warehouse.objects:
            if (obj is not self) and (len(self.__merge_list) == 0 or obj.name() in self.__merge_list):
                # print 'TUTORIAL_SUMMARY_COVER:', obj.name()
                pages = obj._extractContents()
                for page in pages:
                    contents += page

        # Break the flat list of entries into groups of 'contents_items_per_slide' items
        n = int(self.getParam('contents_items_per_slide'))
        output = [contents[i:i + n] for i in range(0, len(contents), n)]
        return output
danielru/moose
python/PresentationBuilder/slidesets/MergeCoverSet.py
Python
lgpl-2.1
1,395
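MergeCoverSet._extractContents above gathers contents entries from several slide sets and then splits them into per-slide groups. The standalone sketch below (not part of the MOOSE sources; chunk_contents is a hypothetical helper name) isolates that slicing idiom so it can be tried outside the framework.

def chunk_contents(contents, n):
    # Split the flat list of contents entries into consecutive
    # groups of at most n items, one group per slide.
    return [contents[i:i + n] for i in range(0, len(contents), n)]

if __name__ == '__main__':
    # Seven entries with three items per slide -> [[0, 1, 2], [3, 4, 5], [6]]
    print(chunk_contents(list(range(7)), 3))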
#!/usr/bin/python # This script generates a list of testsuites that should be run as part of # the Samba 4 test suite. # The output of this script is parsed by selftest.pl, which then decides # which of the tests to actually run. It will, for example, skip all tests # listed in selftest/skip or only run a subset during "make quicktest". # The idea is that this script outputs all of the tests of Samba 4, not # just those that are known to pass, and list those that should be skipped # or are known to fail in selftest/skip or selftest/knownfail. This makes it # very easy to see what functionality is still missing in Samba 4 and makes # it possible to run the testsuite against other servers, such as Samba 3 or # Windows that have a different set of features. # The syntax for a testsuite is "-- TEST --" on a single line, followed # by the name of the test, the environment it needs and the command to run, all # three separated by newlines. All other lines in the output are considered # comments. import os import subprocess import sys def srcdir(): return os.path.normpath(os.getenv("SRCDIR", os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."))) def source4dir(): return os.path.normpath(os.path.join(srcdir(), "source4")) def source3dir(): return os.path.normpath(os.path.join(srcdir(), "source3")) def bindir(): return os.path.normpath(os.getenv("BINDIR", "./bin")) def binpath(name): return os.path.join(bindir(), name) # Split perl variable to allow $PERL to be set to e.g. "perl -W" perl = os.getenv("PERL", "perl").split() if subprocess.call(perl + ["-e", "eval require Test::More;"]) == 0: has_perl_test_more = True else: has_perl_test_more = False python = os.getenv("PYTHON", "python") tap2subunit = python + " " + os.path.join(srcdir(), "selftest", "tap2subunit") def valgrindify(cmdline): """Run a command under valgrind, if $VALGRIND was set.""" valgrind = os.getenv("VALGRIND") if valgrind is None: return cmdline return valgrind + " " + cmdline def plantestsuite(name, env, cmdline): """Plan a test suite. :param name: Testsuite name :param env: Environment to run the testsuite in :param cmdline: Command line to run """ print "-- TEST --" print name print env if isinstance(cmdline, list): cmdline = " ".join(cmdline) if "$LISTOPT" in cmdline: raise AssertionError("test %s supports --list, but not --load-list" % name) print cmdline + " 2>&1 " + " | " + add_prefix(name, env) def add_prefix(prefix, env, support_list=False): if support_list: listopt = "$LISTOPT " else: listopt = "" return "%s/selftest/filter-subunit %s--fail-on-empty --prefix=\"%s.\" --suffix=\"(%s)\"" % (srcdir(), listopt, prefix, env) def plantestsuite_loadlist(name, env, cmdline): print "-- TEST-LOADLIST --" if env == "none": fullname = name else: fullname = "%s(%s)" % (name, env) print fullname print env if isinstance(cmdline, list): cmdline = " ".join(cmdline) support_list = ("$LISTOPT" in cmdline) if not "$LISTOPT" in cmdline: raise AssertionError("loadlist test %s does not support not --list" % name) if not "$LOADLIST" in cmdline: raise AssertionError("loadlist test %s does not support --load-list" % name) print ("%s | %s" % (cmdline.replace("$LOADLIST", ""), add_prefix(name, env, support_list))).replace("$LISTOPT", "--list") print cmdline.replace("$LISTOPT", "") + " 2>&1 " + " | " + add_prefix(name, env, False) def skiptestsuite(name, reason): """Indicate that a testsuite was skipped. 
:param name: Test suite name :param reason: Reason the test suite was skipped """ # FIXME: Report this using subunit, but re-adjust the testsuite count somehow print >>sys.stderr, "skipping %s (%s)" % (name, reason) def planperltestsuite(name, path): """Run a perl test suite. :param name: Name of the test suite :param path: Path to the test runner """ if has_perl_test_more: plantestsuite(name, "none", "%s %s | %s" % (" ".join(perl), path, tap2subunit)) else: skiptestsuite(name, "Test::More not available") def planpythontestsuite(env, module, name=None, extra_path=[]): if name is None: name = module pypath = list(extra_path) args = [python, "-m", "samba.subunit.run", "$LISTOPT", "$LOADLIST", module] if pypath: args.insert(0, "PYTHONPATH=%s" % ":".join(["$PYTHONPATH"] + pypath)) plantestsuite_loadlist(name, env, args) def get_env_torture_options(): ret = [] if not os.getenv("SELFTEST_VERBOSE"): ret.append("--option=torture:progress=no") if os.getenv("SELFTEST_QUICK"): ret.append("--option=torture:quick=yes") return ret samba4srcdir = source4dir() samba3srcdir = source3dir() bbdir = os.path.join(srcdir(), "testprogs/blackbox") configuration = "--configfile=$SMB_CONF_PATH" smbtorture4 = binpath("smbtorture") smbtorture4_testsuite_list = subprocess.Popen([smbtorture4, "--list-suites"], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate("")[0].splitlines() smbtorture4_options = [ configuration, "--option=\'fss:sequence timeout=1\'", "--maximum-runtime=$SELFTEST_MAXTIME", "--basedir=$SELFTEST_TMPDIR", "--format=subunit" ] + get_env_torture_options() def plansmbtorture4testsuite(name, env, options, target, modname=None): if modname is None: modname = "samba4.%s" % name if isinstance(options, list): options = " ".join(options) options = " ".join(smbtorture4_options + ["--target=%s" % target]) + " " + options cmdline = "%s $LISTOPT $LOADLIST %s %s" % (valgrindify(smbtorture4), options, name) plantestsuite_loadlist(modname, env, cmdline) def smbtorture4_testsuites(prefix): return filter(lambda x: x.startswith(prefix), smbtorture4_testsuite_list) smbclient3 = binpath('smbclient') smbtorture3 = binpath('smbtorture3') ntlm_auth3 = binpath('ntlm_auth') net = binpath('net') scriptdir = os.path.join(srcdir(), "script/tests") wbinfo = binpath('wbinfo') dbwrap_tool = binpath('dbwrap_tool') vfstest = binpath('vfstest')
Zentyal/samba
selftest/selftesthelpers.py
Python
gpl-3.0
6,229
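The header comment in selftesthelpers.py above describes the plain-text protocol consumed by selftest.pl: a "-- TEST --" marker followed by the test name, the environment and the command line. The sketch below illustrates that output format only; emit_testsuite is an invented name, and the real plantestsuite additionally pipes the command through filter-subunit.

import sys

def emit_testsuite(name, env, cmdline):
    # Emit one testsuite record in the format selftest.pl expects:
    # a marker line, then name, environment and command, one per line.
    sys.stdout.write("-- TEST --\n")
    sys.stdout.write(name + "\n")
    sys.stdout.write(env + "\n")
    sys.stdout.write(cmdline + "\n")

emit_testsuite("samba4.example", "none", "echo testing")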
# Copyright (c) 2006-2010 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:[email protected] # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. """try to find more bugs in the code using astng inference capabilities """ import re import shlex from logilab import astng from logilab.astng import InferenceError, NotFoundError, YES, Instance from pylint.interfaces import IASTNGChecker from pylint.checkers import BaseChecker from pylint.checkers.utils import safe_infer, is_super, check_messages MSGS = { 'E1101': ('%s %r has no %r member', 'Used when a variable is accessed for an unexistent member.'), 'E1102': ('%s is not callable', 'Used when an object being called has been inferred to a non \ callable object'), 'E1103': ('%s %r has no %r member (but some types could not be inferred)', 'Used when a variable is accessed for an unexistent member, but \ astng was not able to interpret all possible types of this \ variable.'), 'E1111': ('Assigning to function call which doesn\'t return', 'Used when an assignment is done on a function call but the \ inferred function doesn\'t return anything.'), 'W1111': ('Assigning to function call which only returns None', 'Used when an assignment is done on a function call but the \ inferred function returns nothing but None.'), 'E1120': ('No value passed for parameter %s in function call', 'Used when a function call passes too few arguments.'), 'E1121': ('Too many positional arguments for function call', 'Used when a function call passes too many positional \ arguments.'), 'E1122': ('Duplicate keyword argument %r in function call', 'Used when a function call passes the same keyword argument \ multiple times.'), 'E1123': ('Passing unexpected keyword argument %r in function call', 'Used when a function call passes a keyword argument that \ doesn\'t correspond to one of the function\'s parameter names.'), 'E1124': ('Multiple values passed for parameter %r in function call', 'Used when a function call would result in assigning multiple \ values to a function parameter, one value from a positional \ argument and one from a keyword argument.'), } class TypeChecker(BaseChecker): """try to find bugs in the code using type inference """ __implements__ = (IASTNGChecker,) # configuration section name name = 'typecheck' # messages msgs = MSGS priority = -1 # configuration options options = (('ignore-mixin-members', {'default' : True, 'type' : 'yn', 'metavar': '<y_or_n>', 'help' : 'Tells whether missing members accessed in mixin \ class should be ignored. 
A mixin class is detected if its name ends with \ "mixin" (case insensitive).'} ), ('ignored-classes', {'default' : ('SQLObject',), 'type' : 'csv', 'metavar' : '<members names>', 'help' : 'List of classes names for which member attributes \ should not be checked (useful for classes with attributes dynamically set).'} ), ('zope', {'default' : False, 'type' : 'yn', 'metavar': '<y_or_n>', 'help' : 'When zope mode is activated, add a predefined set \ of Zope acquired attributes to generated-members.'} ), ('generated-members', {'default' : ( 'REQUEST', 'acl_users', 'aq_parent'), 'type' : 'string', 'metavar' : '<members names>', 'help' : 'List of members which are set dynamically and \ missed by pylint inference system, and so shouldn\'t trigger E0201 when \ accessed. Python regular expressions are accepted.'} ), ) def open(self): # do this in open since config not fully initialized in __init__ self.generated_members = list(self.config.generated_members) if self.config.zope: self.generated_members.extend(('REQUEST', 'acl_users', 'aq_parent')) def visit_assattr(self, node): if isinstance(node.ass_type(), astng.AugAssign): self.visit_getattr(node) def visit_delattr(self, node): self.visit_getattr(node) @check_messages('E1101', 'E1103') def visit_getattr(self, node): """check that the accessed attribute exists to avoid to much false positives for now, we'll consider the code as correct if a single of the inferred nodes has the accessed attribute. function/method, super call and metaclasses are ignored """ # generated_members may containt regular expressions # (surrounded by quote `"` and followed by a comma `,`) # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' => # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}') if isinstance(self.config.generated_members, str): gen = shlex.shlex(self.config.generated_members) gen.whitespace += ',' self.config.generated_members = tuple(tok.strip('"') for tok in gen) for pattern in self.config.generated_members: # attribute is marked as generated, stop here if re.match(pattern, node.attrname): return try: infered = list(node.expr.infer()) except InferenceError: return # list of (node, nodename) which are missing the attribute missingattr = set() ignoremim = self.config.ignore_mixin_members inference_failure = False for owner in infered: # skip yes object if owner is YES: inference_failure = True continue # skip None anyway if isinstance(owner, astng.Const) and owner.value is None: continue # XXX "super" / metaclass call if is_super(owner) or getattr(owner, 'type', None) == 'metaclass': continue name = getattr(owner, 'name', 'None') if name in self.config.ignored_classes: continue if ignoremim and name[-5:].lower() == 'mixin': continue try: if not [n for n in owner.getattr(node.attrname) if not isinstance(n.statement(), astng.AugAssign)]: missingattr.add((owner, name)) continue except AttributeError: # XXX method / function continue except NotFoundError: if isinstance(owner, astng.Function) and owner.decorators: continue if isinstance(owner, Instance) and owner.has_dynamic_getattr(): continue # explicit skipping of optparse'Values class if owner.name == 'Values' and owner.root().name == 'optparse': continue missingattr.add((owner, name)) continue # stop on the first found break else: # we have not found any node with the attributes, display the # message for infered nodes done = set() for owner, name in missingattr: if isinstance(owner, Instance): actual = owner._proxied else: actual = owner if actual in done: continue done.add(actual) if inference_failure: msgid = 'E1103' 
else: msgid = 'E1101' self.add_message(msgid, node=node, args=(owner.display_type(), name, node.attrname)) def visit_assign(self, node): """check that if assigning to a function call, the function is possibly returning something valuable """ if not isinstance(node.value, astng.CallFunc): return function_node = safe_infer(node.value.func) # skip class, generator and incomplete function definition if not (isinstance(function_node, astng.Function) and function_node.root().fully_defined()): return if function_node.is_generator() \ or function_node.is_abstract(pass_is_abstract=False): return returns = list(function_node.nodes_of_class(astng.Return, skip_klass=astng.Function)) if len(returns) == 0: self.add_message('E1111', node=node) else: for rnode in returns: if not (isinstance(rnode.value, astng.Const) and rnode.value.value is None): break else: self.add_message('W1111', node=node) def visit_callfunc(self, node): """check that called functions/methods are inferred to callable objects, and that the arguments passed to the function match the parameters in the inferred function's definition """ # Build the set of keyword arguments, checking for duplicate keywords, # and count the positional arguments. keyword_args = set() num_positional_args = 0 for arg in node.args: if isinstance(arg, astng.Keyword): keyword = arg.arg if keyword in keyword_args: self.add_message('E1122', node=node, args=keyword) keyword_args.add(keyword) else: num_positional_args += 1 called = safe_infer(node.func) # only function, generator and object defining __call__ are allowed if called is not None and not called.callable(): self.add_message('E1102', node=node, args=node.func.as_string()) # Note that BoundMethod is a subclass of UnboundMethod (huh?), so must # come first in this 'if..else'. if isinstance(called, astng.BoundMethod): # Bound methods have an extra implicit 'self' argument. num_positional_args += 1 elif isinstance(called, astng.UnboundMethod): if called.decorators is not None: for d in called.decorators.nodes: if isinstance(d, astng.Name) and (d.name == 'classmethod'): # Class methods have an extra implicit 'cls' argument. num_positional_args += 1 break elif (isinstance(called, astng.Function) or isinstance(called, astng.Lambda)): pass else: return if called.args.args is None: # Built-in functions have no argument information. return if len( called.argnames() ) != len( set( called.argnames() ) ): # Duplicate parameter name (see E9801). We can't really make sense # of the function call in this case, so just return. return # Analyze the list of formal parameters. num_mandatory_parameters = len(called.args.args) - len(called.args.defaults) parameters = [] parameter_name_to_index = {} for i, arg in enumerate(called.args.args): if isinstance(arg, astng.Tuple): name = None # Don't store any parameter names within the tuple, since those # are not assignable from keyword arguments. else: if isinstance(arg, astng.Keyword): name = arg.arg else: assert isinstance(arg, astng.AssName) # This occurs with: # def f( (a), (b) ): pass name = arg.name parameter_name_to_index[name] = i if i >= num_mandatory_parameters: defval = called.args.defaults[i - num_mandatory_parameters] else: defval = None parameters.append([(name, defval), False]) # Match the supplied arguments against the function parameters. # 1. Match the positional arguments. 
for i in range(num_positional_args): if i < len(parameters): parameters[i][1] = True elif called.args.vararg is not None: # The remaining positional arguments get assigned to the *args # parameter. break else: # Too many positional arguments. self.add_message('E1121', node=node) break # 2. Match the keyword arguments. for keyword in keyword_args: if keyword in parameter_name_to_index: i = parameter_name_to_index[keyword] if parameters[i][1]: # Duplicate definition of function parameter. self.add_message('E1124', node=node, args=keyword) else: parameters[i][1] = True elif called.args.kwarg is not None: # The keyword argument gets assigned to the **kwargs parameter. pass else: # Unexpected keyword argument. self.add_message('E1123', node=node, args=keyword) # 3. Match the *args, if any. Note that Python actually processes # *args _before_ any keyword arguments, but we wait until after # looking at the keyword arguments so as to make a more conservative # guess at how many values are in the *args sequence. if node.starargs is not None: for i in range(num_positional_args, len(parameters)): [(name, defval), assigned] = parameters[i] # Assume that *args provides just enough values for all # non-default parameters after the last parameter assigned by # the positional arguments but before the first parameter # assigned by the keyword arguments. This is the best we can # get without generating any false positives. if (defval is not None) or assigned: break parameters[i][1] = True # 4. Match the **kwargs, if any. if node.kwargs is not None: for i, [(name, defval), assigned] in enumerate(parameters): # Assume that *kwargs provides values for all remaining # unassigned named parameters. if name is not None: parameters[i][1] = True else: # **kwargs can't assign to tuples. pass # Check that any parameters without a default have been assigned # values. for [(name, defval), assigned] in parameters: if (defval is None) and not assigned: if name is None: display = '<tuple>' else: display_name = repr(name) self.add_message('E1120', node=node, args=display_name) def register(linter): """required method to auto register this checker """ linter.register_checker(TypeChecker(linter))
michalliu/chromium-depot_tools
third_party/pylint/checkers/typecheck.py
Python
bsd-3-clause
16,288
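One detail of visit_getattr above that is easy to miss is the shlex-based parsing of the generated-members option, where double-quoted entries may themselves contain commas (for example a regular expression). The snippet below lifts that parsing into a standalone function for illustration; split_generated_members is an invented name, not pylint API, and the expected output is the one given in the checker's own comment.

import shlex

def split_generated_members(value):
    # Commas separate entries; double-quoted entries are kept whole,
    # so a regular expression containing commas survives the split.
    gen = shlex.shlex(value)
    gen.whitespace += ','
    return tuple(tok.strip('"') for tok in gen)

print(split_generated_members('REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"'))
# -> ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')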
# -*- coding: utf-8 -*-
"""
Python API for language and translation management.
"""

from collections import namedtuple

from django.conf import settings
from django.utils.translation import ugettext as _

from dark_lang.models import DarkLangConfig


# Named tuples can be referenced using object-like variable
# dereferencing, making the use of tuples more readable by
# eliminating the need to see the context of the tuple packing.
Language = namedtuple('Language', 'code name')


def released_languages():
    """Retrieve the list of released languages.

    Constructs a list of Language tuples by intersecting the
    list of valid language tuples with the list of released
    language codes.

    Returns:
        list of Language: Languages in which full translations are available.

    Example:
        >>> print released_languages()
        [Language(code='en', name=u'English'), Language(code='fr', name=u'Français')]

    """
    released_language_codes = DarkLangConfig.current().released_languages_list
    default_language_code = settings.LANGUAGE_CODE

    if default_language_code not in released_language_codes:
        released_language_codes.append(default_language_code)
        released_language_codes.sort()

    # Intersect the list of valid language tuples with the list
    # of released language codes
    released_languages = [
        Language(tuple[0], tuple[1])
        for tuple in settings.LANGUAGES
        if tuple[0] in released_language_codes
    ]

    return released_languages


def all_languages():
    """Retrieve the list of all languages, translated and sorted.

    Returns:
        list of (language code (str), language name (str)): the language names
        are translated in the current activated language and the results are
        sorted alphabetically.

    """
    languages = [(lang[0], _(lang[1])) for lang in settings.ALL_LANGUAGES]  # pylint: disable=translation-of-non-string
    return sorted(languages, key=lambda lang: lang[1])
louyihua/edx-platform
common/djangoapps/lang_pref/api.py
Python
agpl-3.0
1,986
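released_languages above is essentially a filter over Django settings: keep the (code, name) pairs whose code appears in the released list. The framework-free sketch below uses made-up sample data in place of settings.LANGUAGES and DarkLangConfig to show the same intersection.

from collections import namedtuple

Language = namedtuple('Language', 'code name')

# Stand-ins for settings.LANGUAGES and the released language codes.
ALL_LANGUAGES = [('en', 'English'), ('fr', 'Français'), ('eo', 'Esperanto')]
released_codes = ['en', 'fr']

released = [
    Language(code, name)
    for code, name in ALL_LANGUAGES
    if code in released_codes
]
print(released)
# [Language(code='en', name='English'), Language(code='fr', name='Français')]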
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

import os

from wptrunner.update.base import Step, StepRunner
from wptrunner.update.update import LoadConfig, SyncFromUpstream, UpdateMetadata
from wptrunner.update.tree import NoVCSTree

from .tree import GitTree, HgTree, GeckoCommit
from .upstream import SyncToUpstream


class LoadTrees(Step):
    """Load gecko tree and sync tree containing web-platform-tests"""

    provides = ["local_tree", "sync_tree"]

    def create(self, state):
        if os.path.exists(state.sync["path"]):
            sync_tree = GitTree(root=state.sync["path"])
        else:
            sync_tree = None

        if GitTree.is_type():
            local_tree = GitTree(commit_cls=GeckoCommit)
        elif HgTree.is_type():
            local_tree = HgTree(commit_cls=GeckoCommit)
        else:
            local_tree = NoVCSTree()

        state.update({"local_tree": local_tree,
                      "sync_tree": sync_tree})


class UpdateRunner(StepRunner):
    """Overall runner for updating web-platform-tests in Gecko."""

    steps = [LoadConfig,
             LoadTrees,
             SyncToUpstream,
             SyncFromUpstream,
             UpdateMetadata]
UK992/servo
tests/wpt/update/update.py
Python
mpl-2.0
1,349
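The LoadTrees and UpdateRunner classes above follow wptrunner's step pattern: each Step declares what it provides and fills it into a shared state object, and a StepRunner executes the steps in order. The generic sketch below mimics that shape with plain dictionaries; it is an illustration only, not the wptrunner API.

class Step(object):
    provides = []

    def create(self, state):
        raise NotImplementedError

class LoadGreeting(Step):
    provides = ["greeting"]

    def create(self, state):
        # Fill in the value this step promises to provide.
        state["greeting"] = "hello"

def run_steps(steps, state):
    # Execute each step in order against the shared state.
    for step_cls in steps:
        step_cls().create(state)
    return state

print(run_steps([LoadGreeting], {}))  # {'greeting': 'hello'}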