Dataset Viewer
Auto-converted to Parquet
repo_name
stringlengths
5
100
path
stringlengths
4
231
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
6
947k
score
float64
0
0.34
prefix
stringlengths
0
8.16k
middle
stringlengths
3
512
suffix
stringlengths
0
8.17k
Eric89GXL/scipy
scipy/_lib/_ccallback.py
Python
bsd-3-clause
6,196
0.001453
from . import _ccallback_c import ctypes PyCFuncPtr = ctypes.CFUNCTYPE(ctypes.c_void_p).__bases__[0] ffi = None class CData(object): pass def _import_cffi(): global ffi, CData if ffi is not None: return try: import cffi ffi = cffi.FFI() CData = ffi.CData except ImportError: ffi = False class LowLevelCallable(tuple): """ Low-level callback function. Parameters ----------
function : {PyCapsule, ctypes function pointer, cffi function pointer}
Low-level callback function. user_data : {PyCapsule, ctypes void pointer, cffi void pointer} User data to pass on to the callback function. signature : str, optional Signature of the function. If omitted, determined from *function*, if possible. Attributes ---------- function Callback function given user_data User data given signature Signature of the function. Methods ------- from_cython Class method for constructing callables from Cython C-exported functions. Notes ----- The argument ``function`` can be one of: - PyCapsule, whose name contains the C function signature - ctypes function pointer - cffi function pointer The signature of the low-level callback must match one of those expected by the routine it is passed to. If constructing low-level functions from a PyCapsule, the name of the capsule must be the corresponding signature, in the format:: return_type (arg1_type, arg2_type, ...) For example:: "void (double)" "double (double, int *, void *)" The context of a PyCapsule passed in as ``function`` is used as ``user_data``, if an explicit value for ``user_data`` was not given. """ # Make the class immutable __slots__ = () def __new__(cls, function, user_data=None, signature=None): # We need to hold a reference to the function & user data, # to prevent them going out of scope item = cls._parse_callback(function, user_data, signature) return tuple.__new__(cls, (item, function, user_data)) def __repr__(self): return "LowLevelCallable({!r}, {!r})".format(self.function, self.user_data) @property def function(self): return tuple.__getitem__(self, 1) @property def user_data(self): return tuple.__getitem__(self, 2) @property def signature(self): return _ccallback_c.get_capsule_signature(tuple.__getitem__(self, 0)) def __getitem__(self, idx): raise ValueError() @classmethod def from_cython(cls, module, name, user_data=None, signature=None): """ Create a low-level callback function from an exported Cython function. Parameters ---------- module : module Cython module where the exported function resides name : str Name of the exported function user_data : {PyCapsule, ctypes void pointer, cffi void pointer}, optional User data to pass on to the callback function. signature : str, optional Signature of the function. If omitted, determined from *function*. """ try: function = module.__pyx_capi__[name] except AttributeError: raise ValueError("Given module is not a Cython module with __pyx_capi__ attribute") except KeyError: raise ValueError("No function {!r} found in __pyx_capi__ of the module".format(name)) return cls(function, user_data, signature) @classmethod def _parse_callback(cls, obj, user_data=None, signature=None): _import_cffi() if isinstance(obj, LowLevelCallable): func = tuple.__getitem__(obj, 0) elif isinstance(obj, PyCFuncPtr): func, signature = _get_ctypes_func(obj, signature) elif isinstance(obj, CData): func, signature = _get_cffi_func(obj, signature) elif _ccallback_c.check_capsule(obj): func = obj else: raise ValueError("Given input is not a callable or a low-level callable (pycapsule/ctypes/cffi)") if isinstance(user_data, ctypes.c_void_p): context = _get_ctypes_data(user_data) elif isinstance(user_data, CData): context = _get_cffi_data(user_data) elif user_data is None: context = 0 elif _ccallback_c.check_capsule(user_data): context = user_data else: raise ValueError("Given user data is not a valid low-level void* pointer (pycapsule/ctypes/cffi)") return _ccallback_c.get_raw_capsule(func, signature, context) # # ctypes helpers # def _get_ctypes_func(func, signature=None): # Get function pointer func_ptr = ctypes.cast(func, ctypes.c_void_p).value # Construct function signature if signature is None: signature = _typename_from_ctypes(func.restype) + " (" for j, arg in enumerate(func.argtypes): if j == 0: signature += _typename_from_ctypes(arg) else: signature += ", " + _typename_from_ctypes(arg) signature += ")" return func_ptr, signature def _typename_from_ctypes(item): if item is None: return "void" elif item is ctypes.c_void_p: return "void *" name = item.__name__ pointer_level = 0 while name.startswith("LP_"): pointer_level += 1 name = name[3:] if name.startswith('c_'): name = name[2:] if pointer_level > 0: name += " " + "*"*pointer_level return name def _get_ctypes_data(data): # Get voidp pointer return ctypes.cast(data, ctypes.c_void_p).value # # CFFI helpers # def _get_cffi_func(func, signature=None): # Get function pointer func_ptr = ffi.cast('uintptr_t', func) # Get signature if signature is None: signature = ffi.getctype(ffi.typeof(func)).replace('(*)', ' ') return func_ptr, signature def _get_cffi_data(data): # Get pointer return ffi.cast('uintptr_t', data)
QuantiModo/QuantiModo-SDK-Python
SwaggerPetstore/models/json_error_response.py
Python
gpl-2.0
1,773
0.00564
#!/usr/bin/env python # coding: utf-8 """ Copyright 2015 SmartBear Software Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ class JsonErrorResponse(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the
class manually. """ def __init__(self): """ Swagger model :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition. """ self.swagger_types = { 'status': 'str', 'message': 'str' } self.attribute_map = { 'status': 'status', 'message': 'message' } # Status: \&quot;ok\&quot; or \&quot;error\&quot; self.status = None # str # Error message self.message = None # str def __repr__(self): properties = [] for p in self.__dict__: if p != 'swaggerTypes' and p != 'attributeMap': properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p])) return '<{name} {props}>'.format(name=__name__, props=' '.join(properties))
BlueHouseLab/sms-openapi
python-requests/conf.py
Python
apache-2.0
324
0.003521
# -*- coding: utf-8 -*- appid = 'example' apikey = 'c5dd7e7dkjp27377l903c42c032b413b' sender = '01000000000'
# FIXME - MUST BE CHANGED AS REAL PHONE NUMBER receivers = ['01000000000', ] # FIXME - MUST BE CHANGED AS REAL PHONE NU
MBERS content = u'나는 유리를 먹을 수 있어요. 그래도 아프지 않아요'
ULHPC/easybuild-easyblocks
easybuild/easyblocks/generic/cmakemake.py
Python
gpl-2.0
4,702
0.002552
## # Copyright 2009-2017 Ghent University # # This file is part of EasyBuild, # originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), # with support of Ghent University (http://ugent.be/hpc), # the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), # Flemish Research Foundation (FWO) (http://www.fwo.be/en) # and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). # # http://github.com/hpcugent/easybuild # # EasyBuild is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation v2. # # EasyBuild is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with EasyBuild. If not, see <http://www.gnu.org/licenses/>. ## """ EasyBuild support for software that is configured with CMake, implemented as an easyblock @author: Stijn De Weirdt (Ghent University) @author: Dries Verdegem (Ghent University) @author: Kenneth Hoste (Ghent University) @author: Pieter De Baets (Ghent University) @author: Jens Timmerman (Ghent University) @author: Ward Poelmans (Ghent University) """ import os from easybuild.easyblocks.generic.configuremake import ConfigureMake from easybuild.framework.easyconfig import CUSTOM from easybuild.tools.build_log import EasyBuildError from easybuild.tools.config import build_option from easybuild.tools.environment import setvar from easybuild.tools.run import run_cmd class CMakeMake(ConfigureMake): """Support for configuring build with CMake instead of traditional configure script""" @staticmethod def extra_options(extra_vars=None): """Define extra easyconfig parameters specific to CMakeMake.""" extra_vars = ConfigureMake.extra_options(extra_vars) extra_vars.update({ 'srcdir': [None, "Source directory location to provide to cmake command", CUSTOM], 'separate_build_dir': [False, "Perform build in a separate directory", CUSTOM], }) return extra_vars def configure_step(self, srcdir=None, builddir=None): """Configure build using cmake"""
if builddir is not None: self.log.nosupport("CMakeMake.configure_step: named argument
'builddir' (should be 'srcdir')", "2.0") # Set the search paths for CMake include_paths = os.pathsep.join(self.toolchain.get_variable("CPPFLAGS", list)) library_paths = os.pathsep.join(self.toolchain.get_variable("LDFLAGS", list)) setvar("CMAKE_INCLUDE_PATH", include_paths) setvar("CMAKE_LIBRARY_PATH", library_paths) default_srcdir = '.' if self.cfg.get('separate_build_dir', False): objdir = os.path.join(self.builddir, 'easybuild_obj') try: os.mkdir(objdir) os.chdir(objdir) except OSError, err: raise EasyBuildError("Failed to create separate build dir %s in %s: %s", objdir, os.getcwd(), err) default_srcdir = self.cfg['start_dir'] if srcdir is None: if self.cfg.get('srcdir', None) is not None: srcdir = self.cfg['srcdir'] else: srcdir = default_srcdir options = ['-DCMAKE_INSTALL_PREFIX=%s' % self.installdir] env_to_options = { 'CC': 'CMAKE_C_COMPILER', 'CFLAGS': 'CMAKE_C_FLAGS', 'CXX': 'CMAKE_CXX_COMPILER', 'CXXFLAGS': 'CMAKE_CXX_FLAGS', 'F90': 'CMAKE_Fortran_COMPILER', 'FFLAGS': 'CMAKE_Fortran_FLAGS', } for env_name, option in env_to_options.items(): value = os.getenv(env_name) if value is not None: options.append("-D%s='%s'" % (option, value)) if build_option('rpath'): # instruct CMake not to fiddle with RPATH when --rpath is used, since it will undo stuff on install... # https://github.com/LLNL/spack/blob/0f6a5cd38538e8969d11bd2167f11060b1f53b43/lib/spack/spack/build_environment.py#L416 options.append('-DCMAKE_SKIP_RPATH=ON') # show what CMake is doing by default options.append('-DCMAKE_VERBOSE_MAKEFILE=ON') options_string = ' '.join(options) command = "%s cmake %s %s %s" % (self.cfg['preconfigopts'], srcdir, options_string, self.cfg['configopts']) (out, _) = run_cmd(command, log_all=True, simple=False) return out
asiroliu/MyTools
MyArgparse.py
Python
gpl-2.0
2,738
0.004018
# coding=utf-8 from __future__ import unicode_literals """ Name: MyArgparse Author: Andy Liu Email : [email protected] Created: 3/26/2015 Copyright: All rights reserved. Licence: This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import argparse import logging def parse_command_line(): parser = argparse.ArgumentParser(prog='PROG', description='%(prog)s can ...') parser.add_argument('NoPre', action="store", help='help information') parser.add_argument('-t', action="store_true", dest='boolean_switch', default=False, help='Set a switch to true') parser.add_argument('-f', action="store_false", dest='boolean_switch', default=True, help='Set a switch to false') parser.add_argument('-s', action="store", dest='simple_value', help="Store a simple value") parser.add_argument('-st', action="store", dest="simple_value", type=int, help='Store a simple value and define type') parser.add_argument('-c', action='store_const', dest='constant_value', const='value-to-store', help='Store a constant value') parser.add_argument('-a', action='append', dest='collection', default=[], help='Add repeated values to a list') parser.add_argument('-A', action='append_const', dest='const_collection', const='value-1-to-append', default=[], help='Add different values to list') parser.add_argument('-B', action='append_const', dest='const_collection', const='value-2-to-append',
help='Add different values to list') args = parser.parse_args() logging.debug('NoPre = %r' % args.NoPre) logging.debug('simple_value = %r' % args.simple_value) logging.debug('constant_value = %r' % args.constant_value) logging.debug('boolean_switch = %r' % args.boolean_switch) logging.debug('collection = %r' % args.co
llection) logging.debug('const_collection = %r' % args.const_collection) return args if __name__ == '__main__': from MyLog import init_logger logger = init_logger() parse_command_line()
chrigu6/vocabulary
vocabulary/trainer/admin.py
Python
gpl-3.0
195
0
from django.contrib import admin from trai
ner.models import Language, Word, Card, Set admin.site.register(Language) admin.site.register(Word) admin.site.register(Card) admin.site.regist
er(Set)
tomashaber/raiden
raiden/network/protocol.py
Python
mit
24,753
0.000485
# -*- coding: utf-8 -*- import logging import random from collections import ( namedtuple, defaultdict, ) from itertools import repeat import cachetools import gevent from gevent.event import ( _AbstractLinkable, AsyncResult, Event, ) from ethereum import slogging from raiden.exceptions import ( InvalidAddress, InvalidLocksRoot, InvalidNonce, TransferWhenClosed, TransferUnwanted, UnknownAddress, UnknownTokenAddress, ) from raiden.constants import ( UDP_MAX_MESSAGE_SIZE, ) from raiden.settings import ( CACHE_TTL, ) from raiden.messages import decode, Ack, Ping, SignedMessage from raiden.utils import isaddress, sha3, pex from raiden.utils.notifying_queue import NotifyingQueue log = slogging.get_logger(__name__) # pylint: disable=invalid-name ping_log = slogging.get_logger(__name__ + '.ping') # pylint: disable=invalid-name # - async_result available for code that wants to block on message acknowledgment # - receiver_address used to tie back the echohash to the receiver (mainly for # logging purposes) SentMessageState = namedtuple('SentMessageState', ( 'async_result', 'receiver_address', )) HealthEvents = namedtuple('HealthEvents', ( 'event_healthy', 'event_unhealthy', )) NODE_NETWORK_UNKNOWN = 'unknown' NODE_NETWORK_UNREACHABLE = 'unreachable' NODE_NETWORK_REACHABLE = 'reachable' # GOALS: # - Each netting channel must have the messages processed in-order, the # protocol must detect unacknowledged messages and retry them. # - A queue must not stall because of synchronization problems in other queues. # - Assuming a queue can stall, the unhealthiness of a node must not be # inferred from the lack of acknowledgement from a single queue, but healthiness # may be safely inferred from it. # - The state of the node must be synchronized among all tasks that are # handling messages. def event_first_of(*events): """ Waits until one of `events` is set. The event returned is /not/ cleared with any of the `events`, this value must not be reused if the clearing behavior is used. """ first_finished = Event() if not all(isinstance(e, _AbstractLinkable) for e in events): raise ValueError('all events must be linkable') for event in events: event.rawlink(lambda _: first_finished.set()) return first_finished def timeout_exponential_backoff(retries, timeout, maximum): """ Timeouts generator with an exponential backoff strategy. Timeouts start spaced by `timeout`, after `retries` exponentially increase the retry delays until `maximum`, then maximum is returned indefinitely. """ yield timeout tries = 1 while tries < retries: tries += 1 yield timeout while timeout < maximum: timeout = min(timeout * 2, maximum) yield timeout while True: yield maximum def retry(protocol, data, receiver_address, event_stop, timeout_backoff): """ Send data until it's acknowledged. Exits when the first of the following happen: - The packet is acknowledged. - Event_stop is set. - The iterator timeout_backoff runs
out of values. Returns: bool: True if the message was acknowledged, False otherwise. """ async_result = protocol.send_raw_with_result( data, receiver_address, ) event_quit = event_first_of( asyn
c_result, event_stop, ) for timeout in timeout_backoff: if event_quit.wait(timeout=timeout) is True: break protocol.send_raw_with_result( data, receiver_address, ) return async_result.ready() def wait_recovery(event_stop, event_healthy): event_first_of( event_stop, event_healthy, ).wait() if event_stop.is_set(): return # There may be multiple threads waiting, do not restart them all at # once to avoid message flood. gevent.sleep(random.random()) def retry_with_recovery( protocol, data, receiver_address, event_stop, event_healthy, event_unhealthy, backoff): """ Send data while the node is healthy until it's acknowledged. Note: backoff must be an infinite iterator, otherwise this task will become a hot loop. """ # The underlying unhealthy will be cleared, care must be taken to properly # clear stop_or_unhealthy too. stop_or_unhealthy = event_first_of( event_stop, event_unhealthy, ) acknowledged = False while not event_stop.is_set() and not acknowledged: # Packets must not be sent to an unhealthy node, nor should the task # wait for it to become available if the message has been acknowledged. if event_unhealthy.is_set(): wait_recovery( event_stop, event_healthy, ) # Assume wait_recovery returned because unhealthy was cleared and # continue execution, this is safe to do because event_stop is # checked below. stop_or_unhealthy.clear() if event_stop.is_set(): return acknowledged = retry( protocol, data, receiver_address, # retry will stop when this event is set, allowing this task to # wait for recovery when the node becomes unhealthy or to quit if # the stop event is set. stop_or_unhealthy, # Intentionally reusing backoff to restart from the last # timeout/number of iterations. backoff, ) return acknowledged def single_queue_send( protocol, receiver_address, queue, event_stop, event_healthy, event_unhealthy, message_retries, message_retry_timeout, message_retry_max_timeout): """ Handles a single message queue for `receiver_address`. Notes: - This task must be the only consumer of queue. - This task can be killed at any time, but the intended usage is to stop it with the event_stop. - If there are many queues for the same receiver_address, it is the caller's responsibility to not start them together to avoid congestion. - This task assumes the endpoint is never cleared after it's first known. If this assumption changes the code must be updated to handle unknown addresses. """ # A NotifyingQueue is required to implement cancelability, otherwise the # task cannot be stoped while the greenlet waits for an element to be # inserted in the queue. if not isinstance(queue, NotifyingQueue): raise ValueError('queue must be a NotifyingQueue.') # Reusing the event, clear must be carefully done data_or_stop = event_first_of( queue, event_stop, ) # Wait for the endpoint registration or to quit event_first_of( event_healthy, event_stop, ).wait() while True: data_or_stop.wait() if event_stop.is_set(): return # The queue is not empty at this point, so this won't raise Empty. # This task being the only consumer is a requirement. data = queue.peek(block=False) backoff = timeout_exponential_backoff( message_retries, message_retry_timeout, message_retry_max_timeout, ) acknowledged = retry_with_recovery( protocol, data, receiver_address, event_stop, event_healthy, event_unhealthy, backoff, ) if acknowledged: queue.get() # Checking the length of the queue does not trigger a # context-switch, so it's safe to assume the length of the queue # won't change under our feet and when a new item will be added the # event will be set again. if not queue: data_or_stop.clear() if event_stop.is_set(): return def healt
kernsuite-debian/lofar
SAS/ResourceAssignment/ResourceAssignmentEditor/config/default.py
Python
gpl-3.0
967
0
# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy) # P.O. Box 2, 7990 AA Dwingeloo, The Netherlands # # This file is part of the LOFAR software suite. # The LOFAR software suite is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # T
he LOFAR software suite is distributed in the hope that it will b
e useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>. '''default config for webservice''' DEBUG = False JSONIFY_PRETTYPRINT_REGULAR = False print('default config loaded')
mediafactory/yats
modules/yats/caldav/storage.py
Python
mit
12,605
0.002697
# -*- coding: utf-8 -*- import json import logging import vobject from datetime import datetime from contextlib import contextmanager from radicale import ical from yats.shortcuts import get_ticket_model, build_ticket_search_ext, touch_ticket, remember_changes, mail_ticket, jabber_ticket, check_references, add_history, mail_comment, jabber_comment from yats.models import tickets_reports, UserProfile, get_flow_end, tickets_comments, ticket_resolution, get_default_resolution, convertPrio from yats.forms import SimpleTickets from django.contrib.auth.models import AnonymousUser, User from django.http import QueryDict from django.conf import settings from django.utils import timezone from django.utils.translation import ugettext as _ from djradicale.models import DBProperties logger = logging.getLogger('djradicale') ICAL_TYPES = ( ical.Event, ical.Todo, ical.Journal, # ical.Card, ical.Timezone, ) class FakeRequest: def __init__(self): self.GET = {} self.POST = {} self.session = {} self.user = AnonymousUser() class Collection(ical.Collection): @property def headers(self): return ( ical.Header('PRODID:-//YATS//NONSGML Radicale Server//EN'), ical.Header('VERSION:%s' % self.version)) def delete(self): repid = self._getReportFromUrl(self.path) tickets_reports.objects.get(pk=repid).delete() def append(self, name, text): import pydevd pydevd.settrace('192.168.33.1', 5678) new_items = self._parse(text, ICAL_TYPES, name) timezones = list(filter( lambda x: x.tag == ical.Timezone.tag, new_items.values())) request = self._getRequestFromUrl(self.path) for new_item in new_items.values(): if new_item.tag == ical.Timezone.tag: continue if new_item.name not in self.items: self.items[new_item.name] = new_item text = ical.serialize(self.tag, self.headers, [new_item] + timezones) cal = vobject.readOne(text) # close ticket if hasattr(cal.vtodo, 'status') and cal.vtodo.status.value == 'COMPLETED': ticket = get_ticket_model() try: flow_end = get_flow_end() resolution = get_default_resolution() close_comment = _('closed via CalDAV') tic = ticket.objects.get(uuid=cal.vtodo.uid.value) tic.resolution = resolution tic.closed = True tic.close_date = timezone.now() tic.state = flow_end tic.save(user=request.user) com = tickets_comments() com.comment = _('ticket closed - resolution: %(resolution)s\n\n%(comment)s') % {'resolution': resolution.name, 'comment': close_comment} com.ticket = tic com.action = 1 com.save(user=request.user) check_references(request, com) touch_ticket(request.user, tic.id) add_history(request, tic, 1, close_comment) mail_comment(request, com.pk) jabber_comment(request, com.pk) except Exception: pass # change or new else: params = { 'caption': cal.vtodo.summary.value, 'description': cal.vtodo.description.value if hasattr(cal.vtodo, 'description') else None, 'uuid': cal.vtodo.uid.value, 'show_start': cal.vtodo.due.value if hasattr(cal.vtodo, 'due') else None, 'priority': convertPrio(cal.vtodo.priority.value) if hasattr(cal.vtodo, 'priority') else None } fakePOST = QueryDict(mutable=True) fakePOST.update(params) form = SimpleTickets(fakePOST) if form.is_valid(): cd = form.cleaned_data ticket = get_ticket_model() # change ticket try: tic = ticket.objects.get(uuid=cal.vtodo.uid.value) tic.caption = cd['caption'] tic.description = cd['description'] tic.priority = cd['priority'] # tic.assigned = cd['assigned'] tic.show_start = cd['show_start'] tic.save(user=request.user) # new ticket except ticket.DoesNotExist: tic = ticket() tic.caption = cd['caption'] tic.description = cd['description'] if 'priority' not in cd or not cd['priority']: if hasattr(settings, 'KEEP_IT_SIMPLE_DEFAULT_PRIORITY') and settings.KEEP_IT_SIMPLE_DEFAULT_PRIORITY: tic.priority_id = settings.KEEP_IT_SIMPLE_DEFAULT_PRIORITY else: tic.priority = cd['priority'] tic.assigned = request.user if hasattr(settings, 'KEEP_IT_SIMPLE_DEFAULT_CUSTOMER') and settings.KEEP_IT_SIMPLE_DEFAULT_CUSTOMER: if settings.KEEP_IT_SIMPLE_DEFAULT_CUSTOMER == -1: tic.customer = request.organisation else: tic.customer_id = settings.KEEP_IT_SIMPLE_DEFAULT_CUSTOME if hasattr(settings, 'KEEP_IT_SIMPLE_DEFAULT_COMPO
NENT') and settings.KEEP_IT_SIMPLE_DEFAULT_COMPONENT: tic.component_id = settings.KEEP_IT_SIMPLE_DEFAULT_COMPONENT tic.show_start = cd['show_start'] tic.uuid = cal.vtodo.uid.value tic.save(user=request.user) if tic.assigned: touch_ticket(tic.assigned, tic.pk)
for ele in form.changed_data: form.initial[ele] = '' remember_changes(request, form, tic) touch_ticket(request.user, tic.pk) mail_ticket(request, tic.pk, form, rcpt=settings.TICKET_NEW_MAIL_RCPT, is_api=True) jabber_ticket(request, tic.pk, form, rcpt=settings.TICKET_NEW_JABBER_RCPT, is_api=True) else: raise Exception(form.errors) def remove(self, name): pass def replace(self, name, text): self.append(name, text) @property def text(self): return ical.serialize(self.tag, self.headers, self.items.values()) @classmethod def children(cls, path): """Yield the children of the collection at local ``path``.""" request = cls._getRequestFromUrl(path) children = list(tickets_reports.objects.filter(active_record=True, c_user=request.user).values_list('slug', flat=True)) children = ['%s/%s.ics' % (request.user.username, itm) for itm in children] return map(cls, children) @classmethod def is_node(cls, path): """Return ``True`` if relative ``path`` is a node. A node is a WebDAV collection whose members are other collections. """ request = cls._getRequestFromUrl(path) if path == request.user.username: return True else: return False @classmethod def is_leaf(cls, path): """Return ``True`` if relative ``path`` is a leaf. A leaf is a WebDAV collection whose members are not collections. """ result = False if '.ics' in path: try: request = cls._getRequestFromUrl(path) rep = tickets_reports.objects.get(active_record=True, pk=cls._getReportFromUrl(path)) tic = get_ticket_model().objects.select_related('type', 'state', 'assigned', 'priority', 'cu
our-city-app/oca-backend
src/rogerthat/bizz/payment/to.py
Python
apache-2.0
1,164
0.000859
# -*- coding: utf-8 -*- # Copyright 2020 Green Valley Belgium NV # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance
with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in
writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # @@license_version:1.7@@ from mcfw.properties import bool_property, unicode_list_property, unicode_property, typed_property class BankDataTO(object): bankCode = unicode_property('bankCode') name = unicode_property('name') bic = unicode_property('bic') class OpenIbanResultTO(object): valid = bool_property('valid') messages = unicode_list_property('message') iban = unicode_property('iban') bankData = typed_property('bankData', BankDataTO) # type: BankDataTO checkResults = typed_property('checkResults', dict)
laurent-george/weboob
modules/cmso/__init__.py
Python
agpl-3.0
788
0
# -*- coding: utf-8 -*- #
Copyright(C) 2012 Romain Bignon # # This file is part of weboob. # # weboob is free software: you can redistribute it and/or modify # it under the terms of the GNU
Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # weboob is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with weboob. If not, see <http://www.gnu.org/licenses/>. from .module import CmsoModule __all__ = ['CmsoModule']
pculture/mirocommunity
localtv/models.py
Python
agpl-3.0
55,425
0.000902
import datetime import itertools import re import urllib2 import mimetypes import operator import logging import sys import traceback import warnings import tagging import tagging.models import vidscraper from bs4 import BeautifulSoup from django.conf import settings from django.contrib.auth.models import User from django.contrib.comments.moderation import CommentModerator, moderator from django.contrib.sites.models import Site from django.contrib.contenttypes import generic from django.core.exceptions import ValidationError from django.core.mail import EmailMessage from django.core.signals import request_finished from django.core.validators import ipv4_re from django.db import models from django.template import Context, loader from django.utils.html import escape as html_escape from django.utils.safestring import mark_safe from django.utils.translation import ugettext_lazy as _ from haystack import connections, connection_router from mptt.models import MPTTModel from notification import models as notification from slugify import slugify from localtv import utils, settings as lsettings from localtv.managers import SiteRelatedManager, VideoManager from localtv.signals import post_video_from_vidscraper, submit_finished from localtv.templatetags.filters import sanitize VIDEO_SERVICE_REGEXES = ( ('YouTube', r'http://gdata\.youtube\.com/feeds/'), ('YouTube', r'http://(www\.)?youtube\.com/'), ('blip.tv', r'http://(.+\.)?blip\.tv/'), ('Vimeo', r'http://(www\.)?vimeo\.com/'), ('Dailymotion', r'http://(www\.)?dailymotion\.com/rss')) class Thumbnailable(models.Model): """ A type of Model that has thumbnails generated for it. Now that we're using Daguerre for thumbnails, this is just for backwards compatibility. """ # we set this to "logo" for SiteSettings, 'icon' for WidgetSettings thumbnail_attribute = 'thumbnail' class Meta: abstract = True @property def has_thumbnail(self): warnings.warn("has_thumbnail is deprecated and will be removed in a " "future version.", DeprecationWarning) return bool(getattr(self, self.thumbnail_attribute)) @property def thumbnail_path(self): warnings.warn("thumbnail_path is deprecated and will be removed in a " "future version.", DeprecationWarning) thumb_file = getattr(self, self.thumbnail_attribute) if thumb_file: return thumb_file.name else: return '' class SiteSettings(Thumbnailable): """ A model for storing Site-specific settings (feature switches, custom HTML and CSS, etc) in the database rather than in settings files. Most of these can thus be set by site admins rather than sysadmins. There are also a few fields for storing site event state. """ thumbnail_attribute = 'logo' #: Link to the Site these settings are for. site = models.OneToOneField(Site) ## Site styles ## #: Custom logo image for this site. logo = models.ImageField(upload_to=utils.UploadTo('localtv/sitesettings/logo/%Y/%m/%d/'), blank=True) #: Custom background image for this site. background = models.ImageField(upload_to=utils.UploadTo('localtv/sitesettings/background/%Y/%m/%d/'), blank=True) #: Arbitrary custom css overrides. css = models.TextField(blank=True) ## Custom HTML ## #: Subheader for the site. tagline = models.CharField(max_length=4096, blank=True) #: Arbitrary custom HTML which (currently) is used as a site description #: on the main page. sidebar_html = models.TextField(blank=True) #: Arbitrary custom HTML which displays in the footer of all non-admin pages. footer_html = models.TextField(blank=True) #: Arbitrary custom HTML which displays on the about page. about_html = models.TextField(blank=True) ## Site permissions ## #: A collection of Users who have administrative access to the site. admins = models.ManyToManyField('auth.User', blank=True, related_name='admin_for') #: Whether or not the Submit Video button should display or not. #: Doesn't affect whether videos can be submitted or not. #: See http://bugzilla.pculture.org/show_bug.cgi?id=19809 display_submit_button = models.BooleanField(default=True) #: Whether or not users need to log in to submit videos. submission_requires_login = models.BooleanField(default=False) #: Whether or not an email address needs to be given with an #: unauthenticated video submission. submission_requires_email = models.BooleanField(default=False) ## Feature switches ## #: Whether playlist functionality is enabled. playlists_enabled = models.IntegerField(default=1) #: Whether the original publication date or date added to this site #: should be used for sorting videos. use_original_date = models.BooleanField( default=True, help_text="If set, use the original date the video was posted. " "Otherwise, use the date the video was added to this site.") #: Whether comments should be held for moderation. screen_all_comments = models.BooleanField( verbose_name='Hold comments for moderation', default=True, help_text="Hold all comments for moderation by default?") #: Whether leaving a comment requires you to be logged in. comments_required_login = models.BooleanField( default=False, verbose_name="Require Login", help_text="If True, comments require the user to be logged in.") ## Tracking fields ## #: Whether a user has elected to hide the "get started" section in #: the admin interface. hide_get_started = models.BooleanField(default=False) objects = SiteRelatedManager() def __unicode__(self): return u'%s (%s)' % (self.site.name, self.site.domain) def user_is_admin(self, user): """ Return True if the given User is an admin for this SiteSettings. """ if not user.is_authenticated() or not user.is_active: return False if user.is_superuser: return True return self.admins.filter(pk=user.pk).exists() def should_show_dashboard(self): """Returns True for backwards-compatibility.""" warnings.warn("should_show_dashboard is deprecated and will be " "removed in a future version.", DeprecationWarn
ing) return True class WidgetSettingsManager(SiteRelatedManager): def _new_entry(self, site, using): ws = super(WidgetSettingsManager, self)._new_entry(site, using) try: site_settings = SiteS
ettings.objects.get_cached(site, using) except SiteSettings.DoesNotExist: pass else: if site_settings.logo: site_settings.logo.open() ws.icon = site_settings.logo ws.save() return ws class WidgetSettings(Thumbnailable): """ A Model which represents the options for controlling the widget creator. """ thumbnail_attribute = 'icon' site = models.OneToOneField(Site) title = models.CharField(max_length=250, blank=True) title_editable = models.BooleanField(default=True) icon = models.ImageField(upload_to=utils.UploadTo('localtv/widgetsettings/icon/%Y/%m/%d/'), blank=True) icon_editable = models.BooleanField(default=False) css = models.FileField(upload_to=utils.UploadTo('localtv/widgetsettings/css/%Y/%m/%d/'), blank=True) css_editable = models.BooleanField(default=False) bg_color = models.CharField(max_length=20, blank=True) bg_color_editable = models.BooleanField(default=False) text_color = models.CharField(max_length=20, blank=True) text_color_editable = models.BooleanField(default=False) border_color = models.CharField(max_length=20, blank=True) border_color_editable = models.BooleanField(default=False) objects = WidgetSettingsManager() def get_title_or_reasonable_default(self): # Is the title worth using? If so, use that
caseyrollins/osf.io
api/base/versioning.py
Python
apache-2.0
5,741
0.003135
from rest_framework import exceptions as drf_exceptions from rest_framework import versioning as drf_versioning from rest_framework.compat import unicode_http_header from res
t_framework.utils.mediatypes import _MediaType from api.base import exceptions from api.base import utils from api.base.renderers import BrowsableAPIRendererNoForms from api.base.settings import LATEST_VERSIONS def get_major_version(version): ret
urn int(version.split('.')[0]) def url_path_version_to_decimal(url_path_version): # 'v2' --> '2.0' return str(float(url_path_version.split('v')[1])) def decimal_version_to_url_path(decimal_version): # '2.0' --> 'v2' return 'v{}'.format(get_major_version(decimal_version)) def get_latest_sub_version(major_version): # '2' --> '2.6' return LATEST_VERSIONS.get(major_version, None) class BaseVersioning(drf_versioning.BaseVersioning): def __init__(self): super(BaseVersioning, self).__init__() def get_url_path_version(self, kwargs): invalid_version_message = 'Invalid version in URL path.' version = kwargs.get(self.version_param) if version is None: return self.default_version version = url_path_version_to_decimal(version) if not self.is_allowed_version(version): raise drf_exceptions.NotFound(invalid_version_message) if get_major_version(version) == get_major_version(self.default_version): return self.default_version return version def get_header_version(self, request, major_version): invalid_version_message = 'Invalid version in "Accept" header.' media_type = _MediaType(request.accepted_media_type) version = media_type.params.get(self.version_param) if not version: return None if version == 'latest': return get_latest_sub_version(major_version) version = unicode_http_header(version) if not self.is_allowed_version(version): raise drf_exceptions.NotAcceptable(invalid_version_message) return version def get_default_version(self, request, major_version): """Returns the latest available version for the browsable api, otherwise REST_FRAMEWORK default version""" if request.accepted_renderer.__class__ == BrowsableAPIRendererNoForms: return get_latest_sub_version(major_version) return self.default_version def get_query_param_version(self, request, major_version): invalid_version_message = 'Invalid version in query parameter.' version = request.query_params.get(self.version_param) if not version: return None if version == 'latest': return get_latest_sub_version(major_version) if not self.is_allowed_version(version): raise drf_exceptions.NotFound(invalid_version_message) return version def validate_pinned_versions(self, url_path_version, header_version, query_parameter_version): url_path_major_version = get_major_version(url_path_version) header_major_version = get_major_version(header_version) if header_version else None query_major_version = get_major_version(query_parameter_version) if query_parameter_version else None if header_version and header_major_version != url_path_major_version: raise exceptions.Conflict( detail='Version {} specified in "Accept" header does not fall within URL path version {}'.format( header_version, url_path_version, ), ) if query_parameter_version and query_major_version != url_path_major_version: raise exceptions.Conflict( detail='Version {} specified in query parameter does not fall within URL path version {}'.format( query_parameter_version, url_path_version, ), ) if header_version and query_parameter_version and (header_version != query_parameter_version): raise exceptions.Conflict( detail='Version {} specified in "Accept" header does not match version {} specified in query parameter'.format( header_version, query_parameter_version, ), ) def determine_version(self, request, *args, **kwargs): url_path_version = self.get_url_path_version(kwargs) major_version = get_major_version(url_path_version) header_version = self.get_header_version(request, major_version) query_parameter_version = self.get_query_param_version(request, major_version) version = url_path_version if header_version or query_parameter_version: self.validate_pinned_versions(url_path_version, header_version, query_parameter_version) version = header_version if header_version else query_parameter_version else: version = self.get_default_version(request, major_version) return version def reverse(self, viewname, args=None, kwargs=None, request=None, format=None, **extra): url_path_version = self.get_url_path_version(kwargs) major_version = get_major_version(url_path_version) query_parameter_version = self.get_query_param_version(request, major_version) kwargs = {} if (kwargs is None) else kwargs kwargs[self.version_param] = decimal_version_to_url_path(url_path_version) query_kwargs = {'version': query_parameter_version} if query_parameter_version else None return utils.absolute_reverse( viewname, query_kwargs=query_kwargs, args=args, kwargs=kwargs, )
Guidobelix/pyload
module/plugins/accounts/FilerioCom.py
Python
gpl-3.0
407
0.014742
# -*- coding: utf-8 -*- from module.plugins.internal.XFSAccount import XFSAccount class FilerioCom(XFSAccount):
__name__ = "FilerioCom" __type__ = "account" __version__ = "0.07" __status__ = "testing" __description__ = """FileRio.in account plugin""" __license__ = "GPLv3" __authors__ = [("zoidberg", "[email protected]")] PLUGIN_
DOMAIN = "filerio.in"
ray-project/ray
python/ray/train/tests/test_results_preprocessors.py
Python
apache-2.0
7,269
0.001238
import pytest from ray.train.callbacks.results_preprocessors import ( ExcludedKeysResultsPreprocessor, IndexedResultsPreprocessor, SequentialResultsPreprocessor, AverageResultsPreprocessor, MaxResultsPreprocessor, WeightedAverageResultsPreprocessor, ) def test_excluded_keys_results_preprocessor(): results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}] expected = [{"b": 2}, {"b": 4}] preprocessor = ExcludedKeysResultsPreprocessor("a") preprocessed_results = preprocessor.preprocess(results) assert preprocessed_results == expected def test_indexed_results_preprocessor(): results = [{"a": 1}, {"a": 2}, {"a": 3}, {"a": 4}] expected = [{"a": 1}, {"a": 3}] preprocessor = IndexedResultsPreprocessor([0, 2]) preprocessed_results = preprocessor.preprocess(results) assert preprocessed_results == expected def test_sequential_results_preprocessor(): results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}] expected = [{"b": 2}, {"b": 6}] preprocessor_1 = ExcludedKeysResultsPreprocessor("a") # [{"b": 2}, {"b": 4}, {"b": 6}, {"b": 8}] preprocessor_2 = IndexedResultsPreprocessor([0, 2]) preprocessor = SequentialResultsPreprocessor([preprocessor_1, preprocessor_2]) preprocessed_results = preprocessor.preprocess(results) assert preprocessed_results == expected def test_average_results_preprocessor(): from copy import deepcopy import numpy as np results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}] expected = deepcopy(results) for res in expected: res.update( { "avg(a)": np.mean([result["a"] for result in results]), "avg(b)": np.mean([result["b"] for result in results]), } ) preprocessor = AverageResultsPreprocessor(["a", "b"]) preprocessed_results = preprocessor.preprocess(results) assert preprocessed_results == expected def test_max_results_preprocessor(): from copy import deepcopy import numpy as np results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}] expected = deepcopy(results) for res in expected: res.update( { "max(a)": np.max([result["a"] for result in results]), "max(b)": np.max([result["b"] for result in results]), } ) preprocessor = MaxResultsPreprocessor(["a", "b"]) preprocessed_results = preprocessor.preprocess(results) assert preprocessed_results == expected def test_weighted_average_results_preprocessor(): from copy import deepcopy import numpy as np results = [{"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, {"a": 7, "b": 8}] expected = deepcopy(results) total_weight = np.sum([result["b"] for result in results]) for res in expected: res.update( { "weight_avg_b(a)": np.sum( [result["a"] * result["b"] / total_weight for result in results] ) } ) preprocessor = WeightedAverageResultsPreprocessor(["a"], "b") preprocessed_results = preprocessor.preprocess(results) assert preprocessed_results == expected @pytest.mark.parametrize( ("results_preprocessor", "expected_value"), [(AverageResultsPreprocessor, 2.0), (MaxResultsPreprocessor, 3.0)], ) def test_warning_in_aggregate_results_preprocessors( caplog, results_preprocessor, expected_value ): import logging from copy import deepcopy from ray.util import debug caplog.at_level(logging.WARNING) results1 = [{"a": 1}, {"a": 2}, {"a": 3}, {"a": 4}] results2 = [{"a": 1}, {"a": "invalid"}, {"a": 3}, {"a": "invalid"}] results3 = [{"a": "invalid"}, {"a": "invalid"}, {"a": "invalid"}, {"a": "invalid"}] results4 = [{"a": 1}, {"a": 2}, {"a": 3}, {"c": 4}] # test case 1: metric key `b` is missing from all workers results_preprocessor1 = results_preprocessor(["b"]) results_preprocessor1.preprocess(results1) assert "`b` is not reported from workers, so it is ignored." in caplog.text # test case 2: some values of key `a` have invalid data type results_preprocessor2 = results_preprocessor(["a"]) expected2 = deepcopy(results2) aggregation_key = results_preprocessor2.aggregate_fn.wrap_key("a") for res in expected2: res.update({aggregation_key: expected_value}) assert results_preprocessor2.preprocess(results2) == expected2 # test case 3: all key `a` values are invalid results_preprocessor2.preprocess(results3) assert "`a` value type is not valid, so it is ignored." in caplog.text # test case 4: some workers don't report key `a` expected4 = deepcopy(results4) aggregation_key = results_preprocessor2.aggregate_fn.wrap_key("a") for res in expected4: res.update({aggregation_key: expected_value}) assert results_preprocessor2.preprocess(results4) == expected4 for record in caplog.records: assert record.levelname == "WARNING" debug.reset_log_once("b") debug.reset_log_once("a") def test_warning_in_weighted_average_results_preprocessors(caplog): import logging from copy import deepcopy caplog.at_level(logging.WARNING) results1 = [{"a": 1}, {"a": 2}, {"a": 3}, {"a": 4}] results2 = [{"b": 1}, {"b": 2}, {"b": 3}, {"b": 4}] results3 = [ {"a": 1, "c": 3}, {"a": 2, "c": "invalid"}, {"a": "invalid", "c": 1}, {"a": 4, "c": "invalid"}, ] results4 = [ {"a": 1, "c": "invalid"}, {"a": 2, "c": "invalid"}, {"a": 3, "c": "invalid"}, {"a": 4, "c": "invalid"}, ] # test case 1: weight key `b` is not reported from all workers
results_preprocessor1 = WeightedAverageResultsPreprocessor(["a"], "b") expected1 = deepcopy(results1) for res in expected1: res.update({"weight_avg_b(a)": 2.5}) assert results_preprocessor1.p
reprocess(results1) == expected1 assert ( "Averaging weight `b` is not reported by all workers in `train.report()`." in caplog.text ) assert "Use equal weight instead." in caplog.text # test case 2: metric key `a` (to be averaged) is not reported from all workers results_preprocessor1.preprocess(results2) assert "`a` is not reported from workers, so it is ignored." in caplog.text # test case 3: both metric and weight keys have invalid data type results_preprocessor2 = WeightedAverageResultsPreprocessor(["a"], "c") expected3 = deepcopy(results3) for res in expected3: res.update({"weight_avg_c(a)": 1.0}) assert results_preprocessor2.preprocess(results3) == expected3 # test case 4: all weight values are invalid expected4 = deepcopy(results4) for res in expected4: res.update({"weight_avg_c(a)": 2.5}) assert results_preprocessor2.preprocess(results4) == expected4 assert "Averaging weight `c` value type is not valid." in caplog.text for record in caplog.records: assert record.levelname == "WARNING" if __name__ == "__main__": import pytest import sys sys.exit(pytest.main(["-v", "-x", __file__]))
g2p/tranquil
tranquil/__init__.py
Python
bsd-3-clause
1,717
0.041351
import re from django.core.exceptions import ImproperlyConfigured from sqlalchemy import create_engine, MetaData from sqlalchemy.orm import sessionmaker from tranquil.models import Importer __all__ = ( 'engine', 'meta', 'Session', ) class EngineCache(object): __shared_state = dict( engine = None, meta = None, Session = None, ) _mappings = { 'sqlite3': 'sqlite', 'mysql': 'mysql', 'postgresql': 'postgresql', 'postgresql_psycopg2': 'postgresql+psycopg2', 'oracle': 'oracle', } def __init__(self): from django.conf import settings self.__dict__ = self.__shared_state if self.engine is not None: return if settings.DATABASE_ENGINE == 'django_sqlalchemy.backend': from django_sqlalchemy import backend self.engine = backend.engine else: options = { 'protocol': self._mappings.get( settings.DATABASE_ENGINE ), 'name': settings.DATABASE_NAME, 'user': settings.DATABASE_USER, 'pass': settings.DATABASE_PASSWORD, 'host': settings.DATABASE_HOST, 'port': settings.DATABASE_PORT, } if options['protocol'] is None:
raise ImproperlyConfigured( 'Unknown database engine: %s' % settings.DATABASE_ENGINE ) url = '{protocol}://{user}:{pass}@{host}{port}/{name}' for p in options: if p == 'port' and len( options[p] ) > 0: url = re.sub( '{%s}' % p, ':%s' % options[p], url ) else: url = re.sub( '{%s}' % p, options[p], url ) self.engine = create_engine( url ) self.meta = MetaData(bind=self.engine,reflect=True
) self.Session = sessionmaker( bind=self.engine, autoflush=True, autocommit=False ) self.importer = Importer(self.meta) cache = EngineCache() engine = cache.engine meta = cache.meta Session = cache.Session
jku/telepathy-gabble
tests/twisted/jingle/initial-audio-video.py
Python
lgpl-2.1
7,213
0.004298
""" Tests outgoing calls created with InitialAudio and/or InitialVideo, and exposing the initial contents of incoming calls as values of InitialAudio and InitialVideo """ import operator from servicetest import ( assertContains, assertEquals, assertLength, wrap_channel, EventPattern, call_async, make_channel_proxy) from jingletest2 import JingleTest2, test_all_dialects import constants as cs def outgoing(jp, q, bus, conn, stream): remote_jid = '[email protected]/beyond' jt = JingleTest2(jp, conn, q, stream, 'test@localhost', remote_jid) jt.prepare() self_handle = conn.GetSelfHandle() remote_handle = conn.RequestHandles(cs.HT_CONTACT, [remote_jid])[0] rccs = conn.Properties.Get(cs.CONN_IFACE_REQUESTS, 'RequestableChannelClasses') media_classes = [ rcc for rcc in rccs if rcc[0][cs.CHANNEL_TYPE] == cs.CHANN
EL_TYPE_STREAMED_MEDIA ] assertLength(1, media_classes) fixed, allowed = media_classes[0] assertContains(cs.INITIAL_AU
DIO, allowed) assertContains(cs.INITIAL_VIDEO, allowed) check_neither(q, conn, bus, stream, remote_handle) check_iav(jt, q, conn, bus, stream, remote_handle, True, False) check_iav(jt, q, conn, bus, stream, remote_handle, False, True) check_iav(jt, q, conn, bus, stream, remote_handle, True, True) def check_neither(q, conn, bus, stream, remote_handle): """ Make a channel without specifying InitialAudio or InitialVideo; check that it's announced with both False, and that they're both present and false in GetAll(). """ path, props = conn.Requests.CreateChannel({ cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_STREAMED_MEDIA, cs.TARGET_HANDLE_TYPE: cs.HT_CONTACT, cs.TARGET_HANDLE: remote_handle}) assertContains((cs.INITIAL_AUDIO, False), props.items()) assertContains((cs.INITIAL_VIDEO, False), props.items()) chan = wrap_channel(bus.get_object(conn.bus_name, path), cs.CHANNEL_TYPE_STREAMED_MEDIA, ['MediaSignalling']) props = chan.Properties.GetAll(cs.CHANNEL_TYPE_STREAMED_MEDIA) assertContains(('InitialAudio', False), props.items()) assertContains(('InitialVideo', False), props.items()) # We shouldn't have started a session yet, so there shouldn't be any # session handlers. Strictly speaking, there could be a session handler # with no stream handlers, but... session_handlers = chan.MediaSignalling.GetSessionHandlers() assertLength(0, session_handlers) def check_iav(jt, q, conn, bus, stream, remote_handle, initial_audio, initial_video): """ Make a channel and check that its InitialAudio and InitialVideo properties come out correctly. """ call_async(q, conn.Requests, 'CreateChannel', { cs.CHANNEL_TYPE: cs.CHANNEL_TYPE_STREAMED_MEDIA, cs.TARGET_HANDLE_TYPE: cs.HT_CONTACT, cs.TARGET_HANDLE: remote_handle, cs.INITIAL_AUDIO: initial_audio, cs.INITIAL_VIDEO: initial_video, }) if initial_video and (not jt.jp.can_do_video() or (not initial_audio and not jt.jp.can_do_video_only ())): # Some protocols can't do video event = q.expect('dbus-error', method='CreateChannel') assertEquals(cs.NOT_CAPABLE, event.error.get_dbus_name()) else: path, props = q.expect('dbus-return', method='CreateChannel').value assertContains((cs.INITIAL_AUDIO, initial_audio), props.items()) assertContains((cs.INITIAL_VIDEO, initial_video), props.items()) chan = wrap_channel(bus.get_object(conn.bus_name, path), cs.CHANNEL_TYPE_STREAMED_MEDIA, ['MediaSignalling']) props = chan.Properties.GetAll(cs.CHANNEL_TYPE_STREAMED_MEDIA) assertContains(('InitialAudio', initial_audio), props.items()) assertContains(('InitialVideo', initial_video), props.items()) session_handlers = chan.MediaSignalling.GetSessionHandlers() assertLength(1, session_handlers) path, type = session_handlers[0] assertEquals('rtp', type) session_handler = make_channel_proxy(conn, path, 'Media.SessionHandler') session_handler.Ready() stream_handler_paths = [] stream_handler_types = [] for x in [initial_audio, initial_video]: if x: e = q.expect('dbus-signal', signal='NewStreamHandler') stream_handler_paths.append(e.args[0]) stream_handler_types.append(e.args[2]) if initial_audio: assertContains(cs.MEDIA_STREAM_TYPE_AUDIO, stream_handler_types) if initial_video: assertContains(cs.MEDIA_STREAM_TYPE_VIDEO, stream_handler_types) for x in xrange (0, len(stream_handler_paths)): p = stream_handler_paths[x] t = stream_handler_types[x] sh = make_channel_proxy(conn, p, 'Media.StreamHandler') sh.NewNativeCandidate("fake", jt.get_remote_transports_dbus()) if t == cs.MEDIA_STREAM_TYPE_AUDIO: sh.Ready(jt.get_audio_codecs_dbus()) else: sh.Ready(jt.get_video_codecs_dbus()) sh.StreamState(cs.MEDIA_STREAM_STATE_CONNECTED) e = q.expect('stream-iq', predicate=jt.jp.action_predicate('session-initiate')) jt.parse_session_initiate (e.query) jt.accept() events = reduce(operator.concat, [ [ EventPattern('dbus-signal', signal='SetRemoteCodecs', path=p), EventPattern('dbus-signal', signal='SetStreamPlaying', path=p), ] for p in stream_handler_paths ], []) q.expect_many(*events) chan.Close() def incoming(jp, q, bus, conn, stream): remote_jid = 'skinny.fists@heaven/antennas' jt = JingleTest2(jp, conn, q, stream, 'test@localhost', remote_jid) jt.prepare() self_handle = conn.GetSelfHandle() remote_handle = conn.RequestHandles(cs.HT_CONTACT, [remote_jid])[0] for a, v in [("audio1", None), (None, "video1"), ("audio1", "video1")]: if v!= None and not jp.can_do_video(): continue if a == None and v != None and not jp.can_do_video_only(): continue jt.incoming_call(audio=a, video=v) e = q.expect('dbus-signal', signal='NewChannels', predicate=lambda e: cs.CHANNEL_TYPE_CONTACT_LIST not in e.args[0][0][1].values()) chans = e.args[0] assertLength(1, chans) path, props = chans[0] assertEquals(cs.CHANNEL_TYPE_STREAMED_MEDIA, props[cs.CHANNEL_TYPE]) assertEquals(a != None, props[cs.INITIAL_AUDIO]) assertEquals(v != None, props[cs.INITIAL_VIDEO]) # FIXME: This doesn't check non-Google contacts that can only do one # media type, as such contacts as simulated by JingleTest2 can always # do both. assertEquals(not jp.can_do_video() or not jp.can_do_video_only(), props[cs.IMMUTABLE_STREAMS]) chan = wrap_channel(bus.get_object(conn.bus_name, path), cs.CHANNEL_TYPE_STREAMED_MEDIA) chan.Close() if __name__ == '__main__': test_all_dialects(outgoing) test_all_dialects(incoming)
markflyhigh/incubator-beam
sdks/python/apache_beam/io/textio_test.py
Python
apache-2.0
43,198
0.006898
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Tests for textio module.""" from __future__ import absolute_import from __future__ import division import bz2 import
datetime import glob import gzip import logging import os import shutil import sys import tempfile import unittest import zlib from builtins import range import apache_beam as beam import apache_beam.io.source_test_utils as source_test_utils from apache_beam import coders from apach
e_beam.io import ReadAllFromText from apache_beam.io import iobase from apache_beam.io.filesystem import CompressionTypes from apache_beam.io.textio import _TextSink as TextSink from apache_beam.io.textio import _TextSource as TextSource # Importing following private classes for testing. from apache_beam.io.textio import ReadFromText from apache_beam.io.textio import ReadFromTextWithFilename from apache_beam.io.textio import WriteToText from apache_beam.testing.test_pipeline import TestPipeline from apache_beam.testing.test_utils import TempDir from apache_beam.testing.util import assert_that from apache_beam.testing.util import equal_to from apache_beam.transforms.core import Create class EOL(object): LF = 1 CRLF = 2 MIXED = 3 LF_WITH_NOTHING_AT_LAST_LINE = 4 def write_data( num_lines, no_data=False, directory=None, prefix=tempfile.template, eol=EOL.LF): """Writes test data to a temporary file. Args: num_lines (int): The number of lines to write. no_data (bool): If :data:`True`, empty lines will be written, otherwise each line will contain a concatenation of b'line' and the line number. directory (str): The name of the directory to create the temporary file in. prefix (str): The prefix to use for the temporary file. eol (int): The line ending to use when writing. :class:`~apache_beam.io.textio_test.EOL` exposes attributes that can be used here to define the eol. Returns: Tuple[str, List[str]]: A tuple of the filename and a list of the utf-8 decoded written data. """ all_data = [] with tempfile.NamedTemporaryFile( delete=False, dir=directory, prefix=prefix) as f: sep_values = [b'\n', b'\r\n'] for i in range(num_lines): data = b'' if no_data else b'line' + str(i).encode() all_data.append(data) if eol == EOL.LF: sep = sep_values[0] elif eol == EOL.CRLF: sep = sep_values[1] elif eol == EOL.MIXED: sep = sep_values[i % len(sep_values)] elif eol == EOL.LF_WITH_NOTHING_AT_LAST_LINE: sep = b'' if i == (num_lines - 1) else sep_values[0] else: raise ValueError('Received unknown value %s for eol.' % eol) f.write(data + sep) return f.name, [line.decode('utf-8') for line in all_data] def write_pattern(lines_per_file, no_data=False): """Writes a pattern of temporary files. Args: lines_per_file (List[int]): The number of lines to write per file. no_data (bool): If :data:`True`, empty lines will be written, otherwise each line will contain a concatenation of b'line' and the line number. Returns: Tuple[str, List[str]]: A tuple of the filename pattern and a list of the utf-8 decoded written data. """ temp_dir = tempfile.mkdtemp() all_data = [] file_name = None start_index = 0 for i in range(len(lines_per_file)): file_name, data = write_data(lines_per_file[i], no_data=no_data, directory=temp_dir, prefix='mytemp') all_data.extend(data) start_index += lines_per_file[i] assert file_name return ( file_name[:file_name.rfind(os.path.sep)] + os.path.sep + 'mytemp*', all_data) class TextSourceTest(unittest.TestCase): # Number of records that will be written by most tests. DEFAULT_NUM_RECORDS = 100 @classmethod def setUpClass(cls): # Method has been renamed in Python 3 if sys.version_info[0] < 3: cls.assertCountEqual = cls.assertItemsEqual def _run_read_test(self, file_or_pattern, expected_data, buffer_size=DEFAULT_NUM_RECORDS, compression=CompressionTypes.UNCOMPRESSED): # Since each record usually takes more than 1 byte, default buffer size is # smaller than the total size of the file. This is done to # increase test coverage for cases that hit the buffer boundary. source = TextSource(file_or_pattern, 0, compression, True, coders.StrUtf8Coder(), buffer_size) range_tracker = source.get_range_tracker(None, None) read_data = list(source.read(range_tracker)) self.assertCountEqual(expected_data, read_data) def test_read_single_file(self): file_name, expected_data = write_data(TextSourceTest.DEFAULT_NUM_RECORDS) assert len(expected_data) == TextSourceTest.DEFAULT_NUM_RECORDS self._run_read_test(file_name, expected_data) def test_read_single_file_smaller_than_default_buffer(self): file_name, expected_data = write_data(TextSourceTest.DEFAULT_NUM_RECORDS) self._run_read_test(file_name, expected_data, buffer_size=TextSource.DEFAULT_READ_BUFFER_SIZE) def test_read_single_file_larger_than_default_buffer(self): file_name, expected_data = write_data(TextSource.DEFAULT_READ_BUFFER_SIZE) self._run_read_test(file_name, expected_data, buffer_size=TextSource.DEFAULT_READ_BUFFER_SIZE) def test_read_file_pattern(self): pattern, expected_data = write_pattern( [TextSourceTest.DEFAULT_NUM_RECORDS * 5, TextSourceTest.DEFAULT_NUM_RECORDS * 3, TextSourceTest.DEFAULT_NUM_RECORDS * 12, TextSourceTest.DEFAULT_NUM_RECORDS * 8, TextSourceTest.DEFAULT_NUM_RECORDS * 8, TextSourceTest.DEFAULT_NUM_RECORDS * 4]) assert len(expected_data) == TextSourceTest.DEFAULT_NUM_RECORDS * 40 self._run_read_test(pattern, expected_data) def test_read_single_file_windows_eol(self): file_name, expected_data = write_data(TextSourceTest.DEFAULT_NUM_RECORDS, eol=EOL.CRLF) assert len(expected_data) == TextSourceTest.DEFAULT_NUM_RECORDS self._run_read_test(file_name, expected_data) def test_read_single_file_mixed_eol(self): file_name, expected_data = write_data(TextSourceTest.DEFAULT_NUM_RECORDS, eol=EOL.MIXED) assert len(expected_data) == TextSourceTest.DEFAULT_NUM_RECORDS self._run_read_test(file_name, expected_data) def test_read_single_file_last_line_no_eol(self): file_name, expected_data = write_data( TextSourceTest.DEFAULT_NUM_RECORDS, eol=EOL.LF_WITH_NOTHING_AT_LAST_LINE) assert len(expected_data) == TextSourceTest.DEFAULT_NUM_RECORDS self._run_read_test(file_name, expected_data) def test_read_single_file_single_line_no_eol(self): file_name, expected_data = write_data( 1, eol=EOL.LF_WITH_NOTHING_AT_LAST_LINE) assert len(expected_data) == 1 self._run_read_test(file_name, expected_data) def test_read_empty_single_file(self): file_name, written_data = write_data( 1, no_data=True, eol=EOL.LF_WITH_NOTHING_AT_LAST_LINE) assert len(written_data) == 1 # written data has a single entry with an empty string. Reading the source # should not produce anything since we only wrote a single empty string # without an end of line character. self._run_read_test(file_name, []) def test
awg24/pretix
src/pretix/plugins/paypal/signals.py
Python
apache-2.0
267
0
from django.dispatch import receiver from pretix.base.signals import register_payment_providers @receiver(reg
ister_payment_providers, dispatch_uid="payment_paypal") def register_payment_provider(sender, **kwargs): from .payment import Paypal return
Paypal
idlesign/django-admirarchy
admirarchy/tests/testapp/models.py
Python
bsd-3-clause
633
0.00158
from django.db import models class AdjacencyListModel(models.Model): title = models.CharField(max_length=100) parent = models.ForeignKey( 'self', related_name='%(class)s_parent', on_delete=models.CASCADE, db_index=True, null=True, blank=True) def __str__(self): return 'adjacencylistmodel_%s' % self.title class Nested
SetModel(models.Model): title =
models.CharField(max_length=100) lft = models.IntegerField(db_index=True) rgt = models.IntegerField(db_index=True) level = models.IntegerField(db_index=True) def __str__(self): return 'nestedsetmodel_%s' % self.title
lkmhaqer/gtools-python
netdevice/migrations/0007_auto_20190410_0358.py
Python
mit
567
0
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2019-04-10 03:58 from __future__
import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('netdevice', '0006_auto_20190409_0325'), ] operations = [ migrations.RenameField( model_name='vrf', old_name='vrf_name',
new_name='name', ), migrations.RenameField( model_name='vrf', old_name='vrf_target', new_name='target', ), ]
doffm/dbuf
src/dbuf/util.py
Python
bsd-3-clause
3,108
0.026705
from functools import reduce class ScopedString (object): def __init__ (self): self._stack = [] def push (self, frame): self._stack.append (frame) def pop (s
elf): frame = self._stack.pop() return frame def __str__
(self): return '.'.join (self._stack) class ScopedList (object): def __init__ (self, stack=None): if stack: self._stack = stack else: self._stack = [] self.push() def push (self): self._stack.append ([]) def pop (self): if (len (self._stack) <= 1): raise IndexError ("Attempt to pop global scope") self._stack.pop() def append (self, val): self._stack[-1].append (val) def _normalize (self): return reduce (lambda x, y: x + y, self._stack, []) def __str__ (self): return str (self._normalize()) def __repr__ (self): return "ScopedDict(" + repr(self._stack) + ")" def __iter__ (self): return self._normalize().__iter__() class ScopedDict (object): def __init__ (self, stack=None): if stack: self._stack = stack else: self._stack = [] self.push () def push (self): self._stack.insert (0, {}) def pop (self): if (len (self._stack) <= 1): raise IndexError ("Attempt to pop global scope") temp = self._stack[0] del (self._stack[0]) return temp def _normalize (self): normal = {} for frame in self._stack: for key, value in frame.items(): if key not in normal: normal[key] = value return normal def __getitem__ (self, key): for frame in self._stack: if key in frame: return frame[key] raise KeyError (key) def __setitem__ (self, key, value): self._stack[0][key] = value def __contains__ (self, key): for frame in self._stack: if key in frame: return True return False def __str__ (self): return str (self._normalize()) def __repr__ (self): return "ScopedDict(" + repr(self._stack) + ")" def __iter__ (self): return self._normalize().__iter__() def items (self): return self._normalize().items() def keys (self): return self._normalize().keys() def values (self): return self._normalize().values()
ebilionis/py-best
best/random/_student_t_likelihood_function.py
Python
lgpl-3.0
2,586
0.001933
"""A likelihood function representing a Student-t distribution. Author: Ilias Bilionis Date: 1/21/2013 """ __all__ = ['StudentTLikelihoodFunction'] import numpy as np import scipy import math from . import GaussianLikelihoodFunction class StudentTLikelihoodFunction(GaussianLikelihoodFunction): """An object representing a Student-t likelihood function.""" # The degrees of freedom _nu = None @property def nu(self): """Get the degrees of freedom.""" return self._nu @nu.setter def nu(self, value): """Set the degrees of freedom.""" if not isinstance(value, float): raise TypeError('nu must be a float.') self._nu = value def __init__(self, nu, num_input=None, data=None, mean_function=None, cov=None, name='Student-t Likelihood Function'): """Initialize the object. Arguments: nu --- The degrees of freedom of the distribution. Keyword Arguments num_input --- The number of inputs. Optional, if mean_function is a proper Function. data --- The observed data. A vector. Optional, if mean_function is a proper Function. It can be set later. mean_function --- The mean function. See the super class for the description. cov --- The covariance matrix. It can either be a positive definite matrix, or a number.
The data or a proper mean_funciton is preassumed. name --- A name for the likelihood function. """ self.nu = nu super(StudentTLikelihoodFunction, self).__init__(num_input=num_input,
data=data, mean_function=mean_function, cov=cov, name=name) def __call__(self, x): """Evaluate the function at x.""" mu = self.mean_function(x) y = scipy.linalg.solve_triangular(self.L_cov, self.data - mu) return ( - 0.5 * (self.nu + self.num_data) * math.log(1. + np.dot(y, y) / self.nu))
Azure/azure-sdk-for-python
sdk/search/azure-search-documents/samples/async_samples/sample_index_crud_operations_async.py
Python
mit
3,884
0.00309
# coding: utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- """ FILE: sample_index_crud_operations_async.py DESCRIPTION: This sample demonstrates how to get, create, update, or delete an index. USAGE: python sample_index_crud_operations_async.py Set the environment variables with your own values before running the sample: 1) AZURE_SEARCH_SERVICE_ENDPOINT - the endpoint of your Azure Cognitive Search service 2) AZURE_SEARCH_API_KEY - your search API key """ import os import asyncio service_endpoint = os.getenv("AZURE_SEARCH_SERVICE_ENDPOINT") key = os.getenv("AZURE_SEARCH_API_KEY") from azure.core.credentials import AzureKeyCredential from azure.search.documents.indexes.aio import SearchIndexClient from azure.search.documents.indexes.models import ( ComplexField, CorsOptions, SearchIndex, ScoringProfile, SearchFieldDataType, SimpleField, SearchableField ) client = SearchIndexClient(service_endpoint, AzureKeyCredential(key)) async def create_index(): # [START create_index_async] name = "hotels" fields = [ SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), SimpleField(name="baseRate", type=SearchFieldDataType.Double), SearchableField(name="description", type=SearchFieldDataType.String, collection=True), ComplexField(name="address", fields=[ SimpleField(name="streetAddress", type=SearchFieldDataType.String), SimpleField(name="city", type=SearchFieldDataType.String), ], collection=True) ] cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) scoring_profiles = [] index = SearchIndex( name=name, fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) result = await client.create_index(index) # [END create_index_async] async def get_index(): # [START get_index_async] name = "hotels" result = await client.get_index(name) # [END get_index_async] async def update_index(): # [START update_index_async] name = "hotels" fields = [ SimpleField(name="hotelId", type=SearchFieldDataType.String, key=True), SimpleField(name="baseRate", type=SearchFieldDataType.Double), SearchableField(name="description", type=SearchFieldDataType.String, collection=True), SearchableField(name="hotelName", type=SearchFieldDataType.String), ComplexField(name="address", fields=[ SimpleField(name="streetAddress", type=SearchFieldDataType.String), SimpleField(name="city", type=SearchFieldDataType.String), SimpleField(n
ame="state", type=SearchFieldDataType.String), ], collection=True) ] cors_options = CorsOptions(allowed_origins=["*"], max_age_in_seconds=60) scoring_profile = ScoringProfile( name="MyProfile" ) scoring_profiles = [] scoring_profiles.append(scoring_profile) index = SearchIndex( name=name,
fields=fields, scoring_profiles=scoring_profiles, cors_options=cors_options) result = await client.create_or_update_index(index=index) # [END update_index_async] async def delete_index(): # [START delete_index_async] name = "hotels" await client.delete_index(name) # [END delete_index_async] async def main(): await create_index() await get_index() await update_index() await delete_index() await client.close() if __name__ == '__main__': loop = asyncio.get_event_loop() loop.run_until_complete(main()) loop.close()
bohdon/maya-pulse
src/pulse/scripts/pulse/colors.py
Python
mit
381
0
def RGB01ToHex(rgb): """ Return an RGB color value as a hex color string. """ return '#%02x%02x%02x' % tuple([int(x * 255) for x in rgb]) def hexToRGB01(hexColor): """ Return a hex color string as an RGB tuple of floats in the
range 0..1 """ h = hexColor.lstrip('#') return tuple([x / 255.0 for x in [int(h[i:i + 2], 16)
for i in (0, 2, 4)]])
arenadata/ambari
ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/service_check.py
Python
apache-2.0
1,514
0.004624
""" Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from resource_management import * class ECSServiceCheck(Script): def service_check(self, env):
import params env.set_params(params) # run fs list command to make sure ECS client can talk to ECS backend list_command = format("fs -ls /") if params.security_enabled: Execute(forma
t("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"), user=params.hdfs_user ) ExecuteHadoop(list_command, user=params.hdfs_user, logoutput=True, conf_dir=params.hadoop_conf_dir, try_sleep=3, tries=20, bin_dir=params.hadoop_bin_dir ) if __name__ == "__main__": ECSServiceCheck().execute()
abo-abo/edx-platform
common/djangoapps/mitxmako/middleware.py
Python
agpl-3.0
1,006
0
# Copyright (c) 2008 Mikeal Rogers # # Licensed under the Apache License, Version 2.0 (the "Licen
se"); #
you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distribuetd under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from dealer.git import git from django.template import RequestContext requestcontext = None class MakoMiddleware(object): def process_request(self, request): global requestcontext requestcontext = RequestContext(request) requestcontext['is_secure'] = request.is_secure() requestcontext['site'] = request.get_host() requestcontext['REVISION'] = git.revision
maciejkula/spotlight
spotlight/cross_validation.py
Python
mit
6,519
0
""" Module with functionality for splitting and shuffling datasets. """ import numpy as np from sklearn.utils import murmurhash3_32 from spotlight.interactions import Interactions def _index_or_none(array, shuffle_index): if array is None: return None else: return array[shuffle_index] def shuffle_interactions(interactions, random_state=None): """ Shuffle interactions. Parameters ---------- interactions: :class:`spotlight.interactions.Interactions` The interactions to shuffle. random_state: np.random.RandomState, optional The random state use
d for the shuffle. Returns ------- interactions: :class:`spotlight.interactions.Interactions` The shuffled interactions. """ if random_state is None: random_state = np.random.RandomState() shuffle_indices = np.arange(len(interactions.user_ids)) random_state.shuffle(shuffle_indices) return Interactions(interactions.user_ids[shuffle_indices], interactions.item_ids[shuffle_indices], ratings=_index_or_none(i
nteractions.ratings, shuffle_indices), timestamps=_index_or_none(interactions.timestamps, shuffle_indices), weights=_index_or_none(interactions.weights, shuffle_indices), num_users=interactions.num_users, num_items=interactions.num_items) def random_train_test_split(interactions, test_percentage=0.2, random_state=None): """ Randomly split interactions between training and testing. Parameters ---------- interactions: :class:`spotlight.interactions.Interactions` The interactions to shuffle. test_percentage: float, optional The fraction of interactions to place in the test set. random_state: np.random.RandomState, optional The random state used for the shuffle. Returns ------- (train, test): (:class:`spotlight.interactions.Interactions`, :class:`spotlight.interactions.Interactions`) A tuple of (train data, test data) """ interactions = shuffle_interactions(interactions, random_state=random_state) cutoff = int((1.0 - test_percentage) * len(interactions)) train_idx = slice(None, cutoff) test_idx = slice(cutoff, None) train = Interactions(interactions.user_ids[train_idx], interactions.item_ids[train_idx], ratings=_index_or_none(interactions.ratings, train_idx), timestamps=_index_or_none(interactions.timestamps, train_idx), weights=_index_or_none(interactions.weights, train_idx), num_users=interactions.num_users, num_items=interactions.num_items) test = Interactions(interactions.user_ids[test_idx], interactions.item_ids[test_idx], ratings=_index_or_none(interactions.ratings, test_idx), timestamps=_index_or_none(interactions.timestamps, test_idx), weights=_index_or_none(interactions.weights, test_idx), num_users=interactions.num_users, num_items=interactions.num_items) return train, test def user_based_train_test_split(interactions, test_percentage=0.2, random_state=None): """ Split interactions between a train and a test set based on user ids, so that a given user's entire interaction history is either in the train, or the test set. Parameters ---------- interactions: :class:`spotlight.interactions.Interactions` The interactions to shuffle. test_percentage: float, optional The fraction of users to place in the test set. random_state: np.random.RandomState, optional The random state used for the shuffle. Returns ------- (train, test): (:class:`spotlight.interactions.Interactions`, :class:`spotlight.interactions.Interactions`) A tuple of (train data, test data) """ if random_state is None: random_state = np.random.RandomState() minint = np.iinfo(np.uint32).min maxint = np.iinfo(np.uint32).max seed = random_state.randint(minint, maxint, dtype=np.int64) in_test = ((murmurhash3_32(interactions.user_ids, seed=seed, positive=True) % 100 / 100.0) < test_percentage) in_train = np.logical_not(in_test) train = Interactions(interactions.user_ids[in_train], interactions.item_ids[in_train], ratings=_index_or_none(interactions.ratings, in_train), timestamps=_index_or_none(interactions.timestamps, in_train), weights=_index_or_none(interactions.weights, in_train), num_users=interactions.num_users, num_items=interactions.num_items) test = Interactions(interactions.user_ids[in_test], interactions.item_ids[in_test], ratings=_index_or_none(interactions.ratings, in_test), timestamps=_index_or_none(interactions.timestamps, in_test), weights=_index_or_none(interactions.weights, in_test), num_users=interactions.num_users, num_items=interactions.num_items) return train, test
alirizakeles/memopol-core
memopol/meps/migrations/0017_auto__del_field_mep_stg_office.py
Python
gpl-3.0
12,677
0.008046
# encoding: utf-8 from south.db import db from south.v2 import SchemaMigration class Migration(SchemaMigration): def forwards(self, orm): # Deleting field 'MEP.stg_office' db.delete_column('meps_mep', 'stg_office') def backwards(self, orm): # User chose to not deal with backwards NULL issues for 'MEP.stg_office' raise RuntimeError("Cannot reverse this migration. 'MEP.stg_office' and its values cannot be restored.") models = { 'meps.building': { 'Meta': {'object_name': 'Building'}, 'id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'postcode': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'street': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'meps.committee': { 'Meta': {'object_name': 'Committee'}, 'abbreviation': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) }, 'meps.committeerole': { 'Meta': {'object_name': 'CommitteeRole'}, 'begin': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'committee': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.Committee']"}), 'end': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mep': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.MEP']"}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'meps.country': { 'Meta': {'object_name': 'Country'}, 'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '2'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'meps.countrymep': { 'Meta': {'object_name': 'CountryMEP'}, 'begin': ('django.db.models.fields.DateField', [], {}), 'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.Country']"}), 'end': ('django.db.models.fields.DateField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mep': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.MEP']"}), 'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['reps.Party']"}) }, 'meps.delegation': { 'Meta': {'object_name': 'Delegation'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}) }, 'meps.delegationrole': { 'Meta': {'object_name': 'DelegationRole'}, 'begin': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'delegation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.Delegation']"}), 'end': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mep': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.MEP']"}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'meps.group': { 'Meta': {'object_name': 'Group'}, 'abbreviation': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}) }, 'meps.groupmep': { 'Meta': {'object_name': 'GroupMEP'}, 'begin': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'end': ('django.db.models.fields.DateField', [], {'null': 'True'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.Group']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mep': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['meps.MEP']"}), 'role': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, 'meps.mep': { 'Meta': {'ordering': "['last_name']", 'object_name': 'MEP', '_ormbases': ['reps.Representative']}, 'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'bxl_building': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bxl_building'", 'to': "orm['meps.Building']"}), 'bxl_fax': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'bxl_floor': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'bxl_office_number': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'bxl_phone1': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'bxl_phone2': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'committees': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['meps.Committee']", 'through': "orm['meps.CommitteeRole']", 'symmetrical':
'False'}), 'countries': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['meps.Country']", 'through': "orm['meps.CountryMEP']", 'symmetrical': 'False'}), 'delegations': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['meps.Delegation']", 'through': "orm['meps.Delegati
onRole']", 'symmetrical': 'False'}), 'ep_debates': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'ep_declarations': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'ep_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True'}), 'ep_motions': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'ep_opinions': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'ep_questions': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'ep_reports': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'ep_webpage': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['meps.Group']", 'through': "orm['meps.GroupMEP']", 'symmetrical': 'False'}), 'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['meps.Organization']", 'through': "orm['meps.OrganizationMEP']", 'symmetrical': 'False'}), 'position': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True'}), 'representative_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['reps.Representative']", 'unique': 'True', 'primary_key': 'True'}), 'stg_building': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stg_building'", 'to': "orm['meps.Building']"}), 'stg_fax': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'stg_floor': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'stg_office_number': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'stg_phone1': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'stg_phone2': ('django.
SqueezeStudioAnimation/dpAutoRigSystem
dpAutoRigSystem/Scripts/dpArm.py
Python
gpl-2.0
3,972
0.007301
# importing libraries: import maya.cmds as cmds import maya.mel as mel # global variables to this module: CLASS_NAME = "Arm" TITLE = "m028_arm" DESCRIPTION = "m029_armDesc" ICON = "/Icons/dp_arm.png" def Arm(dpAutoRigInst): """ This function will create all guides needed to compose an arm. """ # check modules integrity: guideDir = 'Modules' checkModuleList = ['dpLimb', 'dpFinger'] checkResultList = dpAutoRigInst.startGuideModules(guideDir, "check", None, checkModuleList=checkModuleList) if len(checkResultList) == 0: # creating module instances: armLimbInstance = dpAutoRigInst.initGuide('dpLimb', guideDir) # change name to arm: dpAutoRigInst.guide.Limb.editUserName(armLimbInstance, checkText=dpAutoRigInst.langDic[dpAutoRigInst.langName]['m028_arm'].capitalize()) # create finger instances: indexFingerInstance = dpAutoRigInst.initGuide('dpFinger', guideDir) dpAutoRigInst.guide.Finger.editUserName(indexFingerInstance, checkText=dpAutoRigInst.langDic[dpAutoRigInst.langName]['m032_index']) middleFingerInstance = dpAutoRigInst.initGuide('dpFinger', guideDir) dpAutoRigInst.guide.Finger.editUserName(middleFingerInstance, checkText=dpAutoRigInst.langDic[dpAutoRigInst.langName]['m033_middle']) ringFingerInstance = dpAutoRigInst.initGuide('dpFinger', guideDir) dpAuto
RigInst.guide.Finger.editUserName(ringFingerInstance, checkText=dpAutoRigInst.langDic[dpAutoRigInst.langName]['m034_ring']) pinkFingerInstance = dpAutoRigInst.initGuide('dpFinge
r', guideDir) dpAutoRigInst.guide.Finger.editUserName(pinkFingerInstance, checkText=dpAutoRigInst.langDic[dpAutoRigInst.langName]['m035_pink']) thumbFingerInstance = dpAutoRigInst.initGuide('dpFinger', guideDir) dpAutoRigInst.guide.Finger.editUserName(thumbFingerInstance, checkText=dpAutoRigInst.langDic[dpAutoRigInst.langName]['m036_thumb']) # edit arm limb guide: armBaseGuide = armLimbInstance.moduleGrp cmds.setAttr(armBaseGuide+".translateX", 2.5) cmds.setAttr(armBaseGuide+".translateY", 16) cmds.setAttr(armBaseGuide+".displayAnnotation", 0) cmds.setAttr(armLimbInstance.cvExtremLoc+".translateZ", 7) cmds.setAttr(armLimbInstance.radiusCtrl+".translateX", 1.5) # edit finger guides: fingerInstanceList = [indexFingerInstance, middleFingerInstance, ringFingerInstance, pinkFingerInstance, thumbFingerInstance] fingerTZList = [0.6, 0.2, -0.2, -0.6, 0.72] for n, fingerInstance in enumerate(fingerInstanceList): cmds.setAttr(fingerInstance.moduleGrp+".translateX", 11) cmds.setAttr(fingerInstance.moduleGrp+".translateY", 16) cmds.setAttr(fingerInstance.moduleGrp+".translateZ", fingerTZList[n]) cmds.setAttr(fingerInstance.moduleGrp+".displayAnnotation", 0) cmds.setAttr(fingerInstance.radiusCtrl+".translateX", 0.3) cmds.setAttr(fingerInstance.annotation+".visibility", 0) if n == len(fingerInstanceList)-1: # correct not commun values for thumb guide: cmds.setAttr(thumbFingerInstance.moduleGrp+".translateX", 10.1) cmds.setAttr(thumbFingerInstance.moduleGrp+".rotateX", 60) dpAutoRigInst.guide.Finger.changeJointNumber(thumbFingerInstance, 2) cmds.setAttr(thumbFingerInstance.moduleGrp+".nJoints", 2) # parent finger guide to the arm wrist guide: cmds.parent(fingerInstance.moduleGrp, armLimbInstance.cvExtremLoc, absolute=True) # select the armGuide_Base: cmds.select(armBaseGuide) else: # error checking modules in the folder: mel.eval('error \"'+ dpAutoRigInst.langDic[dpAutoRigInst.langName]['e001_GuideNotChecked'] +' - '+ (", ").join(checkResultList) +'\";')
madprime/genevieve
genevieve_client/migrations/0004_auto_20160328_1526.py
Python
mit
1,731
0.001733
# -*- coding: utf-8 -*- # Generated by Django 1.9.4 on 2016-03-28 15:26 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('genevieve_client', '0003_variant_myvariant_dbsnp'), ] operations = [ migrations.CreateModel( name='OpenHumansUser', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('access_token', model
s.CharField(blank=True, max_length=30)), ('refresh_token', models.CharField(blank=True, max_length=30)), ('token_expiration', models.DateTimeField(null=True)),
('connected_id', models.CharField(max_length=30, unique=True)), ('openhumans_username', models.CharField(blank=True, max_length=30)), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.RemoveField( model_name='gennoteseditor', name='gennotes_id', ), migrations.RemoveField( model_name='gennoteseditor', name='genome_storage_enabled', ), migrations.AddField( model_name='gennoteseditor', name='connected_id', field=models.CharField(default=0, max_length=30, unique=True), preserve_default=False, ), ]
dr-bigfatnoob/quirk
language/functions.py
Python
unlicense
1,698
0.009423
from __future__ import print_function, division import sys import os sys.path.append(os.path.abspath(".")) sys.dont_write_bytecode = True from distribution import * import operator as o from utils.lib import gt, lt, gte, lte, neq, eq __author__ = "bigfatnoob" def sample(values, size=100): return np.random.choice(values, size=size) def expected_value(values, size=1000): means = [] for _ in range(1000): samples = sample(values, int(size)) means.append(np.mean(samples)) return np.mean(means) def standard_deviation(values): return np.std(values) def percentile(values, percent): return np.percentile(values, percent) def probability(values): return sum([1 if v >= 1 else 0 for v in values]) / len(values) def lambda_ev(*args): return lambda x: expected_value(x, *args) def lambda_std(): return lambda x: standard_deviation(x) def lambda_percentile(*args): return lambda x: percentile(x, *args) def lambda_probability(): return lambda x: probability(x) def to_int(func): return lambda a, b: 1 if func(a, b) else 0 evaluations = { "EV": lambda_ev, "STD": lambda_std, "PERCENTILE": lambda_percentile, "PROBABILITY": lambda_probability } distributions = { "constant": Constant, "normal": Normal,
"normalCI": NormalCI, "uniform": Uniform, "random": Random, "exp": Exponential, "binomial": Binomial, "geometric": Geometric, "triangular": Triangular }
operations = { "+": o.add, "-": o.sub, "*": o.mul, "/": o.div, "|": max, "&": o.mul, ">": to_int(gt), "<": to_int(lt), ">=": to_int(gte), "<=": to_int(lte), "==": to_int(eq), "!=": to_int(neq) }
timohtey/mediadrop_copy
mediacore_env/Lib/site-packages/distribute-0.7.3/setup.py
Python
gpl-3.0
1,879
0.000532
#!/usr/bin/env python """Distutils setup file, used to install or test 'setuptools'""" import textwrap import sys try: import setuptools except ImportError: sys.stderr.write("Distribute 0.7 may only upgrade an existing " "Distribute 0.6 installation")
raise SystemExit(1) long_description = textwrap.dedent(""" Distribute - legacy package This package is a simple c
ompatibility layer that installs Setuptools 0.7+. """).lstrip() setup_params = dict( name="distribute", version='0.7.3', description="distribute legacy wrapper", author="The fellowship of the packaging", author_email="[email protected]", license="PSF or ZPL", long_description=long_description, keywords="CPAN PyPI distutils eggs package management", url="http://packages.python.org/distribute", zip_safe=True, classifiers=textwrap.dedent(""" Development Status :: 5 - Production/Stable Intended Audience :: Developers License :: OSI Approved :: Python Software Foundation License License :: OSI Approved :: Zope Public License Operating System :: OS Independent Programming Language :: Python :: 2.4 Programming Language :: Python :: 2.5 Programming Language :: Python :: 2.6 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.1 Programming Language :: Python :: 3.2 Programming Language :: Python :: 3.3 Topic :: Software Development :: Libraries :: Python Modules Topic :: System :: Archiving :: Packaging Topic :: System :: Systems Administration Topic :: Utilities """).strip().splitlines(), install_requires=[ 'setuptools>=0.7', ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
shazadan/mood-map
tweets/management/commands/stream.py
Python
gpl-2.0
361
0.00554
from django.core.mana
gement.base import BaseCommand, CommandError from tweets.tasks import stream
#The class must be named Command, and subclass BaseCommand class Command(BaseCommand): # Show this when the user types help help = "My twitter stream command" # A command must define handle() def handle(self, *args, **options): stream()
yytang2012/novels-crawler
novelsCrawler/spiders/piaotian.py
Python
mit
1,931
0.000518
#!/usr/bin/env python # coding=utf-8 """ Created on April 15 2017 @author: yytang """ from scrapy import Selector from libs.misc import get_spider_name_from_domain from libs.polish import polish_title, polish_subtitle, polish_content from novelsCrawler.spiders.novelSpider import NovelSpider class PiaotianSpider(NovelSpider): """ classdocs example: https://www.piaotian.com/html/9/9459/index.html """ allowed_domains = ['www.piaotian.com'] name = get_spider_name_from_domain(allowed_domains[0]) # custom_setti
ngs = { # 'DOWNLOAD_DELAY': 0.3, # } def parse_title(self, response): sel = Selector(response) title = sel.xpath('//h1/text()').extract()[0] title = polish_title(title, self.name) return title def parse_episodes(self, response): sel = Selector(response) episodes = [] subtitle_selectors = sel.xpath('//div[@class="centent"]/ul/li/a') for page_id, subtitle_selector in enumerat
e(subtitle_selectors): subtitle_url = subtitle_selector.xpath('@href').extract()[0] subtitle_url = response.urljoin(subtitle_url.strip()) subtitle_name = subtitle_selector.xpath('text()').extract()[0] subtitle_name = polish_subtitle(subtitle_name) episodes.append((page_id, subtitle_name, subtitle_url)) return episodes def parse_content(self, response): # sel = Selector(response) # content = sel.xpath('//div[@id="content"]/p/text()').extract() # content = polish_content(content) html = str(response.body.decode('GBK')) pattern = r'&nbsp;&nbsp;&nbsp;&nbsp;(.*)' import re m = re.search(pattern, html) if m: content = m.group(1) else: content = '' content = content.replace('<br /><br />&nbsp;&nbsp;&nbsp;&nbsp;', '\n\n') return content
signed/intellij-community
python/testData/inspections/PyTupleAssignmentBalanceInspectionTest/unpackNonePy3.py
Python
apache-2.0
65
0.076923
a, b = <warning d
escr="Need more values to unpa
ck">None</warning>
dhoomakethu/apocalypse
apocalypse/server/__init__.py
Python
mit
86
0.011628
""" @author: dhoomakethu """ from __futur
e__ import absolute_import, u
nicode_literals
rokubun/android_rinex
setup.py
Python
bsd-2-clause
547
0.007313
#!/usr/env/bin/ python3 from setuptools import setup, Extension # #CXX_FLAGS = "-O3 -std=gnu
++11 -Wall -Wno-comment" # ## List of C/C++ sources that will conform the library #sources = [ # # "andrnx/clib/android.c", # #] setup(name="andrnx", version="0.1", description="Package to convert from GNSS logger to Rinex files", author='Miquel Garcia', author_ema
il='[email protected]', url='https://www.rokubun.cat', packages=['andrnx'], test_suite="andrnx.test", scripts=['bin/gnsslogger_to_rnx'])
ulikoehler/FreeCAD_drawing_dimensioning
InitGui.py
Python
gpl-3.0
3,832
0.008351
class DrawingDimensioningWorkbench (Workbench): # Icon generated using by converting linearDimension.svg to xpm format using Gimp Icon = ''' /* XPM */ static char * linearDimension_xpm[] = { "32 32 10 1", " c None", ". c #000000", "+ c #0008FF", "@ c #0009FF", "# c #000AFF", "$ c #00023D", "% c #0008F7", "& c #0008EE", "* c #000587", "= c #000001", ". .", ". .", ". .", ". .", ". .", ". .", ". .", ". .", ". .", ". .", ". .", ". .", ". +@@ + .", ". @+@@+ +@@+@ .", ". +@+@@@@@@ @@@@@@@# .", "$%@@@@@@@@@+@@@@@@@@@@@@@@@@@@&$", ". #@@@@@@@@ #+@@@@@@@@*=", ". @+@@+ +@@@@@ .", ". +@ #@++ .", ". # .", ". .", ". .", ". .", ". .", ".
.", ". .", ". .", ". .", ". .", ".
.", ". .", ". ."}; ''' MenuText = 'Drawing Dimensioning' def Initialize(self): import importlib, os from dimensioning import __dir__, debugPrint, iconPath import linearDimension import linearDimension_stack import deleteDimension import circularDimension import grabPointAdd import textAdd import textEdit import textMove import escapeDimensioning import angularDimension import radiusDimension import centerLines import noteCircle import toleranceAdd commandslist = [ 'dd_linearDimension', #where dd is short-hand for drawing dimensioning 'dd_linearDimensionStack', 'dd_circularDimension', 'dd_radiusDimension', 'dd_angularDimension', 'dd_centerLines', 'dd_centerLine', 'dd_noteCircle', 'dd_grabPoint', 'dd_addText', 'dd_editText', 'dd_moveText', 'dd_addTolerance', 'dd_deleteDimension', 'dd_escapeDimensioning', ] self.appendToolbar('Drawing Dimensioning', commandslist) import unfold import unfold_bending_note import unfold_export_to_dxf unfold_cmds = [ 'dd_unfold', 'dd_bendingNote', ] if hasattr(os,'uname') and os.uname()[0] == 'Linux' : #this command only works on Linux systems unfold_cmds.append('dd_exportToDxf') self.appendToolbar( 'Drawing Dimensioning Folding', unfold_cmds ) import weldingSymbols if int( FreeCAD.Version()[1] > 15 ) and int( FreeCAD.Version()[2].split()[0] ) > 5165: weldingCommandList = ['dd_weldingGroupCommand'] else: weldingCommandList = weldingSymbols.weldingCmds self.appendToolbar('Drawing Dimensioning Welding Symbols', weldingCommandList) self.appendToolbar('Drawing Dimensioning Help', [ 'dd_help' ]) FreeCADGui.addIconPath(iconPath) FreeCADGui.addPreferencePage( os.path.join( __dir__, 'Resources', 'ui', 'drawing_dimensioing_prefs-base.ui'),'Drawing Dimensioning' ) Gui.addWorkbench(DrawingDimensioningWorkbench())
gilshwartz/tortoisehg-caja
tortoisehg/hgqt/filelistmodel.py
Python
gpl-2.0
7,732
0.002069
# Copyright (c) 2009-2010 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:[email protected] # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. from tortoisehg.util import hglib, patchctx from tortoisehg.hgqt.qtlib import geticon, getoverlaidicon from PyQt4.QtCore import * from PyQt4.QtGui import * nullvariant = QVariant() def getSubrepoIcoDict(): 'Return a dictionary mapping each subrepo type to the corresponding icon' _subrepoType2IcoMap = { 'hg': 'hg', 'git': 'thg-git-subrepo', 'svn': 'thg-svn-subrepo', 'hgsubversion': 'thg-svn-subrepo', 'empty': 'hg' } icOverlay = geticon('thg-subrepo') subrepoIcoDict = {} for
stype in _subrepoType2IcoMap: ic = geticon(_subrepoType2IcoMap[stype]) ic = getoverlaidicon(ic, icOverlay) subrepoIcoDict[stype] = ic return subrepoIcoDict class HgFileListModel(QAbstractTableModel): """ Model used for listing (modi
fied) files of a given Hg revision """ showMessage = pyqtSignal(QString) def __init__(self, parent): QAbstractTableModel.__init__(self, parent) self._boldfont = parent.font() self._boldfont.setBold(True) self._ctx = None self._files = [] self._filesdict = {} self._fulllist = False self._subrepoIcoDict = getSubrepoIcoDict() @pyqtSlot(bool) def toggleFullFileList(self, value): self._fulllist = value self.loadFiles() self.layoutChanged.emit() def __len__(self): return len(self._files) def rowCount(self, parent=None): return len(self) def columnCount(self, parent=None): return 1 def file(self, row): return self._files[row]['path'] def setContext(self, ctx): reload = False if not self._ctx: reload = True elif self._ctx.rev() is None: reload = True elif ctx.thgid() != self._ctx.thgid(): reload = True if reload: self._ctx = ctx self.loadFiles() self.layoutChanged.emit() def fileFromIndex(self, index): if not index.isValid() or index.row()>=len(self) or not self._ctx: return None row = index.row() return self._files[row]['path'] def dataFromIndex(self, index): if not index.isValid() or index.row()>=len(self) or not self._ctx: return None row = index.row() return self._files[row] def indexFromFile(self, filename): if filename in self._filesdict: row = self._files.index(self._filesdict[filename]) return self.index(row, 0) return QModelIndex() def _buildDesc(self, parent): files = [] ctxfiles = self._ctx.files() modified, added, removed = self._ctx.changesToParent(parent) ismerge = bool(self._ctx.p2()) # Add the list of modified subrepos to the top of the list if not isinstance(self._ctx, patchctx.patchctx): if ".hgsubstate" in ctxfiles or ".hgsub" in ctxfiles: from mercurial import subrepo # Add the list of modified subrepos for s, sd in self._ctx.substate.items(): srev = self._ctx.substate.get(s, subrepo.nullstate)[1] stype = self._ctx.substate.get(s, subrepo.nullstate)[2] sp1rev = self._ctx.p1().substate.get(s, subrepo.nullstate)[1] sp2rev = '' if ismerge: sp2rev = self._ctx.p2().substate.get(s, subrepo.nullstate)[1] if srev != sp1rev or (sp2rev != '' and srev != sp2rev): wasmerged = ismerge and s in ctxfiles files.append({'path': s, 'status': 'S', 'parent': parent, 'wasmerged': wasmerged, 'stype': stype}) # Add the list of missing subrepos subreposet = set(self._ctx.substate.keys()) subrepoparent1set = set(self._ctx.p1().substate.keys()) missingsubreposet = subrepoparent1set.difference(subreposet) for s in missingsubreposet: wasmerged = ismerge and s in ctxfiles stype = self._ctx.p1().substate.get(s, subrepo.nullstate)[2] files.append({'path': s, 'status': 'S', 'parent': parent, 'wasmerged': wasmerged, 'stype': stype}) if self._fulllist and ismerge: func = lambda x: True else: func = lambda x: x in ctxfiles for lst, flag in ((added, 'A'), (modified, 'M'), (removed, 'R')): for f in filter(func, lst): wasmerged = ismerge and f in ctxfiles f = self._ctx.removeStandin(f) files.append({'path': f, 'status': flag, 'parent': parent, 'wasmerged': wasmerged}) return files def loadFiles(self): self._files = [] try: self._files = self._buildDesc(0) if bool(self._ctx.p2()): _paths = [x['path'] for x in self._files] _files = self._buildDesc(1) self._files += [x for x in _files if x['path'] not in _paths] except EnvironmentError, e: self.showMessage.emit(hglib.tounicode(str(e))) self._filesdict = dict([(f['path'], f) for f in self._files]) def data(self, index, role): if not index.isValid() or index.row()>len(self) or not self._ctx: return nullvariant if index.column() != 0: return nullvariant row = index.row() column = index.column() current_file_desc = self._files[row] current_file = current_file_desc['path'] if role in (Qt.DisplayRole, Qt.ToolTipRole): return QVariant(hglib.tounicode(current_file)) elif role == Qt.DecorationRole: if self._fulllist and bool(self._ctx.p2()): if current_file_desc['wasmerged']: icn = geticon('thg-file-merged') elif current_file_desc['parent'] == 0: icn = geticon('thg-file-p0') elif current_file_desc['parent'] == 1: icn = geticon('thg-file-p1') return QVariant(icn.pixmap(20,20)) elif current_file_desc['status'] == 'A': return QVariant(geticon('fileadd')) elif current_file_desc['status'] == 'R': return QVariant(geticon('filedelete')) elif current_file_desc['status'] == 'S': stype = current_file_desc.get('stype', 'hg') return QVariant(self._subrepoIcoDict[stype]) #else: # return QVariant(geticon('filemodify')) elif role == Qt.FontRole: if current_file_desc['wasmerged']: return QVariant(self._boldfont) else: return nullvariant
pyblish/pyblish-mindbender
mindbender/maya/pythonpath/userSetup.py
Python
mit
488
0
"""Maya initialis
ation for Mindbender pipeline""" from maya import cmds def setup(): assert __import__("pyblish_maya").is_setup(), ( "pyblish-mindbender depends on pyblish_maya which has not " "yet been setup. Run pyblish_maya.setup()") from pyblish import api api.register_gui("pyblish_lite") from mindbender
import api, maya api.install(maya) # Allow time for dependencies (e.g. pyblish-maya) # to be installed first. cmds.evalDeferred(setup)
balancehero/kinsumer
kinsumer/checkpointer.py
Python
mit
1,882
0.000531
""":mod:`kinsumer.checkpointer` --- Persisting positions for Kinesis shards ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ """ import abc import json import os.path from typing import Optional, Dict class Checkpointer(abc.ABC, object): """Checkpointer is the interface for persisting positions for Kinesis shards """ @abc.abstractmethod def get_checkpoints(self) -> Dict[str, str]: """Get a dictionary whose keys are all the shard ids we are aware of, and whose values are the sequence id of the last record processed for its shard """ @abc.abstractmethod def checkpoint(self, shard_id: str, sequence: str) -> None: """Persist the sequence number for a given shard""" @abc.abstractmethod def get_checkpoint(self, shard_id: str) -> Optional[str]: """Get the sequence number of the last successfully processed record""" class InMemoryCheckpointer(Checkpointer): def __init__(self) -> None: self._checkpoints = {} def get_checkpoints(self) -> Dict[str, str]: return self._checkpoints.copy() def checkpoint(self, shard_id: str, sequence: str) -> None: self._checkpoints[shard_i
d] = sequence def get_checkpoint(self, shard_id: str) -> Optional[str]: return self._checkpoints.get(shard_id) class FileCheckpointer(InMemoryCheckpointer): def __init__(self, file: str) -> None:
super().__init__() self.file = os.path.expanduser(file) if os.path.exists(self.file): with open(self.file, 'rb') as f: self._checkpoints = json.load(f) def checkpoint(self, shard_id: str, sequence: str) -> None: super().checkpoint(shard_id, sequence) with open(self.file, 'wb') as f: f.write(json.dumps(self._checkpoints, ensure_ascii=False).encode())
gribvirus74/Bee-per
Error.py
Python
gpl-3.0
534
0.035581
class ParametrizedError(Exception): def __i
nit__(self, problem, invalid): self.problem = str(problem) self.invalid = str(invalid) def __str__(self): print('--- Error: {0}\n--- Caused by: {1}'.format(self.problem, self.invalid)) class InvalidToken(ParametrizedError): pass class ToneError(ParametrizedError): pass class IntervalError(ParametrizedError): pass class TriolaError(ParametrizedError): pass class ConfigError(ParametrizedError): pass class ComposingError(Param
etrizedError): pass
F5Networks/f5-common-python
f5/bigip/tm/cm/test/functional/test_failover_status.py
Python
apache-2.0
1,104
0.000906
# Copyright 2016 F5 Networks Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,
software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # class TestFailoverStatus(object): def test_get_status(self, request, mgmt_root): failover_status = mgmt_root.tm.cm.failover_status assert failover_status._meta_data['uri'].endswith( "/mgmt/tm/c
m/failover-status/") failover_status.refresh() des =\ (failover_status.entries['https://localhost/mgmt/tm/cm/failover-status/0'] ['nestedStats'] ['entries'] ['status'] ['description']) assert des == "ACTIVE"
OpenMined/PySyft
tests/integration/smpc/tensor/share_tensor_test.py
Python
apache-2.0
975
0
# third party # third party import numpy as np import pytest # syft absolute # absolute from syft.core.tensor.smpc.share_tensor import ShareTensor @pytest.mark.smpc def test_bit_extraction() -> None: share = ShareTensor(rank=0, parties_info=[], ring_size=2**32) data = np
.array([[21, 32], [-54, 89]], dtype=np.int32) share.child = data exp_res1 = np.array([[False, False], [True, False]], dtype=np.bool_) res = share.bit_extraction(31).child assert (res == exp_res1).all() exp_res2 = np.array([[True, False], [False, False]], dtype=np.bool_)
res = share.bit_extraction(2).child assert (res == exp_res2).all() @pytest.mark.smpc def test_bit_extraction_exception() -> None: share = ShareTensor(rank=0, parties_info=[], ring_size=2**32) data = np.array([[21, 32], [-54, 89]], dtype=np.int32) share.child = data with pytest.raises(Exception): share >> 33 with pytest.raises(Exception): share >> -1
nsalomonis/BAM-to-Junction-BED
multiBAMtoBED.py
Python
apache-2.0
19,040
0.024685
### hierarchical_clustering.py #Author Nathan Salomonis - [email protected] #Permission is hereby granted, free of charge, to any person obtaining a copy #of this software and associated documentation files (the "Software"), to deal #in the Software without restriction, including without limitation the rights #to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #copies of the Software, and to permit persons to whom the Software is furnished #to do so, subject to the following conditions: #THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, #INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A #PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT #HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION #OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE #SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ Batch script for extracting many junction.bed and building exon.bed files from an input set of BAM files in a directory. Requires a reference text file containing exon regions (currently provided from AltAnalyze - see ReferenceExonCoordinates folder). Can produce only junction.bed files, only a combined exon reference or only exon.bed files optionally. Can run using a single processor or multiple simultaneous processes (--m flag).""" import export import string import time import sys, os import shutil import unique import subprocess import BAMtoJunctionBED import BAMtoExonBED import getopt import traceback ################# General data import methods ################# def filepath(filename): fn = unique.filepath(filename) return fn def cleanUpLine(line): data = string.replace(line,'\n','') data = string.replace(data,'\c','') data = string.replace(data,'\r','') data = string.replace(data,'"','') return data def getFolders(sub_dir): dir_list = unique.read_directory(sub_dir); dir_list2 = [] ###Only get folder names for entry in dir_list: if '.' not in entry: dir_list2.append(entry) return dir_list2 def getFiles(sub_dir): dir_list = unique.read_directory(sub_dir); dir_list2 = [] ###Only get folder names for entry in dir_list: if '.' in entry: dir_list2.append(entry) return dir_list2 def getFolders(sub_dir): dir_list = unique.read_directory(sub_dir); dir_list2 = [] ###Only get folder names for entry in dir_list: if '.' not in entry: dir_list2.append(entry) return dir_list2 def parallelBAMProcessing(directory,refExonCoordinateFile,bed_reference_dir,analysisType=[],useMultiProcessing=False,MLP=None,root=None): paths_to_run=[] errors=[] if '.bam' in directory: ### Allow a single BAM file to be specifically analyzed (e.g., bsub operation) bam_file = directory bam_file = string.replace(directory,'\\','/') directory = string.join(string.split(directory,'/')[:-1],'/') else: bam_file = None outputExonCoordinateRefBEDfile = str(bed_reference_dir) bed_reference_dir = string.replace(bed_reference_dir,'\\','/') ### Check if the BAM files are located in the target folder (not in subdirectories) files = getFiles(directory) for file in files: if '.bam' in file and '.bai' not in file: source_file = directory+'/'+file source_file = filepath(source_file) output_filename = string.replace(file,'.bam','') output_filename = string.replace(output_filename,'=','_') destination_file = directory+'/'+output_filename+'__exon.bed' destination_file = filepath(destination_file) paths_to_run.append((source_file,refExonCoordinateFile,bed_reference_dir,destination_file)) ### Otherwise, check subdirectories for BAM files folders = getFolders(directory) if len(paths_to_run)==0: for top_level in folders: try: files = getFiles(directory+'/'+top_level) for file in files: if '.bam' in file and '.bai' not in file: source_file = directory+'/'+file source_file = filepath(source_file) destination_file = directory+'/'+top_level+'__exon.bed' destination_file = filepath(destination_file) paths_to_run.append((source_file,refExonCoordinateFile,bed_reference_d
ir,destination_file)) except Exception: pass ### If a single BAM file is indicated
if bam_file != None: output_filename = string.replace(bam_file,'.bam','') output_filename = string.replace(output_filename,'=','_') destination_file = output_filename+'__exon.bed' paths_to_run = [(bam_file,refExonCoordinateFile,bed_reference_dir,destination_file)] if 'reference' in analysisType and len(analysisType)==1: augmentExonReferences(directory,refExonCoordinateFile,outputExonCoordinateRefBEDfile) sys.exit() if useMultiProcessing: pool_size = MLP.cpu_count() if len(paths_to_run)<pool_size: pool_size = len(paths_to_run) print 'Using %d processes' % pool_size if len(paths_to_run) > pool_size: pool_size = len(paths_to_run) if len(analysisType) == 0 or 'junction' in analysisType: print 'Extracting junction alignments from BAM files...', pool = MLP.Pool(processes=pool_size) try: results = pool.map(runBAMtoJunctionBED, paths_to_run) ### worker jobs initiated in tandem except ValueError: print_out = '\WARNING!!! No Index found for the BAM files (.bam.bai). Sort and Index using Samtools prior to loading in AltAnalyze' print traceback.format_exc() if root!=None: import UI UI.WarningWindow(print_out,'Exit');sys.exit() try:pool.close(); pool.join(); pool = None except Exception: pass print_out=None for sample,missing in results: if len(missing)>1: print_out = '\nWarning!!! %s chromosomes not found in: %s (PySam platform-specific error)' % (string.join(missing,', '),sample) if root!=None and print_out!=None: try: import UI UI.WarningWindow(print_out,'Continue') except Exception: pass print len(paths_to_run), 'BAM files','processed' if len(analysisType) == 0 or 'reference' in analysisType: #print 'Building exon reference coordinates from Ensembl/UCSC and all junctions...', augmentExonReferences(directory,refExonCoordinateFile,outputExonCoordinateRefBEDfile) #print 'completed' print 'Extracting exon alignments from BAM files...', if len(analysisType) == 0 or 'exon' in analysisType: pool = MLP.Pool(processes=pool_size) results = pool.map(runBAMtoExonBED, paths_to_run) ### worker jobs initiated in tandem try:pool.close(); pool.join(); pool = None except Exception: pass print len(paths_to_run), 'BAM files','processed' else: if len(analysisType) == 0 or 'junction' in analysisType: for i in paths_to_run: runBAMtoJunctionBED(i) if len(analysisType) == 0 or 'reference' in analysisType: augmentExonReferences(directory,refExonCoordinateFile,outputExonCoordinateRefBEDfile) if len(analysisType) == 0 or 'exon' in analysisType: for i in paths_to_run: runBAMtoExonBED(i) def runBAMtoJunctionBED(paths_to_run): bamfile_dir,refExonCoordinateFile,bed_reference_dir,output_bedfile_path = paths_to_run output_bedfile_path = string.replace(bamfile_dir,'.bam','__junction.bed') #if os.path.exists(output_bedfile_path) == False: ### Onl
kagenZhao/cnBeta
CnbetaApi/CnbetaApis/views.py
Python
mit
3,183
0.002513
#!/usr/bin/env python3 from django.shortcuts import render # Create your views here. from CnbetaApis.datas.Models import * from CnbetaApis.datas.get_letv_json import get_letv_json from CnbetaApis.datas.get_youku_json import get_youku_json from django.views.decorators.csrf import csrf_exempt from django.http import * from datetime import timezone, timedelta import json def getrelate(ids, session): relateds = session.query(Article).filter(Article.id.in_(ids)) relateds_arr = [] for related in relateds: relateds_arr.append({ 'id': related.id, 'title': related.title, 'url': related.url, }) return relateds_arr def get_home_data(request): if not request.method == 'GET': raise HttpResponseNotAllowed('GET') lastID = request.GET.get('lastid') limit = request
.GET.get('limit') or 20 session = DBSession() datas = None if lastID: datas = session.query(Article).order_by(desc(
Article.id)).filter(and_(Article.introduction != None, Article.id < lastID)).limit(limit).all() else: datas = session.query(Article).order_by(desc(Article.id)).limit(limit).all() values = [] for data in datas: values.append({ 'id': data.id, 'title': data.title, 'url': data.url, 'source': data.source, 'imgUrl': data.imgUrl, 'introduction': data.introduction, 'createTime': data.createTime.replace(tzinfo=timezone(timedelta(hours=8))).astimezone(timezone.utc).timestamp(), 'related': getrelate(data.related.split(','), session), 'readCount': data.readCount, 'opinionCount': data.opinionCount, }) session.close() return JsonResponse({"result": values}) def get_article_content(request): if not request.method == 'GET': raise HttpResponseNotAllowed('GET') article_id = request.GET.get('id') session = DBSession() datas = session.query(Article).filter(Article.id == article_id).all() if not len(datas): raise Http404('Article not exist') data = datas[0] result = {'result': { 'id': data.id, 'title': data.title, 'url': data.url, 'imgUrl': data.imgUrl, 'source': data.source, 'introduction': data.introduction, 'createTime': data.createTime.replace(tzinfo=timezone(timedelta(hours=8))).astimezone(timezone.utc).timestamp(), 'related': getrelate(data.related.split(','), session), 'readCount': data.readCount, 'opinionCount': data.opinionCount, 'content': json.loads(data.content), }} session.close() return JsonResponse(result) @csrf_exempt def get_video_realUrl(req): if not req.method == 'POST': raise HttpResponseNotAllowed('POST') source_url = req.POST.get('url') source_type = req.POST.get('type') if source_type == "youku": source_url = get_youku_json(source_url) elif source_type == "letv": source_url = get_letv_json(source_url) else: raise Http404('Article not exist') return JsonResponse({"result": source_url})
fbradyirl/home-assistant
tests/components/google/test_calendar.py
Python
apache-2.0
11,260
0.000977
"""The tests for the google calendar platform.""" import copy from unittest.mock import Mock, patch import httplib2 import pytest from homeassistant.components.google import ( CONF_CAL_ID, CONF_CLIENT_ID, CONF_CLIENT_SECRET, CONF_DEVICE_ID, CONF_ENTITIES, CONF_NAME, CONF_TRACK, DEVICE_SCHEMA, SERVICE_SCAN_CALENDARS, do_setup, ) from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.helpers.template import DATE_STR_FORMAT from homeassistant.setup import async_setup_component from homeassistant.util import slugify import homeassistant.util.dt as dt_util from tests.common import async_mock_service GOOGLE_CONFIG = {CONF_CLIENT_ID: "client_id", CONF_CLIENT_SECRET: "client_secret"} TEST_ENTITY = "calendar.we_are_we_are_a_test_calendar" TEST_ENTITY_NAME = "We are, we are, a... Test Calendar" TEST_EVENT = { "summary": "Test All Day Event", "start": {}, "end": {}, "location": "Test Cases", "description": "test event", "kind": "calendar#event", "created": "2016-06-23T16:37:57.000Z", "transparency": "transparent", "updated": "2016-06-24T01:57:21.045Z", "reminders": {"useDefault": True}, "organizer": { "email": "[email protected]", "displayName": "Organizer Name", "self": True, }, "sequence": 0, "creator": { "email": "[email protected]", "displayName": "Organizer Name", "self": True, }, "id": "_c8rinwq863h45qnucyoi43ny8", "etag": '"2933466882090000"', "htmlLink": "https://www.google.com/calendar/event?eid=*******", "iCalUID": "[email protected]", "status": "confirmed", } def get_calendar_info(calendar): """Convert data from Google into DEVICE_SCHEMA.""" calendar_info = DEVICE_SCHEMA( { CONF_CAL_ID: calendar["id"], CONF_ENTITIES: [ { CONF_TRACK: calendar["track"], CONF_NAME: calendar["summary"], CONF_DEVICE_ID: slugify(calendar["summary"]), } ], } ) return calendar_info @pytest.fixture(autouse=True) def mock_google_setup(hass, test_calendar): """Mock the google set up functions.""" hass.loop.run_until_complete(async_setup_component(hass, "group", {"group": {}})) calendar = get_calendar_info(test_calendar) calendars = {calendar[CONF_CAL_ID]: calendar} patch_google_auth = patch( "homeassistant.components.google.do_authentication", side_effect=do_setup ) patch_google_load = patch( "homeassistant.components.google.load_config", return_value=calendars ) patch_google_services = patch("homeassistant.components.google.setup_services") async_mock_service(hass, "google", SERVICE_SCAN_CALENDARS) with patch_google_auth, patch_google_load, patch_google_services: yield @pytest.fixture(autouse=True) def mock_http(hass): """Mock the http component.""" hass.http = Mock() @pytest.fixture(autouse=True) def set_time_zone(): """Set the time zone for the tests.""" # Set our timezone to CST/Regina so we can check calculations # This keeps UTC-6 all year round dt_util.set_default_time_zone(dt_util.get_time_zone("America/Regina")) yield dt_util.set_default_time_zone(dt_util.get_time_zone("UTC")) @pytest.fixture(name="google_service") def mock_google_service(): """Mock google service.""" patch_google_service = patch( "homeassistant.components.google.calendar.GoogleCalendarService" ) with patch_google_service as mock_service: yield mock_service async def test_all_day_event(hass, mock_next_event): """Test that we can create an event trigger on device.""" week_from_today = dt_util.dt.date.today() + dt_util.dt.timedelta(days=7) end_event = week_from_today + dt_util.dt.timedelta(days=1) event = copy.deepcopy(TEST_EVENT) start = week_from_today.isoformat() end = end_event.isoformat() event["start"]["date"] = start event["end"]["date"] = end mock_next_event.return_value.event = event assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG}) await hass.async_block_till_done() state = hass.states.get(TEST_ENTITY) assert state.name == TEST_ENTITY_NAME assert state.state == STATE_OFF assert dict(state.attributes) == { "friendly_name": TEST_ENTITY_NAME, "message": event["summary"], "all_day": True, "offset_reached": False, "start_time": week_from_today.strftime(DATE_STR_FORMAT), "end_time": end_event.strftime(DATE_STR_FORMAT), "location": event["location"], "description": event["description"], } async def test_future_event(hass, mock_next_event): """Test that we can create an event trigger on device.""" one_hour_from_now = dt_util.now() + dt_util.dt.timedelta(minutes=30) end_event = one_hour_from_now + dt_util.dt.timedelta(minutes=60) start = one_hour_from_now.isoformat() end = end_event.isoformat() event = copy.deepcopy(TEST_EVENT) event["start"]["dateTime"] = start event["end"]["dateTime"] = end mock_next_event.return_value.event = event assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG}) await hass.async_block_till_done() state = hass.states.get(TEST_ENTITY) assert state.name == TEST_ENTITY_NAME assert state.state == STATE_OFF assert dict(state.attributes) == { "friendly_name": TEST_ENTITY_NAME, "message": event["summary"], "all_day": False, "offset_reached": False, "start_time": one_hour_from_now.strftime(DATE_STR_FORMAT), "end_time": end_event.strftime(DATE_STR_FORMAT), "location": event["location"], "description": event["description"], } async def test_in_progress_event(hass, mock_next_event): """Test that we can create an event trigger on device.""" middle_of_event = dt_util.now() - dt_util.dt.timedelta(minutes=30) end_event = middle
_of_event + dt_util.dt.timedelta(minutes=60) start = middle_of_event.isoformat() end = end_event.isoformat() event = copy.deepcopy(TEST_EVENT) event["start"]["dateTime"] = start event["end"]["dateTime"] = end mock_next_event.return_value.event = event assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG
}) await hass.async_block_till_done() state = hass.states.get(TEST_ENTITY) assert state.name == TEST_ENTITY_NAME assert state.state == STATE_ON assert dict(state.attributes) == { "friendly_name": TEST_ENTITY_NAME, "message": event["summary"], "all_day": False, "offset_reached": False, "start_time": middle_of_event.strftime(DATE_STR_FORMAT), "end_time": end_event.strftime(DATE_STR_FORMAT), "location": event["location"], "description": event["description"], } async def test_offset_in_progress_event(hass, mock_next_event): """Test that we can create an event trigger on device.""" middle_of_event = dt_util.now() + dt_util.dt.timedelta(minutes=14) end_event = middle_of_event + dt_util.dt.timedelta(minutes=60) start = middle_of_event.isoformat() end = end_event.isoformat() event_summary = "Test Event in Progress" event = copy.deepcopy(TEST_EVENT) event["start"]["dateTime"] = start event["end"]["dateTime"] = end event["summary"] = "{} !!-15".format(event_summary) mock_next_event.return_value.event = event assert await async_setup_component(hass, "google", {"google": GOOGLE_CONFIG}) await hass.async_block_till_done() state = hass.states.get(TEST_ENTITY) assert state.name == TEST_ENTITY_NAME assert state.state == STATE_OFF assert dict(state.attributes) == { "friendly_name": TEST_ENTITY_NAME, "message": event_summary, "all_day": False, "offset_reached": True, "start_time": middle_of_event.strftime(DATE_STR_FORMAT), "
capoe/espressopp.soap
src/Int3D.py
Python
gpl-3.0
3,721
0.017468
# Copyright (C) 2012,2013 # Max Planck Institute for Polymer Research # Copyright (C) 2008,2009,2010,2011 # Max-Planck-Institute for Polymer Research & Fraunhofer SCAI # # This file is part of ESPResSo++. # # ESPResSo++ is free software: you can redistr
ibute it and/or modify # it under th
e terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo++ is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. r""" ******************** **espressopp.Int3D** ******************** .. function:: espressopp.__Int3D(\*args) :param \*args: :type \*args: .. function:: espressopp.__Int3D.x(v, [0) :param v: :param [0: :type v: :type [0: :rtype: .. function:: espressopp.__Int3D.y(v, [1) :param v: :param [1: :type v: :type [1: :rtype: .. function:: espressopp.__Int3D.z(v, [2) :param v: :param [2: :type v: :type [2: :rtype: .. function:: espressopp.toInt3DFromVector(\*args) :param \*args: :type \*args: .. function:: espressopp.toInt3D(\*args) :param \*args: :type \*args: """ from _espressopp import Int3D from espressopp import esutil # This injects additional methods into the Int3D class and pulls it # into this module class __Int3D(Int3D) : __metaclass__ = esutil.ExtendBaseClass __originit = Int3D.__init__ def __init__(self, *args): if len(args) == 0: x = y = z = 0.0 elif len(args) == 1: arg0 = args[0] if isinstance(arg0, Int3D): x = arg0.x y = arg0.y z = arg0.z # test whether the argument is iterable and has 3 elements elif hasattr(arg0, '__iter__') and len(arg0) == 3: x, y, z = arg0 elif isinstance(arg0, int): x = y = z = arg0 else : raise TypeError("Cannot initialize Int3D from %s" % (args)) elif len(args) == 3 : x, y, z = args else : raise TypeError("Cannot initialize Int3D from %s" % (args)) return self.__originit(x, y, z) # create setters and getters @property def x(self): return self[0] @x.setter def x(self, v): self[0] = v @property def y(self) : return self[1] @y.setter def y(self, v) : self[1] = v @property def z(self) : return self[2] @z.setter def z(self, v) : self[2] = v # string conversion def __str__(self) : return str((self[0], self[1], self[2])) def __repr__(self) : return 'Int3D' + str(self) def toInt3DFromVector(*args): """Try to convert the arguments to a Int3D. This function will only convert to a Int3D if x, y and z are specified.""" if len(args) == 1: arg0 = args[0] if isinstance(arg0, Int3D): return arg0 elif hasattr(arg0, '__iter__') and len(arg0) == 3: return Int3D(*args) elif len(args) == 3: return Int3D(*args) raise TypeError("Specify x, y and z.") def toInt3D(*args): """Try to convert the arguments to a Int3D, returns the argument, if it is already a Int3D.""" if len(args) == 1 and isinstance(args[0], Int3D): return args[0] else: return Int3D(*args)
areeda/gwpy
gwpy/plot/tests/test_segments.py
Python
gpl-3.0
5,755
0
# -*- coding: utf-8 -*- # Copyright (C) Duncan Macleod (2018-2020) # # This file is part of GWpy. # # GWpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # GWpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for mo
re details. # # You should have received a copy of the GNU General Public License # along with GWpy. If not, see <http://www.gnu.org/licenses/>. """Tests for `gwpy.plot.segments` """ import pytest import numpy from matplotlib import rcParams from matplotlib.colo
rs import ColorConverter from matplotlib.collections import PatchCollection from ...segments import (Segment, SegmentList, SegmentListDict, DataQualityFlag, DataQualityDict) from ...time import to_gps from .. import SegmentAxes from ..segments import SegmentRectangle from .test_axes import TestAxes as _TestAxes # extract color cycle COLOR_CONVERTER = ColorConverter() COLOR_CYCLE = rcParams['axes.prop_cycle'].by_key()['color'] COLOR0 = COLOR_CONVERTER.to_rgba(COLOR_CYCLE[0]) class TestSegmentAxes(_TestAxes): AXES_CLASS = SegmentAxes @staticmethod @pytest.fixture() def segments(): return SegmentList([Segment(0, 3), Segment(6, 7)]) @staticmethod @pytest.fixture() def flag(): known = SegmentList([Segment(0, 3), Segment(6, 7)]) active = SegmentList([Segment(1, 2), Segment(3, 4), Segment(5, 7)]) return DataQualityFlag(name='Test segments', known=known, active=active) def test_plot_flag(self, ax, flag): c = ax.plot_flag(flag) assert c.get_label() == flag.texname assert len(ax.collections) == 2 assert ax.collections[0] is c flag.isgood = False c = ax.plot_flag(flag) assert tuple(c.get_facecolors()[0]) == (1., 0., 0., 1.) c = ax.plot_flag(flag, known={'facecolor': 'black'}) c = ax.plot_flag(flag, known='fancy') def test_plot_dqflag(self, ax, flag): with pytest.deprecated_call(): ax.plot_dqflag(flag) assert ax.collections # make sure it plotted something def test_plot_dict(self, ax, flag): dqd = DataQualityDict() dqd['a'] = flag dqd['b'] = flag colls = ax.plot_dict(dqd) assert len(colls) == len(dqd) assert all(isinstance(c, PatchCollection) for c in colls) assert colls[0].get_label() == 'a' assert colls[1].get_label() == 'b' colls = ax.plot_dict(dqd, label='name') assert colls[0].get_label() == 'Test segments' colls = ax.plot_dict(dqd, label='anything') assert colls[0].get_label() == 'anything' def test_plot_dqdict(self, ax, flag): with pytest.deprecated_call(): ax.plot_dqdict(DataQualityDict(a=flag)) def test_plot_segmentlist(self, ax, segments): c = ax.plot_segmentlist(segments) assert isinstance(c, PatchCollection) assert numpy.isclose(ax.dataLim.x0, 0.) assert numpy.isclose(ax.dataLim.x1, 7.) assert len(c.get_paths()) == len(segments) assert ax.get_epoch() == segments[0][0] # test y p = ax.plot_segmentlist(segments).get_paths()[0].get_extents() assert p.y0 + p.height/2. == 1. p = ax.plot_segmentlist(segments, y=8).get_paths()[0].get_extents() assert p.y0 + p.height/2. == 8. # test kwargs c = ax.plot_segmentlist(segments, label='My segments', rasterized=True) assert c.get_label() == 'My segments' assert c.get_rasterized() is True # test collection=False c = ax.plot_segmentlist(segments, collection=False, label='test') assert isinstance(c, list) assert not isinstance(c, PatchCollection) assert c[0].get_label() == 'test' assert c[1].get_label() == '' assert len(ax.patches) == len(segments) # test empty c = ax.plot_segmentlist(type(segments)()) def test_plot_segmentlistdict(self, ax, segments): sld = SegmentListDict() sld['TEST'] = segments ax.plot(sld) def test_plot(self, ax, segments, flag): dqd = DataQualityDict(a=flag) ax.plot(segments) ax.plot(flag) ax.plot(dqd) ax.plot(flag, segments, dqd) def test_insetlabels(self, ax, segments): ax.plot(segments) ax.set_insetlabels(True) def test_fmt_data(self, ax): # just check that the LIGOTimeGPS repr is in place value = 1234567890.123 assert ax.format_xdata(value) == str(to_gps(value)) # -- disable tests from upstream def test_imshow(self): return NotImplemented def test_segmentrectangle(): patch = SegmentRectangle((1.1, 2.4), 10) assert patch.get_xy(), (1.1, 9.6) assert numpy.isclose(patch.get_height(), 0.8) assert numpy.isclose(patch.get_width(), 1.3) assert patch.get_facecolor() == COLOR0 # check kwarg passing patch = SegmentRectangle((1.1, 2.4), 10, facecolor='red') assert patch.get_facecolor() == COLOR_CONVERTER.to_rgba('red') # check valign patch = SegmentRectangle((1.1, 2.4), 10, valign='top') assert patch.get_xy() == (1.1, 9.2) patch = SegmentRectangle((1.1, 2.4), 10, valign='bottom') assert patch.get_xy() == (1.1, 10.0) with pytest.raises(ValueError): patch = SegmentRectangle((0, 1), 0, valign='blah')
kumar303/rockit
vendor-local/boto/mturk/qualification.py
Python
bsd-3-clause
6,761
0.005177
# Copyright (c) 2008 Chris Moyer http://coredumped.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. class Qualifications: def __init__(self, requirements=None): if requirements == None: requirements = [] self.requirements = requirements def add(self, req): self.requirements.append(req) def get_as_params(self): params = {} assert(len(self.requirements) <= 10) for n, req in enumerate(self.requirements): reqparams = req.get_as_params() for rp in reqparams: params['QualificationRequirement.%s.%s' % ((n+1),rp) ] = reqparams[rp] return params class Requirement(object): """ Representation of a single requirement """ def __init__(self, qualification_type_id, comparator, integer_value=None, required_to_preview=False): self.qualification_type_id = qualification_type_id self.comparator = comparator self.integer_value = integer_value self.required_to_preview = required_to_preview def get_as_params(self): params = { "QualificationTypeId": self.qualification_type_id, "Comparator": self.comparator, } if self.comparator != 'Exists' and self.integer_value is not None: params['IntegerValue'] = self.integer_value if self.required_to_preview: params['RequiredToPreview'] = "true" return params class PercentAssignmentsSubmittedRequirement(Requirement): """ The percentage of assignments the Worker has submitted, over all assignments the Worker has accepted. The value is an integer between 0 and 100. """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="00000000000000000000", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview) class PercentAssignmentsAbandonedRequirement(Requirement): """ The percentage of assignments the Worker has abandoned (allowed the deadline to elapse), over all assignments the Worker has
accepted. The value is an integer between 0 and 100. """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="00000000000000000070", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview) class PercentAssignmentsReturne
dRequirement(Requirement): """ The percentage of assignments the Worker has returned, over all assignments the Worker has accepted. The value is an integer between 0 and 100. """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="000000000000000000E0", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview) class PercentAssignmentsApprovedRequirement(Requirement): """ The percentage of assignments the Worker has submitted that were subsequently approved by the Requester, over all assignments the Worker has submitted. The value is an integer between 0 and 100. """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="000000000000000000L0", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview) class PercentAssignmentsRejectedRequirement(Requirement): """ The percentage of assignments the Worker has submitted that were subsequently rejected by the Requester, over all assignments the Worker has submitted. The value is an integer between 0 and 100. """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="000000000000000000S0", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview) class NumberHitsApprovedRequirement(Requirement): """ Specifies the total number of HITs submitted by a Worker that have been approved. The value is an integer greater than or equal to 0. """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="00000000000000000040", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview) class LocaleRequirement(Requirement): """ A Qualification requirement based on the Worker's location. The Worker's location is specified by the Worker to Mechanical Turk when the Worker creates his account. """ def __init__(self, comparator, locale, required_to_preview=False): Requirement.__init__(self, qualification_type_id="00000000000000000071", comparator=comparator, integer_value=None, required_to_preview=required_to_preview) self.locale = locale def get_as_params(self): params = { "QualificationTypeId": self.qualification_type_id, "Comparator": self.comparator, 'LocaleValue.Country': self.locale, } if self.required_to_preview: params['RequiredToPreview'] = "true" return params class AdultRequirement(Requirement): """ Requires workers to acknowledge that they are over 18 and that they agree to work on potentially offensive content. The value type is boolean, 1 (required), 0 (not required, the default). """ def __init__(self, comparator, integer_value, required_to_preview=False): Requirement.__init__(self, qualification_type_id="00000000000000000060", comparator=comparator, integer_value=integer_value, required_to_preview=required_to_preview)
ammzen/SolveLeetCode
9PalindromeNumber.py
Python
mit
727
0.008253
# Determine whether an integer is a palindrome. Do this without extra space. class Solution: # @return a boolean def isPalindrome1(self, x): if x < 0 or x % 10 == 0 and x: return False xhalf = 0 while x > xhalf: xhalf = xhalf
* 10 + x % 10 x /= 10 return (x == xhalf or x == xhalf/10 ) def isPalindrome(self, x): if x < 0: return False size, xreverse = x, 0 while size: xreverse = xreverse * 10 + size % 10 si
ze = (size - (size % 10)) / 10 return True if xreverse==x else False if __name__ == '__main__': s = Solution() print s.isPalindrome1(0)
Trinak/PyHopeEngine
src/pyHopeEngine/event/guiEvents.py
Python
gpl-3.0
550
0.007273
''' Created o
n Aug 27, 2013 @author: Devon Define gui events ''' from pyHopeEngine import BaseEvent class Event_ButtonPressed(BaseEvent): '''Sent when a button is pressed''' eventType = "ButtonPressed" def __init__(self, value): '''Contains a value identifying the button''' self.value = value class Event_ScreenResize(BaseEvent): '''Sent when a screen resize is requestsed''' eventType = "ScreenResize" def __init__(self, width, height): self.width = width
self.height = height
gandalf-the-white/foundation
amaryl/scripts/initdatabase.py
Python
mit
1,705
0.021114
#!/usr/bin/env python # -*- coding: utf-8 -*- import mysql.connector import time import datetime conn = mysql.connector.connect(host="localhost",user="spike",password="valentine", database="drupal
") cann = mysql.connector.connect(host="localhost",user="spike",password="valentine", database="content_delivery_weather") cursor = conn.cursor() cursar = cann.cursor() cursor.execute("""SELECT uid, mail FROM users""") rows = cursor.fetchall() for row in rows: if row[0] != 0: p
rint('{0} : {1} '.format(row[0], row[1])) #print('UPDATE new_v4_users_probes_edit SET email = {0} WHERE uid = {1}'.format(row[1], row[0])) cursar.execute("""UPDATE new_v4_users_probes_edit SET email = %s WHERE userid = %s""",(row[1], row[0])) cursar.execute("""SELECT probename, probeid FROM new_v4_sonde""") rows = cursar.fetchall() for row in rows: cursar.execute("""SHOW TABLES LIKE %s""",("%" + row[0] + "%",)) rowsbis = cursar.fetchall() for rowbis in rowsbis: result = rowbis[0].split("_") month = 1 + int(result[4]) s = "01/" + str(month) + "/" + result[3] timestamp = time.mktime(datetime.datetime.strptime(s, "%d/%m/%Y").timetuple()) print('{0} : {1} year: {2} month: {3} timestamp: {4}'.format(row[0], rowbis[0], result[3], result[4], round(timestamp,0))) cursar.execute("""SELECT firsttime FROM new_v4_sonde WHERE probeid = %s""",(row[1],)) rowsbisbis = cursar.fetchall() for rowbisbis in rowsbisbis: if rowbisbis[0] == None: cursar.execute("""UPDATE new_v4_sonde SET firsttime = %s WHERE probeid = %s""",(timestamp,row[1])) print('firsttime: {0}'.format(rowbisbis[0],)) conn.close() cann.close()
marchdf/dotfiles
mypython/mypython/mytermcolor.py
Python
mit
5,928
0.000337
# I made some modifications to termcolor so you can pass HEX colors to # the colored function. It then chooses the nearest xterm 256 color to # that HEX color. This requires some color functions that I have added # in my python path. # # 2015/02/16 # # # coding: utf-8 # Copyright (c) 2008-2011 Volvox Development Team # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # # Author: Konstantin Lepa <[email protected]> """ANSII Color formatting for output in terminal.""" from __future__ import print_function import os import re from hexrgb_conversion import rgb from x256 import from_rgb __ALL__ = ["colored", "cprint"] VERSION = (1, 1, 0) ATTRIBUTES = dict( list( zip( ["bold", "dark", "", "underline", "blink", "", "reverse", "concealed"], list(range(1, 9)), ) ) ) del ATTRIBUTES[""] HIGHLIGHTS = dict( list( zip( [ "on_grey", "on_red", "on_green", "on_yellow", "on_blue", "on_magenta", "on_cyan", "on_white", ], list(range(40, 48)), ) ) ) COLORS = dict( list( zip( ["grey", "red", "green", "yellow", "blue", "magenta", "cyan", "white"], list(range(30, 38)), ) ) ) RESET = "\033[0m" def colored(text, color=None, on_color=None, attrs=None): """Colorize text. I made some modification so you can pass HEX colors too Available text colors: red, green, yellow, bl
ue, magenta, cyan, white. Available text highlights: on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white. Available attributes:
bold, dark, underline, blink, reverse, concealed. Example: colored('Hello, World!', 'red', 'on_grey', ['blue', 'blink']) colored('Hello, World!', 'green') """ if os.getenv("ANSI_COLORS_DISABLED") is None: fmt_str = "\033[%dm%s" if color is not None: if "#" in color: color = re.sub("[#]", "", color) RGB = rgb(color) x256_color_index = from_rgb(RGB[0], RGB[1], RGB[2]) text = "\033[38;5;%dm%s" % (x256_color_index, text) else: text = fmt_str % (COLORS[color], text) if on_color is not None: if "#" in on_color: on_color = re.sub("[#]", "", on_color) RGB = rgb(on_color) x256_color_index = from_rgb(RGB[0], RGB[1], RGB[2]) text = "\033[48;5;%dm%s" % (x256_color_index, text) else: text = fmt_str % (HIGHLIGHTS[on_color], text) if attrs is not None: for attr in attrs: text = fmt_str % (ATTRIBUTES[attr], text) text += RESET return text def cprint(text, color=None, on_color=None, attrs=None, **kwargs): """Print colorize text. It accepts arguments of print function. """ print((colored(text, color, on_color, attrs)), **kwargs) if __name__ == "__main__": print("Current terminal type: %s" % os.getenv("TERM")) print("Test basic colors:") cprint("Grey color", "grey") cprint("Red color", "red") cprint("Green color", "green") cprint("Yellow color", "yellow") cprint("Blue color", "blue") cprint("Magenta color", "magenta") cprint("Cyan color", "cyan") cprint("White color", "white") print(("-" * 78)) print("Test highlights:") cprint("On grey color", on_color="on_grey") cprint("On red color", on_color="on_red") cprint("On green color", on_color="on_green") cprint("On yellow color", on_color="on_yellow") cprint("On blue color", on_color="on_blue") cprint("On magenta color", on_color="on_magenta") cprint("On cyan color", on_color="on_cyan") cprint("On white color", color="grey", on_color="on_white") print("-" * 78) print("Test attributes:") cprint("Bold grey color", "grey", attrs=["bold"]) cprint("Dark red color", "red", attrs=["dark"]) cprint("Underline green color", "green", attrs=["underline"]) cprint("Blink yellow color", "yellow", attrs=["blink"]) cprint("Reversed blue color", "blue", attrs=["reverse"]) cprint("Concealed Magenta color", "magenta", attrs=["concealed"]) cprint( "Bold underline reverse cyan color", "cyan", attrs=["bold", "underline", "reverse"], ) cprint( "Dark blink concealed white color", "white", attrs=["dark", "blink", "concealed"], ) print(("-" * 78)) print("Test mixing:") cprint("Underline red on grey color", "red", "on_grey", ["underline"]) cprint("Reversed green on red color", "green", "on_red", ["reverse"]) print("Using HEX colors:") cprint("Use HEX color EE2E2F", "#EE2E2F")
shub0/algorithm-data-structure
python/find_minimum.py
Python
bsd-3-clause
1,730
0.004046
#! /usr/bin/python ''' Suppose a sorted array is rotated at some pivot unknown to you beforehand. (i.e., 0 1 2 4 5 6 7 might become 4 5 6 7 0 1 2). Find the minimum element. ''' class Solution: # @param num, a list of integer # @return an integer # You may assume no duplicate exists in the array. def findMinNoDuplicate(self, num): INT_MIN_VALUE = -(2**32) size = len(num) if size == 0: return INT_MIN_VALUE elif size == 1: return num[0] low_index = 0 high_index = size - 1 while (low_index < high_index - 1): mid_index = low_index + (high_index - low_index) / 2 if (num[mid_index] > num[high_index]): low_index = mid_index else: high_index = mid_index return min(num[low_index], num[high_index]) # @param num, a list of integer # @return an integer # You may assume duplicate exists in the array. def findMinDuplicate(self, num): INT_MIN_VALUE = -(
2**32) size = len(num) if size == 0: return INT_MIN_VALUE elif size == 1: return num[0] low_index = 0 high_index = size - 1 while (low_index < high_index - 1): mid_index = low_index + (high_index - low_index) / 2 if (num[mid_index] > num[high_index]): low_index = mid_index elif (num[mid_index] < num[high_index]): high_index = mid_index else:
high_index -= 1 return min(num[low_index], num[high_index]) if __name__ == '__main__': solution = Solution() print solution.findMinDuplicate([3,3,1,2,2])
End of preview. Expand in Data Studio

github python code fim

Generated from tomekkorbak/python-github-code, limiting context length to 8192.

Downloads last month
109