commit
stringlengths 40
40
| old_file
stringlengths 5
117
| new_file
stringlengths 5
117
| old_contents
stringlengths 0
1.93k
| new_contents
stringlengths 19
3.3k
| subject
stringlengths 17
320
| message
stringlengths 18
3.28k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
42.4k
| completion
stringlengths 19
3.3k
| prompt
stringlengths 21
3.65k
|
---|---|---|---|---|---|---|---|---|---|---|---|
a144706bc53f439b7c45b31eb9e6ca5241e3b1a3 | scripts/check_process.py | scripts/check_process.py | #!/usr/bin/env python
'''Checks processes'''
#===============================================================================
# Import modules
#===============================================================================
# Standard Library
import os
import subprocess
import logging
# Third party modules
# Application modules
#===============================================================================
# Check script is running
#===============================================================================
def is_running(script_name):
'''Checks list of processes for script name and filters out lines with the
PID and parent PID. Returns a TRUE if other script with the same name is
found running.'''
try:
logger = logging.getLogger('root')
cmd1 = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
cmd2 = subprocess.Popen(['grep', '-v', 'grep'], stdin=cmd1.stdout,
stdout=subprocess.PIPE)
cmd3 = subprocess.Popen(['grep', '-v', str(os.getpid())], stdin=cmd2.stdout,
stdout=subprocess.PIPE)
cmd4 = subprocess.Popen(['grep', '-v', str(os.getppid())], stdin=cmd3.stdout,
stdout=subprocess.PIPE)
cmd5 = subprocess.Popen(['grep', script_name], stdin=cmd4.stdout,
stdout=subprocess.PIPE)
other_script_found = cmd5.communicate()[0]
if other_script_found:
logger.error('Script already runnning. Exiting...')
logger.error(other_script_found)
return True
return False
except Exception, e:
logger.error('System check failed ({error_v}). Exiting...'.format(
error_v=e))
return True
| #!/usr/bin/env python
'''Checks processes'''
#===============================================================================
# Import modules
#===============================================================================
# Standard Library
import os
import subprocess
import logging
# Third party modules
# Application modules
#===============================================================================
# Check script is running
#===============================================================================
def is_running(script_name):
'''Checks list of processes for script name and filters out lines with the
PID and parent PID. Returns a TRUE if other script with the same name is
found running.'''
try:
logger = logging.getLogger('root')
cmd1 = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
cmd2 = subprocess.Popen(['grep', '-v', 'grep'], stdin=cmd1.stdout,
stdout=subprocess.PIPE)
cmd3 = subprocess.Popen(['grep', '-v', str(os.getpid())], stdin=cmd2.stdout,
stdout=subprocess.PIPE)
cmd4 = subprocess.Popen(['grep', '-v', str(os.getppid())], stdin=cmd3.stdout,
stdout=subprocess.PIPE)
cmd5 = subprocess.Popen(['grep', script_name], stdin=cmd4.stdout,
stdout=subprocess.PIPE)
other_script_found = cmd5.communicate()[0]
if other_script_found:
logger.info('Script already runnning. Exiting...')
logger.info(other_script_found)
return True
return False
except Exception, e:
logger.error('System check failed ({error_v}). Exiting...'.format(
error_v=e))
return True
| Downgrade script already running to info | Downgrade script already running to info
| Python | mit | ninjawil/weather-station,ninjawil/weather-station,ninjawil/weather-station,ninjawil/weather-station,ninjawil/weather-station,ninjawil/weather-station,ninjawil/weather-station | #!/usr/bin/env python
'''Checks processes'''
#===============================================================================
# Import modules
#===============================================================================
# Standard Library
import os
import subprocess
import logging
# Third party modules
# Application modules
#===============================================================================
# Check script is running
#===============================================================================
def is_running(script_name):
'''Checks list of processes for script name and filters out lines with the
PID and parent PID. Returns a TRUE if other script with the same name is
found running.'''
try:
logger = logging.getLogger('root')
cmd1 = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
cmd2 = subprocess.Popen(['grep', '-v', 'grep'], stdin=cmd1.stdout,
stdout=subprocess.PIPE)
cmd3 = subprocess.Popen(['grep', '-v', str(os.getpid())], stdin=cmd2.stdout,
stdout=subprocess.PIPE)
cmd4 = subprocess.Popen(['grep', '-v', str(os.getppid())], stdin=cmd3.stdout,
stdout=subprocess.PIPE)
cmd5 = subprocess.Popen(['grep', script_name], stdin=cmd4.stdout,
stdout=subprocess.PIPE)
other_script_found = cmd5.communicate()[0]
if other_script_found:
logger.info('Script already runnning. Exiting...')
logger.info(other_script_found)
return True
return False
except Exception, e:
logger.error('System check failed ({error_v}). Exiting...'.format(
error_v=e))
return True
| Downgrade script already running to info
#!/usr/bin/env python
'''Checks processes'''
#===============================================================================
# Import modules
#===============================================================================
# Standard Library
import os
import subprocess
import logging
# Third party modules
# Application modules
#===============================================================================
# Check script is running
#===============================================================================
def is_running(script_name):
'''Checks list of processes for script name and filters out lines with the
PID and parent PID. Returns a TRUE if other script with the same name is
found running.'''
try:
logger = logging.getLogger('root')
cmd1 = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)
cmd2 = subprocess.Popen(['grep', '-v', 'grep'], stdin=cmd1.stdout,
stdout=subprocess.PIPE)
cmd3 = subprocess.Popen(['grep', '-v', str(os.getpid())], stdin=cmd2.stdout,
stdout=subprocess.PIPE)
cmd4 = subprocess.Popen(['grep', '-v', str(os.getppid())], stdin=cmd3.stdout,
stdout=subprocess.PIPE)
cmd5 = subprocess.Popen(['grep', script_name], stdin=cmd4.stdout,
stdout=subprocess.PIPE)
other_script_found = cmd5.communicate()[0]
if other_script_found:
logger.error('Script already runnning. Exiting...')
logger.error(other_script_found)
return True
return False
except Exception, e:
logger.error('System check failed ({error_v}). Exiting...'.format(
error_v=e))
return True
|
a5409ca51e95b4d6ca99a63e0422ca1fe8d344f8 | tags/templatetags/tags_tags.py | tags/templatetags/tags_tags.py | # -*- coding: utf8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django import template
from django.db.models.loading import get_model
from ..models import CustomTag
register = template.Library()
@register.assignment_tag
def get_obj_list(app, model, obj):
'''
Return list of all objects of type app.model tagged with a tag pointing to
obj (an object in the db, e.g. Person, Family, ...).
'''
try:
return get_model(app, model).objects.filter(
tags__slug='%s.%s-%d' % (
obj._meta.app_label, obj._meta.model_name, obj.id))
except:
return []
@register.assignment_tag
def get_tag_list(app, model, tag):
'''
Return list of all objects of type app.model tagged with the tag "tag".
'''
try:
return get_model(app, model).objects.filter(tags__slug='%s' % tag)
except:
return []
@register.filter
def as_tag_text(slug):
tag = CustomTag.objects.get(slug=slug)
return tag.as_tag_text()
| # -*- coding: utf8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.loading import get_model
from django.http import Http404
from ..models import CustomTag
register = template.Library()
@register.assignment_tag
def get_obj_list(app, model, obj):
'''
Return list of all objects of type app.model tagged with a tag pointing to
obj (an object in the db, e.g. Person, Family, ...).
'''
try:
return get_model(app, model).objects.filter(
tags__slug='%s.%s-%d' % (
obj._meta.app_label, obj._meta.model_name, obj.id))
except:
return []
@register.assignment_tag
def get_tag_list(app, model, tag):
'''
Return list of all objects of type app.model tagged with the tag "tag".
'''
try:
return get_model(app, model).objects.filter(tags__slug='%s' % tag)
except:
return []
@register.filter
def as_tag_text(slug):
try:
tag = CustomTag.objects.get(slug=slug)
return tag.as_tag_text()
except ObjectDoesNotExist:
raise Http404
| Fix server error in tag search for non-existing tag. | Fix server error in tag search for non-existing tag.
| Python | bsd-3-clause | ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio,ugoertz/django-familio | # -*- coding: utf8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django import template
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.loading import get_model
from django.http import Http404
from ..models import CustomTag
register = template.Library()
@register.assignment_tag
def get_obj_list(app, model, obj):
'''
Return list of all objects of type app.model tagged with a tag pointing to
obj (an object in the db, e.g. Person, Family, ...).
'''
try:
return get_model(app, model).objects.filter(
tags__slug='%s.%s-%d' % (
obj._meta.app_label, obj._meta.model_name, obj.id))
except:
return []
@register.assignment_tag
def get_tag_list(app, model, tag):
'''
Return list of all objects of type app.model tagged with the tag "tag".
'''
try:
return get_model(app, model).objects.filter(tags__slug='%s' % tag)
except:
return []
@register.filter
def as_tag_text(slug):
try:
tag = CustomTag.objects.get(slug=slug)
return tag.as_tag_text()
except ObjectDoesNotExist:
raise Http404
| Fix server error in tag search for non-existing tag.
# -*- coding: utf8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from django import template
from django.db.models.loading import get_model
from ..models import CustomTag
register = template.Library()
@register.assignment_tag
def get_obj_list(app, model, obj):
'''
Return list of all objects of type app.model tagged with a tag pointing to
obj (an object in the db, e.g. Person, Family, ...).
'''
try:
return get_model(app, model).objects.filter(
tags__slug='%s.%s-%d' % (
obj._meta.app_label, obj._meta.model_name, obj.id))
except:
return []
@register.assignment_tag
def get_tag_list(app, model, tag):
'''
Return list of all objects of type app.model tagged with the tag "tag".
'''
try:
return get_model(app, model).objects.filter(tags__slug='%s' % tag)
except:
return []
@register.filter
def as_tag_text(slug):
tag = CustomTag.objects.get(slug=slug)
return tag.as_tag_text()
|
73d0225b64ec82c7a8142dbac023be499b41fe0f | figures.py | figures.py | #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| Make the python script silent | Make the python script silent
| Python | mit | PoisotLab/PLMT | #! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
| Make the python script silent
#! /usr/bin/env python
import sys
import re
import yaml
FILE = sys.argv[1]
YAML = sys.argv[2]
TYPE = sys.argv[3]
header = open(YAML, "r")
text = open(FILE, "r")
copy = open(FILE+"_NEW", "wt")
docs = yaml.load_all(header)
for doc in docs:
if not doc == None:
if 'figure' in doc.keys():
for line in text:
mfig = False
for f in doc['figure']:
my_regex = r"^!\{" + re.escape(f['id']) + r"\}$"
if re.search(my_regex, line, re.IGNORECASE):
mfig = True
print line
if TYPE == 'preprint':
ftype = "figure"
fwidth = "\\columnwidth"
if "wide" in f.keys():
ftype = "figure*"
fwidth = "\\textwidth"
copy.write("\n\\begin{" + ftype + "}[bt]\n")
copy.write("\t\\centering\n")
print f
copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n")
copy.write("\t\\caption{" + f['caption'] + "}\n")
copy.write("\t\\label{" + f['id'] + "}\n")
copy.write("\\end{" + ftype + "}\n\n")
if not mfig:
copy.write(line)
header.close()
text.close()
copy.close()
|
248023106d4e881110a646e9d078ecad4f58e24d | pipelogger.py | pipelogger.py | #!/usr/bin/env python
#
import argparse
import os
import syslog
parser = argparse.ArgumentParser(
description='Syslog messages as read from a pipe')
parser.add_argument('-i', '--ident',
help='Use the given identifier for syslogging',
required=True)
parser.add_argument('pipe', help='Pipe file to read log records from')
args = parser.parse_args()
syslog.openlog(args.ident, 0)
if not os.path.exists(args.pipe):
os.mkfifo(args.pipe)
while os.path.exists(args.pipe):
f = open(args.pipe, 'r')
for l in f:
syslog.syslog(l)
f.close()
syslog.closelog()
| Add a Python program which reads from a pipe and writes the data it gets to syslog. | Add a Python program which reads from a pipe and writes the data it gets
to syslog.
| Python | bsd-3-clause | tonnerre/pipelogger | #!/usr/bin/env python
#
import argparse
import os
import syslog
parser = argparse.ArgumentParser(
description='Syslog messages as read from a pipe')
parser.add_argument('-i', '--ident',
help='Use the given identifier for syslogging',
required=True)
parser.add_argument('pipe', help='Pipe file to read log records from')
args = parser.parse_args()
syslog.openlog(args.ident, 0)
if not os.path.exists(args.pipe):
os.mkfifo(args.pipe)
while os.path.exists(args.pipe):
f = open(args.pipe, 'r')
for l in f:
syslog.syslog(l)
f.close()
syslog.closelog()
| Add a Python program which reads from a pipe and writes the data it gets
to syslog.
|
|
0d2c04790fb6c97b37f6e0700bb0162796e3dc4c | tests/web_api/test_scale_serialization.py | tests/web_api/test_scale_serialization.py | # -*- coding: utf-8 -*-
from openfisca_web_api.loader.parameters import walk_node
from openfisca_core.parameters import ParameterNode, Scale
def test_amount_scale():
parameters = []
metadata = {'location':'foo', 'version':'1', 'repository_url':'foo'}
root_node = ParameterNode(data = {})
amount_scale_data = {'brackets':[{'amount':{'2014-01-01':{'value':0}},'threshold':{'2014-01-01':{'value':1}}}]}
scale = Scale('scale', amount_scale_data, 'foo')
root_node.children['scale'] = scale
walk_node(root_node, parameters, [], metadata)
assert parameters == [{'description': None, 'id': 'scale', 'metadata': {}, 'source': 'foo/blob/1', 'brackets': {'2014-01-01': {1: 0}}}]
| Add unit tests for AmountTaxScale serialization | Add unit tests for AmountTaxScale serialization
| Python | agpl-3.0 | openfisca/openfisca-core,openfisca/openfisca-core | # -*- coding: utf-8 -*-
from openfisca_web_api.loader.parameters import walk_node
from openfisca_core.parameters import ParameterNode, Scale
def test_amount_scale():
parameters = []
metadata = {'location':'foo', 'version':'1', 'repository_url':'foo'}
root_node = ParameterNode(data = {})
amount_scale_data = {'brackets':[{'amount':{'2014-01-01':{'value':0}},'threshold':{'2014-01-01':{'value':1}}}]}
scale = Scale('scale', amount_scale_data, 'foo')
root_node.children['scale'] = scale
walk_node(root_node, parameters, [], metadata)
assert parameters == [{'description': None, 'id': 'scale', 'metadata': {}, 'source': 'foo/blob/1', 'brackets': {'2014-01-01': {1: 0}}}]
| Add unit tests for AmountTaxScale serialization
|
|
1bbffc2152ea1c48b47153005beeb2974b682f3c | bot/actions/action.py | bot/actions/action.py | from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| Fix for_each incorrectly using lazy map operator | Fix for_each incorrectly using lazy map operator
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
for action in self.actions:
func(action)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
| Fix for_each incorrectly using lazy map operator
from bot.api.api import Api
from bot.storage import Config, State, Cache
from bot.utils.dictionaryobject import DictionaryObject
class Event(DictionaryObject):
pass
class Update(Event):
def __init__(self, update, is_pending):
super().__init__()
self.update = update
self.is_pending = is_pending
class Action:
def __init__(self):
pass
def get_name(self):
return self.__class__.__name__
def setup(self, api: Api, config: Config, state: State, cache: Cache):
self.api = api
self.config = config
self.state = state
self.cache = cache
self.post_setup()
def post_setup(self):
pass
def process(self, event):
pass
class ActionGroup(Action):
def __init__(self, *actions):
super().__init__()
self.actions = list(actions)
def add(self, *actions):
self.actions.extend(actions)
def setup(self, *args):
self.for_each(lambda action: action.setup(*args))
super().setup(*args)
def process(self, event):
self.for_each(lambda action: action.process(event._copy()))
def for_each(self, func):
map(func, self.actions)
class IntermediateAction(ActionGroup):
def __init__(self):
super().__init__()
def then(self, *next_actions):
self.add(*next_actions)
return self
def _continue(self, event):
super().process(event)
|
a0dd71a6b56ed8f88f29691edd7a65d84656e019 | anki-blitz.py | anki-blitz.py | # -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1:
return 3
if elapsed_time < 3:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
| # -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
| Increase time limits to 2 and 5 seconds | Increase time limits to 2 and 5 seconds
| Python | mit | jaksz/anki-blitz | # -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 2:
return 3
if elapsed_time < 5:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
| Increase time limits to 2 and 5 seconds
# -*- coding: utf-8 -*-
# Blitz speed reading trainer add-on for Anki
#
# Copyright (C) 2016 Jakub Szypulka, Dave Shifflett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from anki.hooks import addHook
from aqt.reviewer import Reviewer
import time
start_time = None
def onShowQuestion():
global start_time
start_time = time.time()
addHook('showQuestion', onShowQuestion)
def myDefaultEase(self):
elapsed_time = time.time() - start_time
if elapsed_time < 1:
return 3
if elapsed_time < 3:
return 2
else:
return 1
Reviewer._defaultEase = myDefaultEase
|
66ea7be70d37c8431d6daef976c6d5c9a7407ea0 | examples/example_injury.py | examples/example_injury.py | #!/usr/bin/env python
from tabulate import tabulate
from mlbgame import injury
import dateutil.parser
from datetime import datetime
team_id = 117 # Houston Astros
i = injury.Injury(team_id)
injuries = []
for inj in i.injuries:
team = inj.team_name
injury = ['{0}, {1} ({2})'.format(inj.name_last, inj.name_first,
inj.position), inj.insert_ts, inj.injury_status, inj.due_back,
inj.injury_desc, inj.injury_update]
injuries.append(injury)
print tabulate(injuries, headers=[team, 'Updated', 'Status', 'Due Back',
'Injury', 'Notes'])
print
print 'Last Updated: %s' % i.last_update
"""
"""
| Add example for injury class | Add example for injury class
| Python | mit | panzarino/mlbgame,zachpanz88/mlbgame | #!/usr/bin/env python
from tabulate import tabulate
from mlbgame import injury
import dateutil.parser
from datetime import datetime
team_id = 117 # Houston Astros
i = injury.Injury(team_id)
injuries = []
for inj in i.injuries:
team = inj.team_name
injury = ['{0}, {1} ({2})'.format(inj.name_last, inj.name_first,
inj.position), inj.insert_ts, inj.injury_status, inj.due_back,
inj.injury_desc, inj.injury_update]
injuries.append(injury)
print tabulate(injuries, headers=[team, 'Updated', 'Status', 'Due Back',
'Injury', 'Notes'])
print
print 'Last Updated: %s' % i.last_update
"""
"""
| Add example for injury class
|
|
6b358e001c270b4ee735550c829a47c4ee4118b4 | setup.py | setup.py | from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
| # -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
| Include README content as long description. | Include README content as long description.
| Python | mit | Emantor/syslog2irc,homeworkprod/syslog2irc | # -*- coding: utf-8 -*-
import codecs
from setuptools import setup
with codecs.open('README.rst', encoding='utf-8') as f:
long_description = f.read()
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
long_description=long_description,
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
| Include README content as long description.
from setuptools import setup
setup(
name='syslog2IRC',
version='0.8',
description='A proxy to forward syslog messages to IRC',
url='http://homework.nwsnet.de/releases/c474/#syslog2irc',
author='Jochen Kupperschmidt',
author_email='[email protected]',
license='MIT',
classifiers=[
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Internet',
'Topic :: System :: Logging',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
],
)
|
5c442db8a6352c21325f372486409d44ad3f5b76 | ServerBackup.py | ServerBackup.py | #!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
| #!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message) | Backup directory is made, and a notification is sent and logged if the directory doesn't exist | Backup directory is made, and a notification is sent and logged if the directory doesn't exist
| Python | mit | dwieeb/usr-local-bin | #!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message) | Backup directory is made, and a notification is sent and logged if the directory doesn't exist
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
|
7c37d4f95897ddbc061ec0a84185a19899b85b89 | compile_for_dist.py | compile_for_dist.py | #!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
| #!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
| Update shebang to use /usr/bin/env. | Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.
| Python | apache-2.0 | quattor/aquilon-protocols,quattor/aquilon-protocols | #!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
| Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.
#!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
35111353ab8d8cae320b49520fe693114fed160f | bin/parsers/DeploysServiceLookup.py | bin/parsers/DeploysServiceLookup.py |
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
|
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
| Add more service lookups for Deploys | Add more service lookups for Deploys
| Python | apache-2.0 | guardian/alerta,0312birdzhang/alerta,skob/alerta,mrkeng/alerta,guardian/alerta,mrkeng/alerta,guardian/alerta,0312birdzhang/alerta,mrkeng/alerta,0312birdzhang/alerta,skob/alerta,skob/alerta,mrkeng/alerta,guardian/alerta,skob/alerta |
if alert['resource'].startswith('R1'):
alert['service'] = [ 'R1' ]
elif alert['resource'].startswith('R2'):
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif alert['resource'].startswith('frontend'):
alert['service'] = [ 'Frontend' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif alert['resource'].startswith('Identity'):
alert['service'] = [ 'Identity' ]
elif alert['resource'].startswith('Mobile'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Android'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('iOS'):
alert['service'] = [ 'Mobile' ]
elif alert['resource'].startswith('Soulmates'):
alert['service'] = [ 'Soulmates' ]
elif alert['resource'].startswith('Microapps'):
alert['service'] = [ 'MicroApp' ]
elif alert['resource'].startswith('Mutualisation'):
alert['service'] = [ 'Mutualisation' ]
elif alert['resource'].startswith('Ophan'):
alert['service'] = [ 'Ophan' ]
else:
alert['service'] = [ 'Unknown' ]
| Add more service lookups for Deploys
if 'r2' in alert['resource'].lower():
alert['service'] = [ 'R2' ]
elif 'content-api' in alert['resource'].lower():
alert['service'] = [ 'ContentAPI' ]
elif 'flexible' in alert['resource'].lower():
alert['service'] = [ 'FlexibleContent' ]
elif 'frontend' in alert['resource'].lower():
alert['service'] = [ 'Frontend' ]
elif 'mobile' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'android' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'ios' in alert['resource'].lower():
alert['service'] = [ 'Mobile' ]
elif 'identity' in alert['resource'].lower():
alert['service'] = [ 'Identity' ]
elif 'microapps' in alert['resource'].lower():
alert['service'] = [ 'MicroApp' ]
else:
alert['service'] = [ 'Unknown' ]
|
a38f46566b18803d0b5ab0d75a267ee9ac3ceea3 | doc/examples/viennagrid_wrapper/io.py | doc/examples/viennagrid_wrapper/io.py | #!/usr/bin/env python
#
# This example shows how to read and write mesh files using the low-level ViennaGrid
# wrapper for Python (viennagrid.wrapper).
from __future__ import print_function
# In this example, we will set up a domain of triangles in the cartesian 3D
# space from the contents of a Netgen mesh file.
#
# For that purpose, we need to define a domain and, eventually, also a segmentation
# (in case we want to read segmentation data from the mesh file), and we need the
# Netgen reader function, too.
#
# (Notice that the 'read_netgen' function and all other I/O functions
# work with any type of domain and segmentation without name change.)
from viennagrid.wrapper import TriangularCartesian3D_Domain as Domain
from viennagrid.wrapper import TriangularCartesian3D_Segmentation as Segmentation
from viennagrid.wrapper import read_netgen
# In case we want to read only the domain information from the mesh file, we would
# just create an empty domain and call the Netgen reader on it with the file path
# where the mesh file can be found.
domain = Domain()
read_netgen('../data/half-trigate.mesh', domain)
# In case we want to read not only the domain information, but also the segmentation
# information from the mesh file, we would have to create an empty domain and an
# empty segmentation on that domain, and then call the Netgen reader.
domain = Domain()
segmentation = Segmentation(domain)
read_netgen('../data/half-trigate.mesh', domain, segmentation)
| Write an example of the use of the Netgen reader. | Write an example of the use of the Netgen reader.
| Python | mit | jonancm/viennagrid-python,jonancm/viennagrid-python,jonancm/viennagrid-python | #!/usr/bin/env python
#
# This example shows how to read and write mesh files using the low-level ViennaGrid
# wrapper for Python (viennagrid.wrapper).
from __future__ import print_function
# In this example, we will set up a domain of triangles in the cartesian 3D
# space from the contents of a Netgen mesh file.
#
# For that purpose, we need to define a domain and, eventually, also a segmentation
# (in case we want to read segmentation data from the mesh file), and we need the
# Netgen reader function, too.
#
# (Notice that the 'read_netgen' function and all other I/O functions
# work with any type of domain and segmentation without name change.)
from viennagrid.wrapper import TriangularCartesian3D_Domain as Domain
from viennagrid.wrapper import TriangularCartesian3D_Segmentation as Segmentation
from viennagrid.wrapper import read_netgen
# In case we want to read only the domain information from the mesh file, we would
# just create an empty domain and call the Netgen reader on it with the file path
# where the mesh file can be found.
domain = Domain()
read_netgen('../data/half-trigate.mesh', domain)
# In case we want to read not only the domain information, but also the segmentation
# information from the mesh file, we would have to create an empty domain and an
# empty segmentation on that domain, and then call the Netgen reader.
domain = Domain()
segmentation = Segmentation(domain)
read_netgen('../data/half-trigate.mesh', domain, segmentation)
| Write an example of the use of the Netgen reader.
|
|
13208d4656adcf52a5842200ee1d9e079fdffc2b | bin/rate_limit_watcher.py | bin/rate_limit_watcher.py | #!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
| Print rate limits from new JSON response url in a pretty, parsable format | Print rate limits from new JSON response url in a pretty, parsable format
| Python | agpl-3.0 | paulocheque/guides-cms,paulocheque/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,pluralsight/guides-cms,paulocheque/guides-cms | #!/usr/bin/env python
"""
Script to print out Github API rate limit for REPO_OWNER user i.e. the main
github user account used for the guides-cms application.
"""
import argparse
from datetime import datetime
import requests
DOMAIN = 'http://tutorials.pluralsight.com/'
URL = '/gh_rate_limit'
def main(domain):
response = get_rate_limit(domain)
if response:
pprint(response)
def get_rate_limit(domain=DOMAIN):
"""Get rate limit as dictionary"""
url = '%s%s' % (domain, URL)
resp = requests.get(url)
if resp.status_code == 200:
return resp.json()
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
return {}
def pprint(rate_limit):
"""
Pretty print rate limit dictionary to be easily parsable and readable
across multiple lines
"""
# Ignoring the 'rate' key b/c github API claims this will be removed in
# next major version:
# https://developer.github.com/v3/rate_limit/#deprecation-notice
def print_(name, limits):
date_ = datetime.utcfromtimestamp(limits[name]['reset'])
print '%8s remaining: %4s limit: %4s reset: %s' % (
name,
limits[name]['remaining'],
limits[name]['limit'],
date_.strftime('%d-%m-%Y %H:%M:%S'))
print_('core', rate_limit['resources'])
print_('search', rate_limit['resources'])
#u'resources': {u'core': {u'reset': 1462781427, u'limit': 5000, u'remaining': 4923}, u'search': {u'reset': 1462780271, u'limit': 30, u'remaining': 30}}}
def _parse_args():
"""Parse args and get dictionary back"""
parser = argparse.ArgumentParser(description='Get Github.com rate limit')
parser.add_argument('-d', '--domain', action='store', required=False,
default=DOMAIN,
help='Domain to ping for rate limit JSON response (default: %s)' % (DOMAIN))
# Turn odd argparse namespace object into a plain dict
return vars(parser.parse_args())
if __name__ == '__main__':
main(_parse_args()['domain'])
| Print rate limits from new JSON response url in a pretty, parsable format
#!/usr/bin/env python
import requests
URL = 'http://tutorials.pluralsight.com/gh_rate_limit'
def main():
resp = requests.get(URL)
if resp.status_code == 200:
print resp.content
else:
print 'Failed checking rate limit, status_code: %d' % (resp.status_code)
if __name__ == '__main__':
main()
|
84ccc5489b4d3dfdf1883bb777cd597bd9cb8e53 | src/test/testclasses.py | src/test/testclasses.py |
from nose.tools import *
from libeeyore.builtins import add_builtins
from libeeyore.classvalues import *
from libeeyore.environment import EeyEnvironment
from libeeyore.cpp.cppvalues import *
from libeeyore.cpp.cpprenderer import EeyCppRenderer
from eeyasserts import assert_multiline_equal
def test_Static_variable_can_be_read():
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyInit( EeyType( EeyInt ), EeySymbol( "i" ), EeyInt( "7" ) ),
)
)
assert_equal( decl.render( env ), "" )
value = EeySymbol( "MyClass.i" )
assert_equal( value.render( env ), "7" )
|
from nose.tools import *
from libeeyore.builtins import add_builtins
from libeeyore.classvalues import *
from libeeyore.environment import EeyEnvironment
from libeeyore.cpp.cppvalues import *
from libeeyore.cpp.cpprenderer import EeyCppRenderer
from eeyasserts import assert_multiline_equal
def test_Static_variable_can_be_read():
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyInit( EeyType( EeyInt ), EeySymbol( "i" ), EeyInt( "7" ) ),
)
)
assert_equal( decl.render( env ), "" )
value = EeySymbol( "MyClass.i" )
assert_equal( value.render( env ), "7" )
def test_Member_function_can_be_executed():
"""
Note this test may turn out to be incorrect. Python would respond with:
TypeError: unbound method myfunc() must be called with X instance as
first argument (got int instance instead)
"""
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyDef(
EeyType( EeyInt ),
EeySymbol( "myfunc" ),
(
( EeyType( EeyInt ), EeySymbol( "x" ) ),
),
(
EeyReturn( EeySymbol( "x" ) ),
)
),
)
)
assert_equal( decl.render( env ), "" )
value3 = EeyFunctionCall(
EeySymbol( "MyClass.myfunc" ),
(
EeyInt( "3" ),
)
)
value5 = EeyFunctionCall(
EeySymbol( "MyClass.myfunc" ),
(
EeyInt( "5" ),
)
)
assert_equal( value5.render( env ), "5" )
| Add a test that demonstrates calling a function within a class definition. | Add a test that demonstrates calling a function within a class definition.
| Python | mit | andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper,andybalaam/pepper |
from nose.tools import *
from libeeyore.builtins import add_builtins
from libeeyore.classvalues import *
from libeeyore.environment import EeyEnvironment
from libeeyore.cpp.cppvalues import *
from libeeyore.cpp.cpprenderer import EeyCppRenderer
from eeyasserts import assert_multiline_equal
def test_Static_variable_can_be_read():
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyInit( EeyType( EeyInt ), EeySymbol( "i" ), EeyInt( "7" ) ),
)
)
assert_equal( decl.render( env ), "" )
value = EeySymbol( "MyClass.i" )
assert_equal( value.render( env ), "7" )
def test_Member_function_can_be_executed():
"""
Note this test may turn out to be incorrect. Python would respond with:
TypeError: unbound method myfunc() must be called with X instance as
first argument (got int instance instead)
"""
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyDef(
EeyType( EeyInt ),
EeySymbol( "myfunc" ),
(
( EeyType( EeyInt ), EeySymbol( "x" ) ),
),
(
EeyReturn( EeySymbol( "x" ) ),
)
),
)
)
assert_equal( decl.render( env ), "" )
value3 = EeyFunctionCall(
EeySymbol( "MyClass.myfunc" ),
(
EeyInt( "3" ),
)
)
value5 = EeyFunctionCall(
EeySymbol( "MyClass.myfunc" ),
(
EeyInt( "5" ),
)
)
assert_equal( value5.render( env ), "5" )
| Add a test that demonstrates calling a function within a class definition.
from nose.tools import *
from libeeyore.builtins import add_builtins
from libeeyore.classvalues import *
from libeeyore.environment import EeyEnvironment
from libeeyore.cpp.cppvalues import *
from libeeyore.cpp.cpprenderer import EeyCppRenderer
from eeyasserts import assert_multiline_equal
def test_Static_variable_can_be_read():
env = EeyEnvironment( EeyCppRenderer() )
decl = EeyClass(
name=EeySymbol( "MyClass" ),
base_classes=(),
body_stmts=(
EeyInit( EeyType( EeyInt ), EeySymbol( "i" ), EeyInt( "7" ) ),
)
)
assert_equal( decl.render( env ), "" )
value = EeySymbol( "MyClass.i" )
assert_equal( value.render( env ), "7" )
|
9467cfc4fa3f0bd2c269f3d7b61460ddc6851f9f | tests/test_dfw_uncomparables.py | tests/test_dfw_uncomparables.py | """Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
| """Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
| Add test for exception to uncomparable check | Add test for exception to uncomparable check
| Python | bsd-3-clause | jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint | """Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
| Add test for exception to uncomparable check
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
|
f9f41ec4f27ba5fd19ca82d4c04b13bed6627d23 | app/PRESUBMIT.py | app/PRESUBMIT.py | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
| #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
| Make all changes to app/ run on all trybot platforms, not just the big three. Anyone who's changing a header here may break the chromeos build. | Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | rogerwang/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,dushu1203/chromium.src,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,Fireblend/chromium-crosswalk,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,anirudhSK/chromium,hujiajie/pa-chromium,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,rogerwang/chromium,jaruba/chromium.src,littlstar/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,robclark/chromium,keishi/chromium,nacl-webkit/chrome_deps,M4sse/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,junmin-zhu/chromium-rivertrail,dednal/chromium.src,jaruba/chromium.src,zcbenz/cefode-chromium,keishi/chromium,zcbenz/cefode-chromium,markYoungH/chromium.src,patrickm/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,anirudhSK/chromium,keishi/chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,patrickm/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,Just-D/chromium-1,timopulkkinen/BubbleFish,junm
in-zhu/chromium-rivertrail,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,Jonekee/chromium.src,anirudhSK/chromium,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,robclark/chromium,anirudhSK/chromium,robclark/chromium,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,robclark/chromium,Pluto-tv/chromium-crosswalk,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,dednal/chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,Chilledheart/chromium,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,dednal/chromium.src,littlstar/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,rogerwang/chromium,keishi/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ChromiumWebApps/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,Chilledheart/chromium,Just-D/chromium-1,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,keishi/chromium,hgl888/chromium-crosswalk,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,patrickm/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,mogoweb/chro
mium-crosswalk,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,keishi/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,robclark/chromium,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,littlstar/chromium.src,Just-D/chromium-1,patrickm/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,ondra-novak/chromium.src,robclark/chromium,ltilve/chromium,jaruba/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,keishi/chromium,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,rogerwang/chromium,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,timopulkkinen/BubbleFish,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,markYoungH/chromium.src,robclark/chromium,M4sse/chromium.src,Just-D/chromium-1,hujiajie/pa-chromium,timopulkkinen/BubbleFish,dednal/chromi
um.src,mogoweb/chromium-crosswalk,junmin-zhu/chromium-rivertrail,zcbenz/cefode-chromium,M4sse/chromium.src,robclark/chromium,ChromiumWebApps/chromium,jaruba/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,keishi/chromium,fujunwei/chromium-crosswalk,littlstar/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,timopulkkinen/BubbleFish,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,anirudhSK/chromium,chuan9/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ltilve/chromium,hgl888/chromium-crosswalk,ltilve/chromium,junmin-zhu/chromium-rivertrail,patrickm/chromium.src,Jonekee/chromium.src,nacl-webkit/chrome_deps,ltilve/chromium,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,dushu1203/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,rogerwang/chromium,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,Jonekee/chromium.src,zcbenz/cefode-chromium,dednal/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,littlstar/chromium.src,anir
udhSK/chromium,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,robclark/chromium,axinging/chromium-crosswalk,patrickm/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,markYoungH/chromium.src,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,dushu1203/chromium.src,Chilledheart/chromium,ChromiumWebApps/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,hujiajie/pa-chromium,nacl-webkit/chrome_deps,patrickm/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,Just-D/chromium-1,littlstar/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,M4sse/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,keishi/chromium,dednal/chromium.src,keishi/chromium,ChromiumWebApps/chromium,rogerwang/chromium,M4sse/chromium.src,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,rogerwang/chromium,rogerwang/chromium,robclark/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,keishi/chromium | #!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
def GetPreferredTrySlaves():
return ['win', 'linux', 'linux_view', 'linux_chromeos', 'mac']
| Make all changes to app/ run on all trybot platforms, not just the big three.
Anyone who's changing a header here may break the chromeos build.
BUG=none
TEST=none
Review URL: http://codereview.chromium.org/2838027
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@51000 0039d316-1c4b-4281-b951-d872f2087c98
#!/usr/bin/python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the app/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Autogenerated window resources files are off limits
r'.*resource.h$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
|
27b1d403540503f6e9d0ccd679918e3efe63ecf7 | tests/test_navigation.py | tests/test_navigation.py | def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
| def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
| Delete debug comments and tool | Delete debug comments and tool
| Python | agpl-3.0 | PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis | def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
flag = False
test_check_titles(page)
else:
break
| Delete debug comments and tool
def get_menu_titles(page) -> list:
page.wait_for_load_state()
menu_list = page.query_selector_all("//*[@class='toctree-wrapper compound']/ul/li/a")
return [title.as_element().inner_text() for title in menu_list]
flag = True
def test_check_titles(page):
global flag
if(flag):
page.goto("index.html")
page.set_viewport_size({"width": 1050, "height": 600})
menu_list = get_menu_titles(page)
page.wait_for_load_state()
for menu_item in menu_list:
right_arrow = page.query_selector("//*[@id='relations-next']/a")
if(right_arrow):
page.click("//*[@id='relations-next']/a")
page.wait_for_load_state()
page_title = page.title().split(" — ")[0]
assert page_title == menu_item
if("toctree" in page.url):
# check titles for all sub-toctree content
# list_url = page.split("/")[3::]
# new_url = "/".join(list_url)
# test_check_titles(new_url)
flag = False
test_check_titles(page)
else:
break
|
7d130a447786c61c7bfbe6bfe2d87b2c28e32eb6 | shut-up-bird.py | shut-up-bird.py | #!/usr/bin/env python
#
from __future__ import print_function
import os
import sys
import argparse
import logging
| #!/usr/bin/env python
from __future__ import print_function
import os
import sys
import argparse
import json
import tweepy
import pystache
import webbrowser
CONFIG_FILE = '.shut-up-bird.conf'
def tweep_login(consumer_key, consumer_secret, token='', secret=''):
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
if token and secret:
auth.set_access_token(token, secret)
else:
try:
print ("Authenticating ...please wait")
redirect_url = auth.get_authorization_url()
print ("Opening url - {0} ...".format(redirect_url))
webbrowser.open(redirect_url)
verify_code = raw_input("Verification PIN code: ".format(redirect_url))
auth.get_access_token(verify_code)
except tweepy.TweepError as e:
raise Exception("Failed to get request token!", e)
return auth
def tweep_getAPI(auth):
api = tweepy.API(auth)
print("Authenticated as: {0}".format(api.me().screen_name))
return api
def tweep_delete(api):
print ("TEST")
def config_load(config_path):
if not os.path.exists(config_path):
return False
with open(config_path, 'r') as infile:
return json.load(infile)
def config_save(config_path, consumer_key, consumer_secret, token, secret):
data = {'ck': consumer_key, 'cs': consumer_secret, \
't': token, 's': secret }
with open(config_path, 'w') as outfile:
json.dump(data, outfile, indent=2, ensure_ascii=False)
def get_input(message):
return raw_input(message)
###########################
# Main
#
if __name__ == "__main__":
try:
home_dir = os.path.expanduser('~')
config = config_load(os.path.join(home_dir, CONFIG_FILE))
if (config and config['t'] and config['s']):
auth = tweep_login(config['ck'], config['cs'], config['t'], config['s'])
else:
print ("Please provide your Twitter app access keys\n")
consumer_key = get_input("Consumer Key (API Key): ")
consumer_secret = get_input("Consumer Secret (API Secret): ")
auth = tweep_login(consumer_key, consumer_secret)
config_save(os.path.join(home_dir, CONFIG_FILE), consumer_key, \
consumer_secret, auth.access_token, auth.access_token_secret)
api = tweep_getAPI(auth)
except Exception as e:
print ("[ERROR] {0}".format(e))
| Add OAuth authentication and config settings load/save | Add OAuth authentication and config settings load/save
| Python | mit | petarov/shut-up-bird | #!/usr/bin/env python
from __future__ import print_function
import os
import sys
import argparse
import json
import tweepy
import pystache
import webbrowser
CONFIG_FILE = '.shut-up-bird.conf'
def tweep_login(consumer_key, consumer_secret, token='', secret=''):
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
if token and secret:
auth.set_access_token(token, secret)
else:
try:
print ("Authenticating ...please wait")
redirect_url = auth.get_authorization_url()
print ("Opening url - {0} ...".format(redirect_url))
webbrowser.open(redirect_url)
verify_code = raw_input("Verification PIN code: ".format(redirect_url))
auth.get_access_token(verify_code)
except tweepy.TweepError as e:
raise Exception("Failed to get request token!", e)
return auth
def tweep_getAPI(auth):
api = tweepy.API(auth)
print("Authenticated as: {0}".format(api.me().screen_name))
return api
def tweep_delete(api):
print ("TEST")
def config_load(config_path):
if not os.path.exists(config_path):
return False
with open(config_path, 'r') as infile:
return json.load(infile)
def config_save(config_path, consumer_key, consumer_secret, token, secret):
data = {'ck': consumer_key, 'cs': consumer_secret, \
't': token, 's': secret }
with open(config_path, 'w') as outfile:
json.dump(data, outfile, indent=2, ensure_ascii=False)
def get_input(message):
return raw_input(message)
###########################
# Main
#
if __name__ == "__main__":
try:
home_dir = os.path.expanduser('~')
config = config_load(os.path.join(home_dir, CONFIG_FILE))
if (config and config['t'] and config['s']):
auth = tweep_login(config['ck'], config['cs'], config['t'], config['s'])
else:
print ("Please provide your Twitter app access keys\n")
consumer_key = get_input("Consumer Key (API Key): ")
consumer_secret = get_input("Consumer Secret (API Secret): ")
auth = tweep_login(consumer_key, consumer_secret)
config_save(os.path.join(home_dir, CONFIG_FILE), consumer_key, \
consumer_secret, auth.access_token, auth.access_token_secret)
api = tweep_getAPI(auth)
except Exception as e:
print ("[ERROR] {0}".format(e))
| Add OAuth authentication and config settings load/save
#!/usr/bin/env python
#
from __future__ import print_function
import os
import sys
import argparse
import logging
|
e0695ca25c4f9f51233ee006c2a3e00bee473203 | all-domains/algorithms/sorting/insertion-sort-part-1/solution.py | all-domains/algorithms/sorting/insertion-sort-part-1/solution.py | # https://www.hackerrank.com/challenges/insertionsort1
# Python 3
def formatted_print(items):
formatted = ' '.join([str(item) for item in items])
print(formatted)
def insertionSort(items):
# The value to insert is the right most element
length = len(items)-1
value_to_insert = items[length]
start = length-1 # we start at the second last item
for index in range(start, -1, -1):
item = items[index]
items[index+1] = item
if item < value_to_insert:
items[index+1] = value_to_insert
formatted_print(items)
return
formatted_print(items)
# If all the elements are greater than the value to insert,
# insert value at the start of the list
items[0] = value_to_insert
formatted_print(items)
n = input()
x = input()
# x = '2 4 6 8 3'
# x = '2 3 4 5 6 7 8 9 10 1'
items = [int(item) for item in x.split(' ')]
insertionSort(items)
| Implement the beginning of insertion sort | Implement the beginning of insertion sort
https://www.hackerrank.com/challenges/insertionsort1
| Python | mit | arvinsim/hackerrank-solutions | # https://www.hackerrank.com/challenges/insertionsort1
# Python 3
def formatted_print(items):
formatted = ' '.join([str(item) for item in items])
print(formatted)
def insertionSort(items):
# The value to insert is the right most element
length = len(items)-1
value_to_insert = items[length]
start = length-1 # we start at the second last item
for index in range(start, -1, -1):
item = items[index]
items[index+1] = item
if item < value_to_insert:
items[index+1] = value_to_insert
formatted_print(items)
return
formatted_print(items)
# If all the elements are greater than the value to insert,
# insert value at the start of the list
items[0] = value_to_insert
formatted_print(items)
n = input()
x = input()
# x = '2 4 6 8 3'
# x = '2 3 4 5 6 7 8 9 10 1'
items = [int(item) for item in x.split(' ')]
insertionSort(items)
| Implement the beginning of insertion sort
https://www.hackerrank.com/challenges/insertionsort1
|
|
09b5a3f531a3d0498aae21f2c8014b77df5f8d41 | version.py | version.py | # Update uProxy version in all relevant places.
#
# Run with:
# python version.py <new version>
# e.g. python version.py 0.8.10
import json
import collections
import sys
import re
manifest_files = [
'src/chrome/app/dist_build/manifest.json',
'src/chrome/app/dev_build/manifest.json',
'src/chrome/extension/dist_build/manifest.json',
'src/chrome/extension/dev_build/manifest.json',
'src/firefox/package.json',
'package.json',
'bower.json',
]
validVersion = re.match('[0-9]+\.[0-9]+\.[0-9]+', sys.argv[1])
if validVersion == None:
print 'Please enter a valid version number.'
sys.exit()
for filename in manifest_files:
print filename
with open(filename) as manifest:
manifest_data = json.load(manifest, object_pairs_hook=collections.OrderedDict)
manifest_data['version'] = sys.argv[1]
with open(filename, 'w') as dist_manifest:
json.dump(manifest_data, dist_manifest, indent=2, separators=(',', ': '))
dist_manifest.write('\n');
| # Update uProxy version in all relevant places.
#
# Run with:
# python version.py <new version>
# e.g. python version.py 0.8.10
import json
import collections
import sys
import re
manifest_files = [
'src/chrome/app/manifest.json',
'src/chrome/extension/manifest.json',
'src/firefox/package.json',
'package.json',
'bower.json',
]
validVersion = re.match('[0-9]+\.[0-9]+\.[0-9]+', sys.argv[1])
if validVersion == None:
print 'Please enter a valid version number.'
sys.exit()
for filename in manifest_files:
print filename
with open(filename) as manifest:
manifest_data = json.load(manifest, object_pairs_hook=collections.OrderedDict)
manifest_data['version'] = sys.argv[1]
with open(filename, 'w') as dist_manifest:
json.dump(manifest_data, dist_manifest, indent=2, separators=(',', ': '))
dist_manifest.write('\n');
| Update manifest files being bumped. | Update manifest files being bumped.
| Python | apache-2.0 | itplanes/uproxy,chinarustin/uproxy,uProxy/uproxy,dhkong88/uproxy,dhkong88/uproxy,MinFu/uproxy,itplanes/uproxy,jpevarnek/uproxy,dhkong88/uproxy,jpevarnek/uproxy,chinarustin/uproxy,roceys/uproxy,roceys/uproxy,dhkong88/uproxy,uProxy/uproxy,uProxy/uproxy,qida/uproxy,chinarustin/uproxy,roceys/uproxy,chinarustin/uproxy,MinFu/uproxy,itplanes/uproxy,uProxy/uproxy,qida/uproxy,uProxy/uproxy,jpevarnek/uproxy,itplanes/uproxy,dhkong88/uproxy,MinFu/uproxy,chinarustin/uproxy,qida/uproxy,roceys/uproxy,MinFu/uproxy,qida/uproxy,jpevarnek/uproxy,roceys/uproxy,jpevarnek/uproxy,itplanes/uproxy,MinFu/uproxy,qida/uproxy | # Update uProxy version in all relevant places.
#
# Run with:
# python version.py <new version>
# e.g. python version.py 0.8.10
import json
import collections
import sys
import re
manifest_files = [
'src/chrome/app/manifest.json',
'src/chrome/extension/manifest.json',
'src/firefox/package.json',
'package.json',
'bower.json',
]
validVersion = re.match('[0-9]+\.[0-9]+\.[0-9]+', sys.argv[1])
if validVersion == None:
print 'Please enter a valid version number.'
sys.exit()
for filename in manifest_files:
print filename
with open(filename) as manifest:
manifest_data = json.load(manifest, object_pairs_hook=collections.OrderedDict)
manifest_data['version'] = sys.argv[1]
with open(filename, 'w') as dist_manifest:
json.dump(manifest_data, dist_manifest, indent=2, separators=(',', ': '))
dist_manifest.write('\n');
| Update manifest files being bumped.
# Update uProxy version in all relevant places.
#
# Run with:
# python version.py <new version>
# e.g. python version.py 0.8.10
import json
import collections
import sys
import re
manifest_files = [
'src/chrome/app/dist_build/manifest.json',
'src/chrome/app/dev_build/manifest.json',
'src/chrome/extension/dist_build/manifest.json',
'src/chrome/extension/dev_build/manifest.json',
'src/firefox/package.json',
'package.json',
'bower.json',
]
validVersion = re.match('[0-9]+\.[0-9]+\.[0-9]+', sys.argv[1])
if validVersion == None:
print 'Please enter a valid version number.'
sys.exit()
for filename in manifest_files:
print filename
with open(filename) as manifest:
manifest_data = json.load(manifest, object_pairs_hook=collections.OrderedDict)
manifest_data['version'] = sys.argv[1]
with open(filename, 'w') as dist_manifest:
json.dump(manifest_data, dist_manifest, indent=2, separators=(',', ': '))
dist_manifest.write('\n');
|
bf7daa5f6695f6150d65646592ffb47b35fb45db | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='lightstep',
version='2.2.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift==0.9.2',
'jsonpickle',
'pytest',
'basictracer>=2.2,<2.3',
'opentracing>=1.2,<1.3'],
tests_require=['sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
| from setuptools import setup, find_packages
setup(
name='lightstep',
version='2.2.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift==0.9.2',
'jsonpickle',
'pytest',
'basictracer>=2.2,<2.3'],
tests_require=['sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
| Remove explicit OT dep; we get it via basictracer | Remove explicit OT dep; we get it via basictracer
| Python | mit | lightstephq/lightstep-tracer-python | from setuptools import setup, find_packages
setup(
name='lightstep',
version='2.2.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift==0.9.2',
'jsonpickle',
'pytest',
'basictracer>=2.2,<2.3'],
tests_require=['sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
| Remove explicit OT dep; we get it via basictracer
from setuptools import setup, find_packages
setup(
name='lightstep',
version='2.2.0',
description='LightStep Python OpenTracing Implementation',
long_description='',
author='LightStep',
license='',
install_requires=['thrift==0.9.2',
'jsonpickle',
'pytest',
'basictracer>=2.2,<2.3',
'opentracing>=1.2,<1.3'],
tests_require=['sphinx',
'sphinx-epytext'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
keywords=[ 'opentracing', 'lightstep', 'traceguide', 'tracing', 'microservices', 'distributed' ],
packages=find_packages(exclude=['docs*', 'tests*', 'sample*']),
)
|
286cba2b3e7cf323835acd07f1e3bb510d74bcb2 | biopsy/tests.py | biopsy/tests.py | # -*- coding: utf-8 -*-
from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
| # -*- coding: utf-8 -*-
from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda",
status = "status",
exam = "exame"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
self.assertEquals("status",biopsy.status)
self.assertEquals("exame",biopsy.exam)
| Add status and exam in test Biopsy | Add status and exam in test Biopsy
| Python | mit | msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub | # -*- coding: utf-8 -*-
from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda",
status = "status",
exam = "exame"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
self.assertEquals("status",biopsy.status)
self.assertEquals("exame",biopsy.exam)
| Add status and exam in test Biopsy
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
|
d6f6d41665f58e68833b57d8b0d04d113f2c86a9 | ideascube/conf/idb_jor_zaatari.py | ideascube/conf/idb_jor_zaatari.py | """Ideaxbox for Zaatari, Jordan"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['SY', 'JO']
TIME_ZONE = 'Asia/Amman'
LANGUAGE_CODE = 'ar'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
| """Ideaxbox for Zaatari, Jordan"""
from .idb_jor_azraq import * # noqa
ENTRY_ACTIVITY_CHOICES = []
| Make zaatari import from azraq | Make zaatari import from azraq
| Python | agpl-3.0 | ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube | """Ideaxbox for Zaatari, Jordan"""
from .idb_jor_azraq import * # noqa
ENTRY_ACTIVITY_CHOICES = []
| Make zaatari import from azraq
"""Ideaxbox for Zaatari, Jordan"""
from .idb import * # noqa
from django.utils.translation import ugettext_lazy as _
IDEASCUBE_PLACE_NAME = _("city")
COUNTRIES_FIRST = ['SY', 'JO']
TIME_ZONE = 'Asia/Amman'
LANGUAGE_CODE = 'ar'
LOAN_DURATION = 14
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'user_id', 'birth_year', 'gender']
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'birth_year', 'gender', 'id_card_number']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the town'), ['current_occupation', 'school_level']),
(_('Language skills'), ['en_level']),
)
|
97f70e4d285a2ce231442f6544927671ca959c38 | Graphs/nodes_at_same_level.py | Graphs/nodes_at_same_level.py | import unittest
"""
Write a function to connect all adjacent nodes at same level in binary tree.
"""
class Node:
def __init__(self, key, left=None, right=None):
self.key = key
self.left = left
self.right = right
self.next_right = None
def connect_level(root):
if root is None:
return None
left_answer = connect_level(root.left)
right_answer = connect_level(root.right)
if left_answer is not None and right_answer is not None:
left_answer.next_right = right_answer
a = None
b = None
c = None
d = None
if left_answer is not None:
a = left_answer.left
b = left_answer.right
if right_answer is not None:
c = right_answer.left
d = right_answer.right
if a is not None and b is None:
if c is not None:
a.next_right = c
else:
a.next_right = d
elif b is not None:
if c is not None:
b.next_right = c
else:
b.next_right = d
return root
class TestLevelConnection(unittest.TestCase):
def test_level(self):
root = Node(10)
root.left = Node(3)
root.right = Node(5)
root.left.left = Node(4)
root.left.right = Node(1)
root.right.right = Node(2)
root = connect_level(root)
self.assertEqual(root.left.next_right, root.right)
self.assertEqual(root.left.left.next_right, root.left.right)
self.assertEqual(root.left.right.next_right, root.right.right)
| Connect nodes at same level in binary tree | Connect nodes at same level in binary tree
| Python | mit | prathamtandon/g4gproblems | import unittest
"""
Write a function to connect all adjacent nodes at same level in binary tree.
"""
class Node:
def __init__(self, key, left=None, right=None):
self.key = key
self.left = left
self.right = right
self.next_right = None
def connect_level(root):
if root is None:
return None
left_answer = connect_level(root.left)
right_answer = connect_level(root.right)
if left_answer is not None and right_answer is not None:
left_answer.next_right = right_answer
a = None
b = None
c = None
d = None
if left_answer is not None:
a = left_answer.left
b = left_answer.right
if right_answer is not None:
c = right_answer.left
d = right_answer.right
if a is not None and b is None:
if c is not None:
a.next_right = c
else:
a.next_right = d
elif b is not None:
if c is not None:
b.next_right = c
else:
b.next_right = d
return root
class TestLevelConnection(unittest.TestCase):
def test_level(self):
root = Node(10)
root.left = Node(3)
root.right = Node(5)
root.left.left = Node(4)
root.left.right = Node(1)
root.right.right = Node(2)
root = connect_level(root)
self.assertEqual(root.left.next_right, root.right)
self.assertEqual(root.left.left.next_right, root.left.right)
self.assertEqual(root.left.right.next_right, root.right.right)
| Connect nodes at same level in binary tree
|
|
b8f03556991cabab858bb31e5c8cb2f043ad14ce | packages/pcl-reference-assemblies.py | packages/pcl-reference-assemblies.py | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| Use a versioned filename for the PCL profiles. | Use a versioned filename for the PCL profiles.
| Python | mit | mono/bockbuild,mono/bockbuild | import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles-2013-10-25',
version='2013-10-25',
sources=['http://storage.bos.xamarin.com/bot-provisioning/mono-pcl-profiles-2013-10-25.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
| Use a versioned filename for the PCL profiles.
import glob
import os
import shutil
class PCLReferenceAssembliesPackage(Package):
def __init__(self):
Package.__init__(self,
name='mono-pcl-profiles',
version='2013-10-23',
sources=['http://storage.bos.xamarin.com/mono-pcl/58/5825e0404974d87799504a0df75ea4dca91f9bfe/mono-pcl-profiles.tar.gz'])
self.source_dir_name = "mono-pcl-profiles"
def prep(self):
self.extract_archive(self.sources[0],
validate_only=False,
overwrite=True)
def build(self):
pass
# A bunch of shell script written inside python literals ;(
def install(self):
dest = os.path.join(self.prefix, "lib", "mono", "xbuild-frameworks", ".NETPortable")
if not os.path.exists(dest):
os.makedirs(dest)
shutil.rmtree(dest, ignore_errors=True)
pcldir = os.path.join(self.package_build_dir(), self.source_dir_name, ".NETPortable")
self.sh("rsync -abv -q %s/* %s" % (pcldir, dest))
PCLReferenceAssembliesPackage()
|
039f6fa4b26b747432138a8bf9e2754c6daafec3 | byceps/blueprints/api/decorators.py | byceps/blueprints/api/decorators.py | """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
if not _has_valid_api_token():
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _has_valid_api_token() -> bool:
request_token = _extract_token_from_request()
if request_token is None:
return False
api_token = api_service.find_api_token_by_token(request_token)
return api_token is not None and not api_token.suspended
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
api_token = _find_valid_api_token()
if api_token is None:
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
if api_token.suspended:
www_authenticate = WWWAuthenticate('Bearer')
www_authenticate['error'] = 'invalid_token'
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended | Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
api_token = _find_valid_api_token()
if api_token is None:
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
if api_token.suspended:
www_authenticate = WWWAuthenticate('Bearer')
www_authenticate['error'] = 'invalid_token'
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended
"""
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
if not _has_valid_api_token():
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _has_valid_api_token() -> bool:
request_token = _extract_token_from_request()
if request_token is None:
return False
api_token = api_service.find_api_token_by_token(request_token)
return api_token is not None and not api_token.suspended
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
|
5aa48facaf77d8fb6919c960659dfa41f3f1ad78 | fabfile.py | fabfile.py | import os
from fabric.api import *
def unit():
current_dir = os.path.dirname(__file__)
command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir,
"nosetests", "-s", "--verbose", "--with-coverage",
"--cover-package=videolog", "tests/unit/*"])
local(command)
| import os
from fabric.api import *
def clean():
current_dir = os.path.dirname(__file__)
local("find %s -name '*.pyc' -exec rm -f {} \;" % current_dir)
local("rm -rf %s/build" % current_dir)
def unit():
clean()
current_dir = os.path.dirname(__file__)
command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir,
"nosetests", "-s", "--verbose", "--with-coverage",
"--cover-package=videolog", "tests/unit/*"])
local(command)
| Add task clean() to remove *.pyc files | Add task clean() to remove *.pyc files
| Python | mit | rcmachado/pyvideolog | import os
from fabric.api import *
def clean():
current_dir = os.path.dirname(__file__)
local("find %s -name '*.pyc' -exec rm -f {} \;" % current_dir)
local("rm -rf %s/build" % current_dir)
def unit():
clean()
current_dir = os.path.dirname(__file__)
command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir,
"nosetests", "-s", "--verbose", "--with-coverage",
"--cover-package=videolog", "tests/unit/*"])
local(command)
| Add task clean() to remove *.pyc files
import os
from fabric.api import *
def unit():
current_dir = os.path.dirname(__file__)
command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir,
"nosetests", "-s", "--verbose", "--with-coverage",
"--cover-package=videolog", "tests/unit/*"])
local(command)
|
40711777de24d30cfe771f172b221cfdf460d8eb | rng.py | rng.py | from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
| def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
| Revert "Fix python random number generator." | Revert "Fix python random number generator."
| Python | mit | 1yvT0s/illacceptanything,dushmis/illacceptanything,dushmis/illacceptanything,ultranaut/illacceptanything,caioproiete/illacceptanything,triggerNZ/illacceptanything,dushmis/illacceptanything,oneminot/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,caioproiete/illacceptanything,paladique/illacceptanything,ultranaut/illacceptanything,TheWhiteLlama/illacceptanything,oneminot/illacceptanything,TheWhiteLlama/illacceptanything,caioproiete/illacceptanything,caioproiete/illacceptanything,paladique/illacceptanything,ds84182/illacceptanything,illacceptanything/illacceptanything,oneminot/illacceptanything,paladique/illacceptanything,tjhorner/illacceptanything,triggerNZ/illacceptanything,tjhorner/illacceptanything,triggerNZ/illacceptanything,oneminot/illacceptanything,dushmis/illacceptanything,illacceptanything/illacceptanything,TheWhiteLlama/illacceptanything,triggerNZ/illacceptanything,JeffreyCA/illacceptanything,ultranaut/illacceptanything,1yvT0s/illacceptanything,ultranaut/illacceptanything,caioproiete/illacceptanything,JeffreyCA/illacceptanything,1yvT0s/illacceptanything,oneminot/illacceptanything,dushmis/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,1yvT0s/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,1yvT0s/illacceptanything,tjhorner/illacceptanything,oneminot/illacceptanything,paladique/illacceptanything,tjhorner/illacceptanything,dushmis/illacceptanything,JeffreyCA/illacceptanything,tjhorner/illacceptanything,1yvT0s/illacceptanything,tjhorner/illacceptanything,ultranaut/illacceptanything,oneminot/illacceptanything,paladique/illacceptanything,ds84182/illacceptanything,JeffreyCA/illacceptanything,illacceptanything/illacceptanything,paladique/illacceptanything,tjhorner/illacceptanything,TheWhiteLlama/illacceptanything,1yvT0s/illacceptanything,illacceptanything/illacceptanything,1yvT0s/illacceptanything,JeffreyCA/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illac
ceptanything,caioproiete/illacceptanything,ds84182/illacceptanything,paladique/illacceptanything,triggerNZ/illacceptanything,1yvT0s/illacceptanything,illacceptanything/illacceptanything,oneminot/illacceptanything,dushmis/illacceptanything,TheWhiteLlama/illacceptanything,dushmis/illacceptanything,dushmis/illacceptanything,illacceptanything/illacceptanything,JeffreyCA/illacceptanything,tjhorner/illacceptanything,illacceptanything/illacceptanything,ds84182/illacceptanything,tjhorner/illacceptanything,illacceptanything/illacceptanything,dushmis/illacceptanything,caioproiete/illacceptanything,oneminot/illacceptanything,JeffreyCA/illacceptanything,ds84182/illacceptanything,oneminot/illacceptanything,paladique/illacceptanything,1yvT0s/illacceptanything,triggerNZ/illacceptanything,paladique/illacceptanything,caioproiete/illacceptanything,TheWhiteLlama/illacceptanything,JeffreyCA/illacceptanything,triggerNZ/illacceptanything,illacceptanything/illacceptanything,caioproiete/illacceptanything,tjhorner/illacceptanything,TheWhiteLlama/illacceptanything,1yvT0s/illacceptanything,ultranaut/illacceptanything,ultranaut/illacceptanything,paladique/illacceptanything,dushmis/illacceptanything,triggerNZ/illacceptanything,TheWhiteLlama/illacceptanything,TheWhiteLlama/illacceptanything,tjhorner/illacceptanything,ultranaut/illacceptanything,TheWhiteLlama/illacceptanything,caioproiete/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,caioproiete/illacceptanything,oneminot/illacceptanything,triggerNZ/illacceptanything,ds84182/illacceptanything,ds84182/illacceptanything,oneminot/illacceptanything,ultranaut/illacceptanything,caioproiete/illacceptanything,1yvT0s/illacceptanything,caioproiete/illacceptanything,paladique/illacceptanything,ds84182/illacceptanything,illacceptanything/illacceptanything,triggerNZ/illacceptanything,JeffreyCA/illacceptanything,JeffreyCA/illacceptanything,tjhorner/illacceptanything,caioproiete/illacceptanything,ds84182/illacceptanything,ds84182/ill
acceptanything,triggerNZ/illacceptanything,tjhorner/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,ultranaut/illacceptanything,ds84182/illacceptanything,ultranaut/illacceptanything,tjhorner/illacceptanything,TheWhiteLlama/illacceptanything,1yvT0s/illacceptanything,JeffreyCA/illacceptanything,illacceptanything/illacceptanything,illacceptanything/illacceptanything,dushmis/illacceptanything,oneminot/illacceptanything,triggerNZ/illacceptanything,tjhorner/illacceptanything,TheWhiteLlama/illacceptanything,illacceptanything/illacceptanything,illacceptanything/illacceptanything,paladique/illacceptanything,1yvT0s/illacceptanything,oneminot/illacceptanything,oneminot/illacceptanything,ultranaut/illacceptanything,triggerNZ/illacceptanything,ultranaut/illacceptanything,triggerNZ/illacceptanything,JeffreyCA/illacceptanything,dushmis/illacceptanything,ultranaut/illacceptanything,ds84182/illacceptanything,1yvT0s/illacceptanything,caioproiete/illacceptanything,ultranaut/illacceptanything,dushmis/illacceptanything,illacceptanything/illacceptanything,triggerNZ/illacceptanything,dushmis/illacceptanything | def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
| Revert "Fix python random number generator."
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
|
5398a864449db0a1d6ec106ddb839fff3b6afcda | mopidy_frontpanel/frontend.py | mopidy_frontpanel/frontend.py | from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
| from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
if (input == "play"):
pass
elif (input == "pause"):
pass
elif (input == "stop"):
pass
elif (input == "vol_up"):
pass
elif (input == "vol_down"):
pass
else:
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
| Handle playback changes in FrontPanel | Handle playback changes in FrontPanel
| Python | apache-2.0 | nick-bulleid/mopidy-frontpanel | from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
if (input == "play"):
pass
elif (input == "pause"):
pass
elif (input == "stop"):
pass
elif (input == "vol_up"):
pass
elif (input == "vol_down"):
pass
else:
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
| Handle playback changes in FrontPanel
from __future__ import unicode_literals
import logging
from mopidy.core import CoreListener
import pykka
import .menu import BrowseMenu
import .painter import Painter
logger = logging.getLogger(__name__)
class FrontPanel(pykka.ThreadingActor, CoreListener):
def __init__(self, config, core):
super(FrontPanel, self).__init__()
self.core = core
self.painter = Painter(core, self)
self.menu = BrowseMenu(core)
def on_start(self):
self.painter.start()
def handleInput(self, input):
self.menu.handleInput(input)
self.painter.update()
def track_playback_started(self, tl_track):
self.painter.update()
def track_playback_ended(self, tl_track, time_position):
self.painter.update()
|
017e7cae2aac65e405edf341c00a7052b8b13fa6 | minimal/ipython_notebook_config.py | minimal/ipython_notebook_config.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headerssent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
| Set up an IPython config for the minimal image | Set up an IPython config for the minimal image
| Python | bsd-3-clause | mjbright/docker-demo-images,danielballan/docker-demo-images,Zsailer/docker-jupyter-teaching,odewahn/docker-demo-images,modulexcite/docker-demo-images,parente/docker-demo-images,CognitiveScale/docker-demo-images,ericdill/docker-demo-images,pelucid/docker-demo-images,willjharmer/docker-demo-images,CognitiveScale/docker-demo-images,odewahn/docker-demo-images,ericdill/docker-demo-images,mjbright/docker-demo-images,iamjakob/docker-demo-images,tanyaschlusser/docker-demo-images,parente/docker-demo-images,vanceb/docker-demo-images,mjbright/docker-demo-images,Zsailer/docker-jupyter-teaching,jupyter/docker-demo-images,CognitiveScale/docker-demo-images,rgbkrk/docker-demo-images,philipz/docker-demo-images,dietmarw/jupyter-docker-images,danielballan/docker-demo-images,vanceb/docker-demo-images,parente/docker-demo-images,CognitiveScale/docker-demo-images,dietmarw/jupyter-docker-images,iamjakob/docker-demo-images,dietmarw/jupyter-docker-images,rgbkrk/docker-demo-images,iamjakob/docker-demo-images,rgbkrk/docker-demo-images,tanyaschlusser/docker-demo-images,willjharmer/docker-demo-images,tanyaschlusser/docker-demo-images,modulexcite/docker-demo-images,Zsailer/docker-demo-images,danielballan/docker-demo-images,philipz/docker-demo-images,Zsailer/docker-demo-images,Zsailer/docker-jupyter-teaching,philipz/docker-demo-images,modulexcite/docker-demo-images,jupyter/docker-demo-images,pelucid/docker-demo-images,vanceb/docker-demo-images,odewahn/docker-demo-images,ericdill/docker-demo-images,jupyter/docker-demo-images,willjharmer/docker-demo-images,pelucid/docker-demo-images,Zsailer/docker-jupyter-teaching,Zsailer/docker-demo-images | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Configuration file for ipython-notebook.
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888
# Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headerssent by the upstream reverse proxy. Necessary if the proxy handles
# SSL
c.NotebookApp.trust_xheaders = True
# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.tornado_settings = {
'template_path':['/srv/ga/', '/srv/ipython/IPython/html',
'/srv/ipython/IPython/html/templates']
}
| Set up an IPython config for the minimal image
|
|
e4dd679f20a066c86a87a42199f66b288a314fcf | scons-tools/gmcs.py | scons-tools/gmcs.py | import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
| import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
| Use -platform:anycpu while compiling .NET assemblies | Use -platform:anycpu while compiling .NET assemblies | Python | lgpl-2.1 | eyecreate/tapcfg,juhovh/tapcfg,zhanleewo/tapcfg,eyecreate/tapcfg,zhanleewo/tapcfg,juhovh/tapcfg,juhovh/tapcfg,zhanleewo/tapcfg,eyecreate/tapcfg,zhanleewo/tapcfg,juhovh/tapcfg,zhanleewo/tapcfg,juhovh/tapcfg,eyecreate/tapcfg,eyecreate/tapcfg,juhovh/tapcfg | import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCLIBFLAGS'] = SCons.Util.CLVar('-platform:anycpu')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
| Use -platform:anycpu while compiling .NET assemblies
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
csccom = "$CSC $CSCFLAGS $_CSCLIBPATH -r:$_CSCLIBS -out:${TARGET.abspath} $SOURCES"
csclibcom = "$CSC -t:library $CSCLIBFLAGS $_CSCLIBPATH $_CSCLIBS -out:${TARGET.abspath} $SOURCES"
McsBuilder = SCons.Builder.Builder(action = '$CSCCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.exe')
McsLibBuilder = SCons.Builder.Builder(action = '$CSCLIBCOM',
source_factory = SCons.Node.FS.default_fs.Entry,
suffix = '.dll')
def generate(env):
env['BUILDERS']['CLIProgram'] = McsBuilder
env['BUILDERS']['CLILibrary'] = McsLibBuilder
env['CSC'] = 'gmcs'
env['_CSCLIBS'] = "${_stripixes('-r:', CILLIBS, '', '-r', '', __env__)}"
env['_CSCLIBPATH'] = "${_stripixes('-lib:', CILLIBPATH, '', '-r', '', __env__)}"
env['CSCFLAGS'] = SCons.Util.CLVar('')
env['CSCCOM'] = SCons.Action.Action(csccom)
env['CSCLIBCOM'] = SCons.Action.Action(csclibcom)
def exists(env):
return internal_zip or env.Detect('gmcs')
|
fc2aafecf45716067c5bf860a877be2dfca4b7d3 | satsolver/hamilton.py | satsolver/hamilton.py | #!/usr/bin/python
"""
Conversion of the Hamiltonian cycle problem to SAT.
"""
from boolean import *
def hamiltonian_cycle(l):
"""
Convert a directed graph to an instance of SAT that is satisfiable
precisely when the graph has a Hamiltonian cycle.
The graph is given as a list of ordered tuples representing directed edges.
Parallel edges (in the same direction) are not supported. The vertices of
the graph are assumed to be the endpoints of the listed edges (e.g., no
isolated vertices can be specified).
The function returns a boolean expression whose literals are of two types:
- ("e", u, v), where (u, v) is a directed edge in the given graph, and
- ("v", u, i), where u is a vertex and i is an integer between 0 and n-1,
where n is the number of vertices of the graph.
The returned expression is satisfiable precisely when the graph has a
Hamiltonian cycle. If a satisfying valuation is found, a Hamiltonian
cycle can be retrieved as follows:
- the set of all literals ("e", u, v) whose value is true corresponds to
the set of directed edges (u, v) in the Hamiltonian cycle, or
- the set of all literals ("v", u_i, i) whose value is true corresponds to
the cyclically ordered sequence (u_0, u_1, ..., u_{n-1}) of vertices
visited by the Hamiltonian cycle.
"""
terms = []
vertices = set(sum([list(e) for e in l], []))
lin = {u: [] for u in vertices}
lout = {u: [] for u in vertices}
for u, v in l:
lin[v].append(u)
lout[u].append(v)
n = len(vertices)
terms.append(("v", next(iter(vertices)), 0))
for u in vertices:
terms.append(Or([("v", u, i) for i in range(n)]))
terms.append(Or([("e", v, u) for v in lin[u]]))
terms.append(Or([("e", u, v) for v in lout[u]]))
for i in range(n):
for j in range(i+1, n):
terms.append(Not(And(("v", u, i), ("v", u, j))))
ll = lin[u]
m = len(ll)
for i in range(m):
v = ll[i]
for j in range(i+1, m):
terms.append(Not(And(("e", v, u), ("e", ll[j], u))))
ll = lout[u]
m = len(ll)
for i in range(m):
v = ll[i]
for j in range(i+1, m):
terms.append(Not(And(("e", u, v), ("e", u, ll[j]))))
for i in range(n):
terms.append(Implies(And(("v", u, i), ("e", u, v)),
("v", v, (i+1) % n)))
return And(terms)
| Add a conversion from the Hamiltonian cycle problem to SAT | Add a conversion from the Hamiltonian cycle problem to SAT
| Python | mit | jaanos/LVR-2016,jaanos/LVR-2016 | #!/usr/bin/python
"""
Conversion of the Hamiltonian cycle problem to SAT.
"""
from boolean import *
def hamiltonian_cycle(l):
"""
Convert a directed graph to an instance of SAT that is satisfiable
precisely when the graph has a Hamiltonian cycle.
The graph is given as a list of ordered tuples representing directed edges.
Parallel edges (in the same direction) are not supported. The vertices of
the graph are assumed to be the endpoints of the listed edges (e.g., no
isolated vertices can be specified).
The function returns a boolean expression whose literals are of two types:
- ("e", u, v), where (u, v) is a directed edge in the given graph, and
- ("v", u, i), where u is a vertex and i is an integer between 0 and n-1,
where n is the number of vertices of the graph.
The returned expression is satisfiable precisely when the graph has a
Hamiltonian cycle. If a satisfying valuation is found, a Hamiltonian
cycle can be retrieved as follows:
- the set of all literals ("e", u, v) whose value is true corresponds to
the set of directed edges (u, v) in the Hamiltonian cycle, or
- the set of all literals ("v", u_i, i) whose value is true corresponds to
the cyclically ordered sequence (u_0, u_1, ..., u_{n-1}) of vertices
visited by the Hamiltonian cycle.
"""
terms = []
vertices = set(sum([list(e) for e in l], []))
lin = {u: [] for u in vertices}
lout = {u: [] for u in vertices}
for u, v in l:
lin[v].append(u)
lout[u].append(v)
n = len(vertices)
terms.append(("v", next(iter(vertices)), 0))
for u in vertices:
terms.append(Or([("v", u, i) for i in range(n)]))
terms.append(Or([("e", v, u) for v in lin[u]]))
terms.append(Or([("e", u, v) for v in lout[u]]))
for i in range(n):
for j in range(i+1, n):
terms.append(Not(And(("v", u, i), ("v", u, j))))
ll = lin[u]
m = len(ll)
for i in range(m):
v = ll[i]
for j in range(i+1, m):
terms.append(Not(And(("e", v, u), ("e", ll[j], u))))
ll = lout[u]
m = len(ll)
for i in range(m):
v = ll[i]
for j in range(i+1, m):
terms.append(Not(And(("e", u, v), ("e", u, ll[j]))))
for i in range(n):
terms.append(Implies(And(("v", u, i), ("e", u, v)),
("v", v, (i+1) % n)))
return And(terms)
| Add a conversion from the Hamiltonian cycle problem to SAT
|
|
862753353a09400d0d99960ef2cd6d70fb9b4a7e | tests/top_destinations_tests.py | tests/top_destinations_tests.py | import unittest
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
class TestBasicTopDestinations(unittest.TestCase):
def read_config(self):
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def setUp(self):
# Read from config
self.client_id, self.client_secret = self.read_config()
self.sds = sabre_dev_studio.SabreDevStudio()
self.sds.set_credentials(self.client_id, self.client_secret)
self.sds.authenticate()
def test_fn_request(self):
res = self.sds.top_destinations('YYZ', theme='beach',
destination_type='INTERNATIONAL',
region='North America')
self.assertIsNotNone(res)
def test_basic_request(self):
options = {
'origin': 'YYZ',
'destinationtype': 'DOMESTIC',
'lookbackweeks': 2,
'topdestinations': 20
}
res = self.sds.top_destinations_opts(options)
self.assertIsNotNone(res)
def test_no_authorization(self):
sds = sabre_dev_studio.SabreDevStudio()
with self.assertRaises(sabre_exceptions.NotAuthorizedError):
resp = sds.instaflights({})
if __name__ == '__main__':
unittest.main()
| Add tests for top destinations | Add tests for top destinations
| Python | mit | Jamil/sabre_dev_studio | import unittest
import datetime
import json
import sys
sys.path.append('..')
import sabre_dev_studio
import sabre_dev_studio.sabre_exceptions as sabre_exceptions
'''
requires config.json in the same directory for api authentication
{
"sabre_client_id": -----,
"sabre_client_secret": -----
}
'''
class TestBasicTopDestinations(unittest.TestCase):
def read_config(self):
raw_data = open('config.json').read()
data = json.loads(raw_data)
client_secret = data['sabre_client_secret']
client_id = data['sabre_client_id']
return (client_id, client_secret)
def setUp(self):
# Read from config
self.client_id, self.client_secret = self.read_config()
self.sds = sabre_dev_studio.SabreDevStudio()
self.sds.set_credentials(self.client_id, self.client_secret)
self.sds.authenticate()
def test_fn_request(self):
res = self.sds.top_destinations('YYZ', theme='beach',
destination_type='INTERNATIONAL',
region='North America')
self.assertIsNotNone(res)
def test_basic_request(self):
options = {
'origin': 'YYZ',
'destinationtype': 'DOMESTIC',
'lookbackweeks': 2,
'topdestinations': 20
}
res = self.sds.top_destinations_opts(options)
self.assertIsNotNone(res)
def test_no_authorization(self):
sds = sabre_dev_studio.SabreDevStudio()
with self.assertRaises(sabre_exceptions.NotAuthorizedError):
resp = sds.instaflights({})
if __name__ == '__main__':
unittest.main()
| Add tests for top destinations
|
|
ed326fba4f44552eeb206f3c5af9ad6f5e89ca44 | localeurl/models.py | localeurl/models.py | from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
| from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
if reverse_kwargs!=None:
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
else:
locale = translation.get_language()
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
| Handle situation when kwargs is None | Handle situation when kwargs is None
| Python | mit | eugena/django-localeurl | from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
if reverse_kwargs!=None:
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
else:
locale = translation.get_language()
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
| Handle situation when kwargs is None
from django.conf import settings
from django.core import urlresolvers
from django.utils import translation
from localeurl import utils
def reverse(*args, **kwargs):
reverse_kwargs = kwargs.get('kwargs', {})
locale = utils.supported_language(reverse_kwargs.pop('locale',
translation.get_language()))
url = django_reverse(*args, **kwargs)
_, path = utils.strip_script_prefix(url)
return utils.locale_url(path, locale)
django_reverse = None
def patch_reverse():
"""
Monkey-patches the urlresolvers.reverse function. Will not patch twice.
"""
global django_reverse
if urlresolvers.reverse is not reverse:
django_reverse = urlresolvers.reverse
urlresolvers.reverse = reverse
if settings.USE_I18N:
patch_reverse()
|
a84dde598297495fe6f0f8b233b3a3761b0df7d4 | tests/functional/test_warning.py | tests/functional/test_warning.py |
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.join('warnings_demo.py')
demo.write('''
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
from logging import basicConfig
basicConfig()
from warnings import warn
warn("deprecated!", deprecation.PipDeprecationWarning)
''')
result = script.run('python', demo, expect_stderr=True)
assert result.stderr == \
'ERROR:pip._internal.deprecations:DEPRECATION: deprecated!\n'
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
| import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.join('warnings_demo.py')
demo.write(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
| Update test to check newer logic | Update test to check newer logic
| Python | mit | pypa/pip,pfmoore/pip,pypa/pip,pradyunsg/pip,rouge8/pip,xavfernandez/pip,pradyunsg/pip,rouge8/pip,xavfernandez/pip,xavfernandez/pip,rouge8/pip,sbidoul/pip,sbidoul/pip,techtonik/pip,techtonik/pip,techtonik/pip,pfmoore/pip | import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.join('warnings_demo.py')
demo.write(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
| Update test to check newer logic
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.join('warnings_demo.py')
demo.write('''
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
from logging import basicConfig
basicConfig()
from warnings import warn
warn("deprecated!", deprecation.PipDeprecationWarning)
''')
result = script.run('python', demo, expect_stderr=True)
assert result.stderr == \
'ERROR:pip._internal.deprecations:DEPRECATION: deprecated!\n'
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
b202e1cc5e6c5aa65c3ed22ad1e78ec505fa36c4 | cmsplugin_rst/forms.py | cmsplugin_rst/forms.py | from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel | from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"] | Add "fields" attribute to ModelForm. | Add "fields" attribute to ModelForm.
| Python | bsd-3-clause | pakal/cmsplugin-rst,ojii/cmsplugin-rst | from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel
fields = ["name", "body"] | Add "fields" attribute to ModelForm.
from cmsplugin_rst.models import RstPluginModel
from django import forms
help_text = '<a href="http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html">Reference</a>'
class RstPluginForm(forms.ModelForm):
body = forms.CharField(
widget=forms.Textarea(attrs={
'rows':30,
'cols':80,
'style':'font-family:monospace'
}),
help_text=help_text
)
class Meta:
model = RstPluginModel |
abf6af81b5f97ca6b6bb479adb1abfdf502d2a9b | utils/solve-all.py | utils/solve-all.py | import os
import subprocess
import sys
import time
paths = []
for path, dirs, files in os.walk('puzzles'):
for file in files:
paths.append(os.path.join(path, file))
for path in paths:
for method in ['human', 'hybrid']:
start = time.time()
try:
output = subprocess.check_output(['python3', 'takuzu.py', '--method', method, path], stderr = subprocess.STDOUT, timeout = 60)
except subprocess.TimeoutExpired:
output = False
end = time.time()
print('{file}\t{method}\t{time}'.format(
file = '\t'.join(path.rsplit('.', 1)[0].split('/')[1:]),
method = method,
time = '{:.02f}'.format(end - start) if output else 'false',
))
sys.stdout.flush()
| Add a wrapper to solve all puzzles in ./puzzles and print out timings (timeout after a minute) | Add a wrapper to solve all puzzles in ./puzzles and print out timings (timeout after a minute)
| Python | bsd-3-clause | jpverkamp/takuzu | import os
import subprocess
import sys
import time
paths = []
for path, dirs, files in os.walk('puzzles'):
for file in files:
paths.append(os.path.join(path, file))
for path in paths:
for method in ['human', 'hybrid']:
start = time.time()
try:
output = subprocess.check_output(['python3', 'takuzu.py', '--method', method, path], stderr = subprocess.STDOUT, timeout = 60)
except subprocess.TimeoutExpired:
output = False
end = time.time()
print('{file}\t{method}\t{time}'.format(
file = '\t'.join(path.rsplit('.', 1)[0].split('/')[1:]),
method = method,
time = '{:.02f}'.format(end - start) if output else 'false',
))
sys.stdout.flush()
| Add a wrapper to solve all puzzles in ./puzzles and print out timings (timeout after a minute)
|
|
ede7a27ca8862bdd1b9b0b7a113b80d055492ae1 | debexpo/config/__init__.py | debexpo/config/__init__.py | import os.path
import pylons
from paste.deploy import appconfig
def easy_app_init(ini_path):
ini_path = os.path.abspath(ini_path)
assert os.path.exists(ini_path)
# Initialize Pylons app
conf = appconfig('config:' + ini_path)
import debexpo.config.environment
pylons.config = debexpo.config.environment.load_environment(conf.global_conf, conf.local_conf)
| Add a simple app initialization function since paster shell is busted | Add a simple app initialization function since paster shell is busted
| Python | mit | jonnylamb/debexpo,jadonk/debexpo,jonnylamb/debexpo,jonnylamb/debexpo,swvist/Debexpo,jadonk/debexpo,swvist/Debexpo,swvist/Debexpo,jadonk/debexpo | import os.path
import pylons
from paste.deploy import appconfig
def easy_app_init(ini_path):
ini_path = os.path.abspath(ini_path)
assert os.path.exists(ini_path)
# Initialize Pylons app
conf = appconfig('config:' + ini_path)
import debexpo.config.environment
pylons.config = debexpo.config.environment.load_environment(conf.global_conf, conf.local_conf)
| Add a simple app initialization function since paster shell is busted
|
|
ba4a3caef1f361992aa7887d1f434510060d434f | hackingignores.py | hackingignores.py | #!/usr/bin/python3
import collections
import glob
# Run from openstack git org directory
# Format
# Rule: [repo]
result = collections.defaultdict(list)
for file in glob.glob("*/tox.ini"):
repo = file.split('/')[0]
with open(file) as f:
for line in f.readlines():
if line.startswith("ignore"):
ignore = line.strip().split('=')[1].split(',')
for rule in ignore:
if "H" not in rule:
# We only care about hacking rules
continue
result[rule].append(repo)
print("rule: number of ignores")
for k in result:
print("%s: %s" % (k, len(result[k])))
print("-- %s" % result[k])
| Add code to track which hacking rules are ignored | Add code to track which hacking rules are ignored
| Python | apache-2.0 | jogo/hackingignores | #!/usr/bin/python3
import collections
import glob
# Run from openstack git org directory
# Format
# Rule: [repo]
result = collections.defaultdict(list)
for file in glob.glob("*/tox.ini"):
repo = file.split('/')[0]
with open(file) as f:
for line in f.readlines():
if line.startswith("ignore"):
ignore = line.strip().split('=')[1].split(',')
for rule in ignore:
if "H" not in rule:
# We only care about hacking rules
continue
result[rule].append(repo)
print("rule: number of ignores")
for k in result:
print("%s: %s" % (k, len(result[k])))
print("-- %s" % result[k])
| Add code to track which hacking rules are ignored
|
|
46009d28e2b6285722287ccbeaa8d2f9c6c47fde | ldap_dingens/default_config.py | ldap_dingens/default_config.py | from datetime import timedelta
class DefaultConfiguration:
DEBUG = False
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USER = None
MAIL_PASSWORD = None
MAIL_CAFILE = None
INVITATION_SUBJECT = "Invitation to join the FSFW!"
TOKEN_BYTES = 5
TOKEN_LIFETIME = timedelta(days=7)
LOGIN_LIFETIME = timedelta(days=2)
#: Host name of the LDAP server
LDAP_SERVER = "localhost"
#: str.format string to create a DN which refers to a user with a given
#: loginname
LDAP_USER_DN_FORMAT = "uid={loginname},ou=Account,dc=fsfw-dresden,dc=de"
#: the DN to bind to for admin activity (create new users, change user
#: info)
LDAP_ADMIN_DN = "cn=AuthManager,ou=Management,dc=fsfw-dresden,dc=de"
#: set this to the password for the LDAP_ADMIN_DN above
LDAP_ADMIN_PASSWORD = ...
| from datetime import timedelta
class DefaultConfiguration:
DEBUG = False
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USER = None
MAIL_PASSWORD = None
MAIL_CAFILE = None
INVITATION_SUBJECT = "Invitation to join the FSFW!"
TOKEN_BYTES = 5
TOKEN_LIFETIME = timedelta(days=7)
LOGIN_LIFETIME = timedelta(days=2)
LDAP_SERVER = "localhost"
| Remove pointless default values from DefaultConfiguration | Remove pointless default values from DefaultConfiguration
| Python | agpl-3.0 | fsfw-dresden/ldap-dingens,fsfw-dresden/ldap-dingens | from datetime import timedelta
class DefaultConfiguration:
DEBUG = False
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USER = None
MAIL_PASSWORD = None
MAIL_CAFILE = None
INVITATION_SUBJECT = "Invitation to join the FSFW!"
TOKEN_BYTES = 5
TOKEN_LIFETIME = timedelta(days=7)
LOGIN_LIFETIME = timedelta(days=2)
LDAP_SERVER = "localhost"
| Remove pointless default values from DefaultConfiguration
from datetime import timedelta
class DefaultConfiguration:
DEBUG = False
MAIL_SERVER = "localhost"
MAIL_PORT = 25
MAIL_USER = None
MAIL_PASSWORD = None
MAIL_CAFILE = None
INVITATION_SUBJECT = "Invitation to join the FSFW!"
TOKEN_BYTES = 5
TOKEN_LIFETIME = timedelta(days=7)
LOGIN_LIFETIME = timedelta(days=2)
#: Host name of the LDAP server
LDAP_SERVER = "localhost"
#: str.format string to create a DN which refers to a user with a given
#: loginname
LDAP_USER_DN_FORMAT = "uid={loginname},ou=Account,dc=fsfw-dresden,dc=de"
#: the DN to bind to for admin activity (create new users, change user
#: info)
LDAP_ADMIN_DN = "cn=AuthManager,ou=Management,dc=fsfw-dresden,dc=de"
#: set this to the password for the LDAP_ADMIN_DN above
LDAP_ADMIN_PASSWORD = ...
|
22ae3a2e9a236de61c078d234d920a3e6bc62d7b | pylisp/application/lispd/address_tree/ddt_container_node.py | pylisp/application/lispd/address_tree/ddt_container_node.py | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
| '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| Add a bit of docs | Add a bit of docs
| Python | bsd-3-clause | steffann/pylisp | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| Add a bit of docs
'''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
|
c24a7287d0ac540d6ef6dcf353b06ee42aaa7a43 | serrano/decorators.py | serrano/decorators.py | from functools import wraps
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
def get_token(request):
return request.REQUEST.get('token', '')
def check_auth(func):
@wraps(func)
def inner(self, request, *args, **kwargs):
auth_required = getattr(settings, 'SERRANO_AUTH_REQUIRED', False)
user = getattr(request, 'user', None)
# Attempt to authenticate if a token is present
if not user or not user.is_authenticated():
token = get_token(request)
user = authenticate(token=token)
if user:
login(request, user)
elif auth_required:
return HttpResponse(status=401)
return func(self, request, *args, **kwargs)
return inner
| from functools import wraps
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
def get_token(request):
"Attempts to retrieve a token from the request."
if 'token' in request.REQUEST:
return request.REQUEST['token']
if 'HTTP_API_TOKEN' in request.META:
return request.META['HTTP_API_TOKEN']
return ''
def check_auth(func):
@wraps(func)
def inner(self, request, *args, **kwargs):
auth_required = getattr(settings, 'SERRANO_AUTH_REQUIRED', False)
user = getattr(request, 'user', None)
# Attempt to authenticate if a token is present
if not user or not user.is_authenticated():
token = get_token(request)
user = authenticate(token=token)
if user:
login(request, user)
elif auth_required:
return HttpResponse(status=401)
return func(self, request, *args, **kwargs)
return inner
| Add support for extracting the token from request headers | Add support for extracting the token from request headers
Clients can now set the `Api-Token` header instead of supplying the
token as a GET or POST parameter. | Python | bsd-2-clause | chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano | from functools import wraps
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
def get_token(request):
"Attempts to retrieve a token from the request."
if 'token' in request.REQUEST:
return request.REQUEST['token']
if 'HTTP_API_TOKEN' in request.META:
return request.META['HTTP_API_TOKEN']
return ''
def check_auth(func):
@wraps(func)
def inner(self, request, *args, **kwargs):
auth_required = getattr(settings, 'SERRANO_AUTH_REQUIRED', False)
user = getattr(request, 'user', None)
# Attempt to authenticate if a token is present
if not user or not user.is_authenticated():
token = get_token(request)
user = authenticate(token=token)
if user:
login(request, user)
elif auth_required:
return HttpResponse(status=401)
return func(self, request, *args, **kwargs)
return inner
| Add support for extracting the token from request headers
Clients can now set the `Api-Token` header instead of supplying the
token as a GET or POST parameter.
from functools import wraps
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
def get_token(request):
return request.REQUEST.get('token', '')
def check_auth(func):
@wraps(func)
def inner(self, request, *args, **kwargs):
auth_required = getattr(settings, 'SERRANO_AUTH_REQUIRED', False)
user = getattr(request, 'user', None)
# Attempt to authenticate if a token is present
if not user or not user.is_authenticated():
token = get_token(request)
user = authenticate(token=token)
if user:
login(request, user)
elif auth_required:
return HttpResponse(status=401)
return func(self, request, *args, **kwargs)
return inner
|
83ed5ca9bc388dbe9b2d82510842a99b3a2e5ce7 | src/personalisation/middleware.py | src/personalisation/middleware.py | from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
| from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
| Create empty 'segments' object in session if none exists | Create empty 'segments' object in session if none exists
| Python | mit | LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation | from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
if not request.session.get('segments'):
request.session['segments'] = []
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
| Create empty 'segments' object in session if none exists
from personalisation.models import AbstractBaseRule, Segment
class SegmentMiddleware(object):
"""Middleware for testing and putting a user in a segment"""
def __init__(self, get_response=None):
self.get_response = get_response
def __call__(self, request):
segments = Segment.objects.all().filter(status="enabled")
chosen_segments = []
for segment in segments:
rules = AbstractBaseRule.objects.filter(segment=segment).select_subclasses()
result = self.test_rules(rules, request)
if result:
self.add_segment_to_user(segment, request)
response = self.get_response(request)
print(request.session['segments'])
return response
def test_rules(self, rules, request):
for rule in rules:
result = rule.test_user(request)
if result is False:
return False
return True
def add_segment_to_user(self, segment, request):
if 'segments' not in request.session:
request.session['segments'] = []
if segment not in request.session['segments']:
request.session['segments'].append(segment.encoded_name())
|
35f41aa03285180e380274ba95e882906f4cbbc8 | setup.py | setup.py | import os
import sys
import re
from setuptools import setup, find_packages
# Extract __version__ from the package source so setup.py never has to
# import dogpile.cache (which may have unmet dependencies at build time).
with open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py')) as v:
    VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)

readme = os.path.join(os.path.dirname(__file__), 'README.rst')
# Read the long description up front: the previous inline
# open(readme).read() leaked the file handle.
with open(readme) as f:
    long_description = f.read()

setup(name='dogpile.cache',
      version=VERSION,
      description="A caching front-end based on the Dogpile lock.",
      long_description=long_description,
      classifiers=[
      'Development Status :: 4 - Beta',
      'Intended Audience :: Developers',
      'License :: OSI Approved :: BSD License',
      'Programming Language :: Python',
      'Programming Language :: Python :: 3',
      ],
      keywords='caching',
      author='Mike Bayer',
      author_email='[email protected]',
      url='http://bitbucket.org/zzzeek/dogpile.cache',
      license='BSD',
      packages=find_packages('.', exclude=['ez_setup', 'tests*']),
      namespace_packages=['dogpile'],
      entry_points="""
      [mako.cache]
      dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
      """,
      zip_safe=False,
      install_requires=['dogpile.core>=0.4.1'],
      test_suite='nose.collector',
      tests_require=['nose', 'mock'],
      )
| import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='[email protected]',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock', 'Mako'],
)
| Add missing test Mako test dependency. | Add missing test Mako test dependency.
| Python | bsd-3-clause | thruflo/dogpile.cache,thruflo/dogpile.cache | import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='[email protected]',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock', 'Mako'],
)
| Add missing test Mako test dependency.
import os
import sys
import re
from setuptools import setup, find_packages
v = open(os.path.join(os.path.dirname(__file__), 'dogpile', 'cache', '__init__.py'))
VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1)
v.close()
readme = os.path.join(os.path.dirname(__file__), 'README.rst')
setup(name='dogpile.cache',
version=VERSION,
description="A caching front-end based on the Dogpile lock.",
long_description=open(readme).read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
keywords='caching',
author='Mike Bayer',
author_email='[email protected]',
url='http://bitbucket.org/zzzeek/dogpile.cache',
license='BSD',
packages=find_packages('.', exclude=['ez_setup', 'tests*']),
namespace_packages=['dogpile'],
entry_points="""
[mako.cache]
dogpile.cache = dogpile.cache.plugins.mako_cache:MakoPlugin
""",
zip_safe=False,
install_requires=['dogpile.core>=0.4.1'],
test_suite='nose.collector',
tests_require=['nose', 'mock'],
)
|
5c435749b043f0605e9d1b5279a5a8fd4a5a1c25 | pyfolio/tests/test_nbs.py | pyfolio/tests/test_nbs.py | #!/usr/bin/env python
"""
simple example script for running notebooks and reporting exceptions.
Usage: `checkipnb.py foo.ipynb [bar.ipynb [...]]`
Each cell is submitted to the kernel, and checked for errors.
"""
import os
import glob
from runipy.notebook_runner import NotebookRunner
from IPython.nbformat.current import read
from pyfolio.utils import pyfolio_root
def test_nbs():
    """Run every example notebook; any cell error fails the test."""
    pattern = os.path.join(pyfolio_root(), 'examples', '*.ipynb')
    for notebook_path in glob.glob(pattern):
        with open(notebook_path) as handle:
            notebook = read(handle, 'json')
        NotebookRunner(notebook).run_notebook(skip_exceptions=False)
| #!/usr/bin/env python
"""
simple example script for running notebooks and reporting exceptions.
Usage: `checkipnb.py foo.ipynb [bar.ipynb [...]]`
Each cell is submitted to the kernel, and checked for errors.
"""
import os
import glob
from runipy.notebook_runner import NotebookRunner
from IPython.nbformat.current import read
from pyfolio.utils import pyfolio_root
def test_nbs():
    """Run every example notebook; any cell error fails the test.

    The bayesian notebook needs PyMC3, which is an optional dependency,
    so it is skipped when pyfolio.bayesian cannot be imported.
    """
    path = os.path.join(pyfolio_root(), 'examples', '*.ipynb')
    for ipynb in glob.glob(path):
        # See if bayesian is useable before we run a test
        if ipynb.endswith('bayesian.ipynb'):
            try:
                import pyfolio.bayesian  # NOQA
            except ImportError:
                # Catch only ImportError: the previous bare `except:` would
                # also have hidden genuine bugs raised during the import.
                continue
        with open(ipynb) as f:
            nb = read(f, 'json')
        nb_runner = NotebookRunner(nb)
        nb_runner.run_notebook(skip_exceptions=False)
| Make nb_tests for bayesian optional because PyMC3 is not a hard dependency | TST: Make nb_tests for bayesian optional because PyMC3 is not a hard dependency
| Python | apache-2.0 | ChinaQuants/pyfolio,chayapan/pyfolio,ChinaQuants/pyfolio,quantopian/pyfolio,YihaoLu/pyfolio,quantopian/pyfolio,femtotrader/pyfolio,femtotrader/pyfolio,YihaoLu/pyfolio | #!/usr/bin/env python
"""
simple example script for running notebooks and reporting exceptions.
Usage: `checkipnb.py foo.ipynb [bar.ipynb [...]]`
Each cell is submitted to the kernel, and checked for errors.
"""
import os
import glob
from runipy.notebook_runner import NotebookRunner
from IPython.nbformat.current import read
from pyfolio.utils import pyfolio_root
def test_nbs():
path = os.path.join(pyfolio_root(), 'examples', '*.ipynb')
for ipynb in glob.glob(path):
# See if bayesian is useable before we run a test
if ipynb.endswith('bayesian.ipynb'):
try:
import pyfolio.bayesian # NOQA
except:
continue
with open(ipynb) as f:
nb = read(f, 'json')
nb_runner = NotebookRunner(nb)
nb_runner.run_notebook(skip_exceptions=False)
| TST: Make nb_tests for bayesian optional because PyMC3 is not a hard dependency
#!/usr/bin/env python
"""
simple example script for running notebooks and reporting exceptions.
Usage: `checkipnb.py foo.ipynb [bar.ipynb [...]]`
Each cell is submitted to the kernel, and checked for errors.
"""
import os
import glob
from runipy.notebook_runner import NotebookRunner
from IPython.nbformat.current import read
from pyfolio.utils import pyfolio_root
def test_nbs():
path = os.path.join(pyfolio_root(), 'examples', '*.ipynb')
for ipynb in glob.glob(path):
with open(ipynb) as f:
nb = read(f, 'json')
nb_runner = NotebookRunner(nb)
nb_runner.run_notebook(skip_exceptions=False)
|
e89faebd357cc9c929950ef38cafc97524dee205 | setup.py | setup.py | from setuptools import setup, find_packages
import os
version = '0.1'


def _read(name):
    """Return the text of *name*, closing the handle (the previous inline
    open(...).read() calls leaked three file handles)."""
    with open(name) as f:
        return f.read()


long_description = (
    _read('README.txt')
    + '\n' +
    'Contributors\n'
    '============\n'
    + '\n' +
    _read('CONTRIBUTORS.txt')
    + '\n' +
    _read('CHANGES.txt')
    + '\n')

requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']

setup(name='mist.monitor',
      version=version,
      description="Monitoring node for the https://mist.io service",
      long_description=long_description,
      # Get more strings from
      # http://pypi.python.org/pypi?%3Aaction=list_classifiers
      classifiers=[
        "Programming Language :: Python",
        ],
      keywords='',
      author='',
      author_email='',
      url='https://mist.io/',
      license='copyright',
      packages=find_packages('src'),
      package_dir = {'': 'src'},
      namespace_packages=['mist'],
      include_package_data=True,
      zip_safe=False,
      install_requires= requires,
      entry_points="""
      # -*- Entry points: -*-
      [paste.app_factory]
      main = mist.monitor:main
      """,
      )
| from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
| Comment out numpy, scipy which cause problems in buildout | Comment out numpy, scipy which cause problems in buildout
| Python | apache-2.0 | mistio/mist.monitor,mistio/mist.monitor | from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo',]# 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
| Comment out numpy, scipy which cause problems in buildout
from setuptools import setup, find_packages
import os
version = '0.1'
long_description = (
open('README.txt').read()
+ '\n' +
'Contributors\n'
'============\n'
+ '\n' +
open('CONTRIBUTORS.txt').read()
+ '\n' +
open('CHANGES.txt').read()
+ '\n')
requires = ['pyramid', 'PasteScript', 'requests', 'pymongo', 'numpy', 'scipy==0.10.0']
setup(name='mist.monitor',
version=version,
description="Monitoring node for the https://mist.io service",
long_description=long_description,
# Get more strings from
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='',
author_email='',
url='https://mist.io/',
license='copyright',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['mist'],
include_package_data=True,
zip_safe=False,
install_requires= requires,
entry_points="""
# -*- Entry points: -*-
[paste.app_factory]
main = mist.monitor:main
""",
)
|
5e1272c7c442c759116d6f85fc587514ce97b667 | scripts/list-components.py | scripts/list-components.py | """Prints the names of components installed on a WMT executor."""
import os
import yaml
wmt_executor = os.environ['wmt_executor']
cfg_file = 'wmt-config-' + wmt_executor + '.yaml'
try:
with open(cfg_file, 'r') as fp:
cfg = yaml.safe_load(fp)
except IOError:
raise
components = cfg['components'].keys()
for item in components:
print item
| Add script to print components installed on executor | Add script to print components installed on executor
It reads them from the YAML file output from `cmt-config`.
| Python | mit | csdms/wmt-metadata | """Prints the names of components installed on a WMT executor."""
import os
import yaml
wmt_executor = os.environ['wmt_executor']
cfg_file = 'wmt-config-' + wmt_executor + '.yaml'
try:
with open(cfg_file, 'r') as fp:
cfg = yaml.safe_load(fp)
except IOError:
raise
components = cfg['components'].keys()
for item in components:
print item
| Add script to print components installed on executor
It reads them from the YAML file output from `cmt-config`.
|
|
b65b0ed8d09d4a22164f16ed60f7c5b71d6f54db | setup.py | setup.py | import setuptools
from gitvendor.version import Version
from setuptools import find_packages
CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'License :: OSI Approved :: MIT License',
    'Environment :: Console',
    'Topic :: Software Development'
]

# Read the long description with a context manager; the previous inline
# open('README.md').read() leaked the file handle.
with open('README.md') as readme:
    long_description = readme.read().strip()

setuptools.setup(name='git-vendor',
                 version=Version('0.0.1').number,
                 description='Vendor tagged releases from git to $VCS',
                 long_description=long_description,
                 author='Charles Butler',
                 author_email='[email protected]',
                 url='http://github.com/chuckbutler/git-vendor',
                 py_modules=[],
                 packages=find_packages(),
                 entry_points={
                     'console_scripts': [
                         'git-vendor = gitvendor.cli:main'
                     ],
                 },
                 install_requires=['gitpython', 'jinja2', 'pyyaml', 'path.py',
                                   'dirsync', 'six'],
                 package_data={
                     'template': ['template/vendor-rc'],
                 },
                 include_package_data=True,
                 license='MIT License',
                 zip_safe=False,
                 keywords='git, vendor',
                 classifiers=CLASSIFIERS)
| import setuptools
from gitvendor.version import Version
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Topic :: Software Development'
]
setuptools.setup(name='git-vendor',
version=Version('0.0.3').number,
description='Vendor tagged releases from git to $VCS',
long_description=open('README.md').read().strip(),
author='Charles Butler',
author_email='[email protected]',
url='http://github.com/chuckbutler/git-vendor',
download_url='https://github.com/chuckbutler/git-vendor/releases/',
py_modules=[],
packages=find_packages(),
entry_points={
'console_scripts': [
'git-vendor = gitvendor.cli:main'
],
},
install_requires=['gitpython', 'jinja2', 'pyyaml', 'path.py',
'dirsync', 'six'],
package_data={
'template': ['template/vendor-rc'],
},
include_package_data=True,
license='MIT License',
zip_safe=False,
keywords='git, vendor',
classifiers=CLASSIFIERS)
| Move download_url and bump version | Move download_url and bump version
| Python | mit | chuckbutler/git-vendor | import setuptools
from gitvendor.version import Version
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Topic :: Software Development'
]
setuptools.setup(name='git-vendor',
version=Version('0.0.3').number,
description='Vendor tagged releases from git to $VCS',
long_description=open('README.md').read().strip(),
author='Charles Butler',
author_email='[email protected]',
url='http://github.com/chuckbutler/git-vendor',
download_url='https://github.com/chuckbutler/git-vendor/releases/',
py_modules=[],
packages=find_packages(),
entry_points={
'console_scripts': [
'git-vendor = gitvendor.cli:main'
],
},
install_requires=['gitpython', 'jinja2', 'pyyaml', 'path.py',
'dirsync', 'six'],
package_data={
'template': ['template/vendor-rc'],
},
include_package_data=True,
license='MIT License',
zip_safe=False,
keywords='git, vendor',
classifiers=CLASSIFIERS)
| Move download_url and bump version
import setuptools
from gitvendor.version import Version
from setuptools import find_packages
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Topic :: Software Development'
]
setuptools.setup(name='git-vendor',
version=Version('0.0.1').number,
description='Vendor tagged releases from git to $VCS',
long_description=open('README.md').read().strip(),
author='Charles Butler',
author_email='[email protected]',
url='http://github.com/chuckbutler/git-vendor',
py_modules=[],
packages=find_packages(),
entry_points={
'console_scripts': [
'git-vendor = gitvendor.cli:main'
],
},
install_requires=['gitpython', 'jinja2', 'pyyaml', 'path.py',
'dirsync', 'six'],
package_data={
'template': ['template/vendor-rc'],
},
include_package_data=True,
license='MIT License',
zip_safe=False,
keywords='git, vendor',
classifiers=CLASSIFIERS)
|
96421cfe9711c77fb27a028d8e942bffd3059dd3 | project/api/urls.py | project/api/urls.py | from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet
from django.conf.urls import url, include
from rest_framework.authtoken import views
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from rest_framework.authtoken import views
schema_view = get_schema_view(title='Grailed API')
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'channels', ChannelViewSet)
router.register(r'messages', MessageViewSet)
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
url(r'^schema/$', schema_view),
url(r'^', include(router.urls)),
url(r'^', include('rest_auth.urls')),
url(r'^registration/$', include('rest_auth.registration.urls')),
url(r'^api-token-auth/', views.obtain_auth_token), # fet token with username and password
] | from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet
from django.conf.urls import url, include
from rest_framework.authtoken import views
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from rest_framework.authtoken import views
schema_view = get_schema_view(title='Grailed API')
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'channels', ChannelViewSet)
router.register(r'messages', MessageViewSet)
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
url(r'^schema/$', schema_view),
url(r'^', include(router.urls)),
url(r'^', include('rest_auth.urls')),
url(r'^registration/', include('rest_auth.registration.urls')),
url(r'^api-token-auth/', views.obtain_auth_token), # fet token with username and password
] | Fix regex for registration url | Fix regex for registration url
| Python | mit | djstein/messages-grailed | from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet
from django.conf.urls import url, include
from rest_framework.authtoken import views
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from rest_framework.authtoken import views
schema_view = get_schema_view(title='Grailed API')
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'channels', ChannelViewSet)
router.register(r'messages', MessageViewSet)
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
url(r'^schema/$', schema_view),
url(r'^', include(router.urls)),
url(r'^', include('rest_auth.urls')),
url(r'^registration/', include('rest_auth.registration.urls')),
url(r'^api-token-auth/', views.obtain_auth_token), # fet token with username and password
] | Fix regex for registration url
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet
from django.conf.urls import url, include
from rest_framework.authtoken import views
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from rest_framework.authtoken import views
schema_view = get_schema_view(title='Grailed API')
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'channels', ChannelViewSet)
router.register(r'messages', MessageViewSet)
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
url(r'^schema/$', schema_view),
url(r'^', include(router.urls)),
url(r'^', include('rest_auth.urls')),
url(r'^registration/$', include('rest_auth.registration.urls')),
url(r'^api-token-auth/', views.obtain_auth_token), # fet token with username and password
] |
4ce1742472fc636aebd176a33b5fa7a819713fe7 | setup.py | setup.py | import re
from setuptools import setup
_versionRE = re.compile(r'__version__\s*=\s*\"([^\"]+)\"')
# read the version number for the settings file
with open('lib/glyphConstruction.py', "r") as settings:
code = settings.read()
found = _versionRE.search(code)
assert found is not None, "glyphConstruction __version__ not found"
__version__ = found.group(1)
setup(
name='glyphConstruction',
version=__version__,
author='Frederik Berlaen',
author_email='[email protected]',
url='https://github.com/typemytype/GlyphConstruction',
license='LICENSE.txt',
description='Letter shape description language',
long_description='Letter shape description language',
install_requires=[],
py_modules=["glyphConstruction"],
package_dir={'': 'Lib'}
)
| import re
from setuptools import setup
_versionRE = re.compile(r'__version__\s*=\s*\"([^\"]+)\"')

# read the version number from the settings file
with open('Lib/glyphConstruction.py', "r") as settings:
    code = settings.read()
found = _versionRE.search(code)
# Raise explicitly instead of using `assert`, which is silently stripped
# when Python runs with -O and would let a missing version slip through.
if found is None:
    raise RuntimeError("glyphConstruction __version__ not found")
__version__ = found.group(1)

setup(
    name='glyphConstruction',
    version=__version__,
    author='Frederik Berlaen',
    author_email='[email protected]',
    url='https://github.com/typemytype/GlyphConstruction',
    license='LICENSE.txt',
    description='Letter shape description language',
    long_description='Letter shape description language',
    install_requires=[],
    py_modules=["glyphConstruction"],
    package_dir={'': 'Lib'}
)
| Fix case of file path | Fix case of file path
It is impossible to build/install this on Linux with this path broken because
all file systems are case sensitive. I'll assume this only slipped by because
it was only ever tested on Windows or another case insensitive filesystem. | Python | mit | typemytype/GlyphConstruction,moyogo/glyphconstruction,moyogo/glyphconstruction,typemytype/GlyphConstruction | import re
from setuptools import setup
_versionRE = re.compile(r'__version__\s*=\s*\"([^\"]+)\"')
# read the version number for the settings file
with open('Lib/glyphConstruction.py', "r") as settings:
code = settings.read()
found = _versionRE.search(code)
assert found is not None, "glyphConstruction __version__ not found"
__version__ = found.group(1)
setup(
name='glyphConstruction',
version=__version__,
author='Frederik Berlaen',
author_email='[email protected]',
url='https://github.com/typemytype/GlyphConstruction',
license='LICENSE.txt',
description='Letter shape description language',
long_description='Letter shape description language',
install_requires=[],
py_modules=["glyphConstruction"],
package_dir={'': 'Lib'}
)
| Fix case of file path
It is impossible to build/install this on Linux with this path broken because
all file systems are case sensitive. I'll assume this only slipped by because
it was only ever tested on Windows or another case insensitive filesystem.
import re
from setuptools import setup
_versionRE = re.compile(r'__version__\s*=\s*\"([^\"]+)\"')
# read the version number for the settings file
with open('lib/glyphConstruction.py', "r") as settings:
code = settings.read()
found = _versionRE.search(code)
assert found is not None, "glyphConstruction __version__ not found"
__version__ = found.group(1)
setup(
name='glyphConstruction',
version=__version__,
author='Frederik Berlaen',
author_email='[email protected]',
url='https://github.com/typemytype/GlyphConstruction',
license='LICENSE.txt',
description='Letter shape description language',
long_description='Letter shape description language',
install_requires=[],
py_modules=["glyphConstruction"],
package_dir={'': 'Lib'}
)
|
8032fd5bf99b7c235e75617b45c77e38dcba4ec7 | core/migrations/0023_alter_homepage_featured_section_integer_block.py | core/migrations/0023_alter_homepage_featured_section_integer_block.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
import wagtail.wagtailcore.blocks
import wagtail.wagtailimages.blocks
import wagtail.wagtailimages.models
class Migration(migrations.Migration):
    """Auto-generated migration altering HomePage.featured_content.

    Rewrites the StreamField definition so the featured_section block
    carries position_from_left/position_from_top integer sub-blocks
    (percentage offsets, per their help_text) plus an optional image label.
    """

    dependencies = [
        ('core', '0022_remove_filter_field_for_wagtail_1_9_upgrade'),
    ]
    operations = [
        migrations.AlterField(
            model_name='homepage',
            name='featured_content',
            # Machine-generated by makemigrations; do not hand-edit the
            # block structure below.
            field=wagtail.wagtailcore.fields.StreamField([('featured_section', wagtail.wagtailcore.blocks.StructBlock([(b'title', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'link_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock()), (b'position_from_left', wagtail.wagtailcore.blocks.IntegerBlock(help_text='Value in percentage (Max: 75)', max_value=75, default=9, min_value=0, required=True)), (b'position_from_top', wagtail.wagtailcore.blocks.IntegerBlock(help_text='Value in percentage (Max: 40)', max_value=40, default=30, min_value=0, required=True)), (b'featured_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), (b'featured_image_label', wagtail.wagtailcore.blocks.CharBlock(required=False))]))], null=True, blank=True),
        ),
    ]
| Add migration for homepage featured-section integer_block | Add migration for homepage featured-section integer_block
| Python | bsd-3-clause | PARINetwork/pari,PARINetwork/pari,PARINetwork/pari,PARINetwork/pari | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import wagtail.wagtailcore.fields
import wagtail.wagtailcore.blocks
import wagtail.wagtailimages.blocks
import wagtail.wagtailimages.models
class Migration(migrations.Migration):
dependencies = [
('core', '0022_remove_filter_field_for_wagtail_1_9_upgrade'),
]
operations = [
migrations.AlterField(
model_name='homepage',
name='featured_content',
field=wagtail.wagtailcore.fields.StreamField([('featured_section', wagtail.wagtailcore.blocks.StructBlock([(b'title', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'link_text', wagtail.wagtailcore.blocks.CharBlock(required=False)), (b'url', wagtail.wagtailcore.blocks.CharBlock()), (b'position_from_left', wagtail.wagtailcore.blocks.IntegerBlock(help_text='Value in percentage (Max: 75)', max_value=75, default=9, min_value=0, required=True)), (b'position_from_top', wagtail.wagtailcore.blocks.IntegerBlock(help_text='Value in percentage (Max: 40)', max_value=40, default=30, min_value=0, required=True)), (b'featured_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), (b'featured_image_label', wagtail.wagtailcore.blocks.CharBlock(required=False))]))], null=True, blank=True),
),
]
| Add migration for homepage featured-section integer_block
|
|
39228ca69262511b1d0efbfc437dda19c097d530 | logger.py | logger.py | from time import strftime
"""logger.py: A simple logging module"""
__author__ = "Prajesh Ananthan"
def printDebug(text):
    """Print *text* on a timestamped line tagged DEBUG."""
    stamped = '%d/%b/%Y %H:%M:%S DEBUG | {}'.format(text)
    print(strftime(stamped))
def printInfo(text):
    """Print *text* on a timestamped line tagged INFO."""
    stamped = '%d/%b/%Y %H:%M:%S INFO | {}'.format(text)
    print(strftime(stamped))
def printWarning(text):
    """Print *text* on a timestamped line tagged WARNING."""
    stamped = '%d/%b/%Y %H:%M:%S WARNING | {}'.format(text)
    print(strftime(stamped))
| from time import strftime
"""logger.py: A simple logging module"""
__author__ = "Prajesh Ananthan"
def _log(level, text):
    """Print *text* prefixed with a timestamp and *level* tag.

    Shared helper: the four public functions previously duplicated the
    same format-then-strftime expression verbatim.
    """
    print(strftime('%d/%b/%Y %H:%M:%S {} | {}'.format(level, text)))

def DEBUG(text):
    """Log *text* at DEBUG level."""
    _log('DEBUG', text)

def INFO(text):
    """Log *text* at INFO level."""
    _log('INFO', text)

def WARNING(text):
    """Log *text* at WARNING level."""
    _log('WARNING', text)

def ERROR(text):
    """Log *text* at ERROR level."""
    _log('ERROR', text)
| Update on the method names | Update on the method names
| Python | mit | prajesh-ananthan/Tools | from time import strftime
"""logger.py: A simple logging module"""
__author__ = "Prajesh Ananthan"
def DEBUG(text):
print(strftime('%d/%b/%Y %H:%M:%S DEBUG | {}'.format(text)))
def INFO(text):
print(strftime('%d/%b/%Y %H:%M:%S INFO | {}'.format(text)))
def WARNING(text):
print(strftime('%d/%b/%Y %H:%M:%S WARNING | {}'.format(text)))
def ERROR(text):
print(strftime('%d/%b/%Y %H:%M:%S ERROR | {}'.format(text)))
| Update on the method names
from time import strftime
"""logger.py: A simple logging module"""
__author__ = "Prajesh Ananthan"
def printDebug(text):
print(strftime('%d/%b/%Y %H:%M:%S DEBUG | {}'.format(text)))
def printInfo(text):
print(strftime('%d/%b/%Y %H:%M:%S INFO | {}'.format(text)))
def printWarning(text):
print(strftime('%d/%b/%Y %H:%M:%S WARNING | {}'.format(text)))
|
8332dc01c3c743543f4c3faff44da84436ae5da2 | planner/forms.py | planner/forms.py | from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
| from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser, Trip, Step
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
    """Login form with Bootstrap-styled email/password widgets.

    Only the widgets of the default AuthenticationForm fields are
    overridden; validation behaviour is inherited unchanged.
    """
    # Collected via an email input, but the field stays a plain CharField,
    # so no email validation is added here.
    username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
                                                              'class': 'form-control',
                                                              }))
    password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
                                                                 'class': 'form-control',
                                                                 }))
class SearchTrip(forms.Form):
    """Trip search parameters.

    The *_id fields are meant to be rendered hidden: their values are
    expected to come from an autocomplete AJAX request backed by a
    separate, visible CharField.
    """
    origin_id = forms.IntegerField()
    destination_id = forms.IntegerField()
    datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
    """Edit a PoolingUser's pooling-specific profile data."""
    class Meta:
        model = PoolingUser
        # Exclude the one-to-one relation with User
        fields = ['birth_date', 'driving_license', 'cellphone_number']
class TripForm(forms.ModelForm):
    """Create/edit a Trip's own fields: departure date and passenger cap."""
    class Meta:
        model = Trip
        fields = ['date_origin', 'max_num_passengers']
class StepForm(forms.ModelForm):
    """Create/edit one Step (leg) of a trip: endpoints, times, price cap."""
    class Meta:
        model = Step
        fields = ['origin', 'destination', 'hour_origin', 'hour_destination', 'max_price']
class UserForm(UserCreationForm):
    """Registration form exposing email/first/last name, all mandatory.

    password1's validator list is replaced wholesale with a single
    MinLengthValidator(6) — NOTE(review): this drops any validators the
    base field declared; confirm that is intended.
    """
    class Meta(UserCreationForm.Meta):
        fields = ('email', 'first_name', 'last_name')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Every exposed field is required for registration.
        for field_name in self.Meta.fields:
            self[field_name].field.required = True
        # self[name] is a BoundField; .field reaches the underlying form field.
        self['password1'].field.validators = [MinLengthValidator(6)]
| Add Trip and Step ModelForms | Add Trip and Step ModelForms
| Python | mit | livingsilver94/getaride,livingsilver94/getaride,livingsilver94/getaride | from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser, Trip, Step
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class TripForm(forms.ModelForm):
class Meta:
model = Trip
fields = ['date_origin', 'max_num_passengers']
class StepForm(forms.ModelForm):
class Meta:
model = Step
fields = ['origin', 'destination', 'hour_origin', 'hour_destination', 'max_price']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
| Add Trip and Step ModelForms
from django.contrib.auth.forms import AuthenticationForm
from django import forms
from django.core.validators import MinLengthValidator
from .models import PoolingUser
from users.forms import UserCreationForm
class LoginForm(AuthenticationForm):
username = forms.CharField(widget=forms.EmailInput(attrs={'placeholder': 'Email',
'class': 'form-control',
}))
password = forms.CharField(widget=forms.PasswordInput(attrs={'placeholder': 'Password',
'class': 'form-control',
}))
class SearchTrip(forms.Form):
"""
Pay attention that id fields are meant to be hidden, since we suppose they come from
an autocomplete AJAX request via an another CharField.
"""
origin_id = forms.IntegerField()
destination_id = forms.IntegerField()
datetime = forms.DateTimeField()
class PoolingUserForm(forms.ModelForm):
class Meta:
model = PoolingUser
# Exclude the one-to-one relation with User
fields = ['birth_date', 'driving_license', 'cellphone_number']
class UserForm(UserCreationForm):
class Meta(UserCreationForm.Meta):
fields = ('email', 'first_name', 'last_name')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
for field_name in self.Meta.fields:
self[field_name].field.required = True
self['password1'].field.validators = [MinLengthValidator(6)]
|
aa9cb1bc1a04de4e4a4a787881123e2a60aaeb4e | docs/apps.py | docs/apps.py | import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
| import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
| Increase the ES timeout to 1 minute. | Increase the ES timeout to 1 minute.
| Python | bsd-3-clause | rmoorman/djangoproject.com,hassanabidpk/djangoproject.com,relekang/djangoproject.com,django/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,hassanabidpk/djangoproject.com,gnarf/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,django/djangoproject.com,nanuxbe/django,rmoorman/djangoproject.com,relekang/djangoproject.com,alawnchen/djangoproject.com,rmoorman/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,hassanabidpk/djangoproject.com,xavierdutreilh/djangoproject.com,vxvinh1511/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,vxvinh1511/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,alawnchen/djangoproject.com,gnarf/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com,django/djangoproject.com,gnarf/djangoproject.com,gnarf/djangoproject.com | import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
'timeout': 60.0,
},
)
| Increase the ES timeout to 1 minute.
import certifi
from django.apps import AppConfig
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from elasticsearch_dsl.connections import connections
class DocsConfig(AppConfig):
name = 'docs'
verbose_name = _('Documentation')
def ready(self):
super(DocsConfig, self).ready()
# Configure Elasticsearch connections for connection pooling.
connections.configure(
default={
'hosts': settings.ES_HOST,
'verify_certs': True,
'ca_certs': certifi.where(),
},
)
|
808fdc4351254c8f5b32d5997803562091121044 | cinderella/cinderella/settings/production.py | cinderella/cinderella/settings/production.py | from .base import *
DEBUG = False
ALLOWED_HOSTS = ['188.226.249.33', 'cinderella.li']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['DB_NAME'],
'USER': os.environ['DB_USER'],
'PASSWORD': os.environ['DB_PASSWORD'],
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
| from .base import *
DEBUG = False
ALLOWED_HOSTS = ['cinderella.li']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['DB_NAME'],
'USER': os.environ['DB_USER'],
'PASSWORD': os.environ['DB_PASSWORD'],
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
| Remove IP from allowed hosts | Remove IP from allowed hosts
| Python | mit | jasisz/cinderella,jasisz/cinderella | from .base import *
DEBUG = False
ALLOWED_HOSTS = ['cinderella.li']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['DB_NAME'],
'USER': os.environ['DB_USER'],
'PASSWORD': os.environ['DB_PASSWORD'],
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
| Remove IP from allowed hosts
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['188.226.249.33', 'cinderella.li']
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['DB_NAME'],
'USER': os.environ['DB_USER'],
'PASSWORD': os.environ['DB_PASSWORD'],
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
|
967ed5fa4297bc4091a0474eab95f6b082c4bba2 | PythonWhiteLibrary/setup.py | PythonWhiteLibrary/setup.py | import distutils.sysconfig
from distutils.core import setup
setup(name = 'robotframework-whitelibrary',
version = '0.0.1',
description = 'Windows GUI testing library for Robot Framework',
author = 'SALabs',
url = 'https://github.com/Omenia/robotframework-whitelibrary',
package_dir = {'' : 'src'},
py_modules = ['WhiteLibrary'],
package_data = {'robotframework-whitelibrary': ["../WhiteLibrary/bin/CSWhiteLibrary.dll"]},
)
| import distutils.sysconfig
from distutils.core import setup
setup(name = 'robotframework-whitelibrary',
version = '0.0.1',
description = 'Windows GUI testing library for Robot Framework',
author = 'SALabs',
url = 'https://github.com/Omenia/robotframework-whitelibrary',
package_dir = {'' : 'src'},
py_modules = ['WhiteLibrary'],
package_data = {'robotframework-whitelibrary': ["WhiteLibrary/bin/CSWhiteLibrary.dll"]},
)
| Revert "Trying to fix the path" | Revert "Trying to fix the path"
This reverts commit f89b139ba7e17af8bc7ca42a8cc9a3f821825454.
| Python | apache-2.0 | Omenia/robotframework-whitelibrary,Omenia/robotframework-whitelibrary | import distutils.sysconfig
from distutils.core import setup
setup(name = 'robotframework-whitelibrary',
version = '0.0.1',
description = 'Windows GUI testing library for Robot Framework',
author = 'SALabs',
url = 'https://github.com/Omenia/robotframework-whitelibrary',
package_dir = {'' : 'src'},
py_modules = ['WhiteLibrary'],
package_data = {'robotframework-whitelibrary': ["WhiteLibrary/bin/CSWhiteLibrary.dll"]},
)
| Revert "Trying to fix the path"
This reverts commit f89b139ba7e17af8bc7ca42a8cc9a3f821825454.
import distutils.sysconfig
from distutils.core import setup
setup(name = 'robotframework-whitelibrary',
version = '0.0.1',
description = 'Windows GUI testing library for Robot Framework',
author = 'SALabs',
url = 'https://github.com/Omenia/robotframework-whitelibrary',
package_dir = {'' : 'src'},
py_modules = ['WhiteLibrary'],
package_data = {'robotframework-whitelibrary': ["../WhiteLibrary/bin/CSWhiteLibrary.dll"]},
)
|
b6b1117df271dae8adefa8cb8d3413b73fb393ce | touchpad_listener/touchpad_listener.py | touchpad_listener/touchpad_listener.py |
import serial
import sonic
sonic_pi = sonic.SonicPi()
connection = serial.Serial('/dev/tty.usbmodem1421', 115200)
while True:
line = connection.readline()
command, argument = line.strip().split(' ', 1)
if command == 'pad':
number = int(argument)
sonic_pi.run('cue :pad, number: {}'.format(number))
|
import serial
import sonic
import glob
sonic_pi = sonic.SonicPi()
connection = serial.Serial(glob.glob('/dev/tty.usbmodem*')[0], 115200)
while True:
line = connection.readline()
command, argument = line.strip().split(' ', 1)
if command == 'pad':
number = int(argument)
sonic_pi.run('cue :pad, number: {}'.format(number))
| Use `glob` to find an appropriate serial ttry | Use `glob` to find an appropriate serial ttry | Python | bsd-2-clause | CoderDojoScotland/coderdojo-sequencer,jonathanhogg/coderdojo-sequencer |
import serial
import sonic
import glob
sonic_pi = sonic.SonicPi()
connection = serial.Serial(glob.glob('/dev/tty.usbmodem*')[0], 115200)
while True:
line = connection.readline()
command, argument = line.strip().split(' ', 1)
if command == 'pad':
number = int(argument)
sonic_pi.run('cue :pad, number: {}'.format(number))
| Use `glob` to find an appropriate serial ttry
import serial
import sonic
sonic_pi = sonic.SonicPi()
connection = serial.Serial('/dev/tty.usbmodem1421', 115200)
while True:
line = connection.readline()
command, argument = line.strip().split(' ', 1)
if command == 'pad':
number = int(argument)
sonic_pi.run('cue :pad, number: {}'.format(number))
|
2a1f1ca653fcd0a8fbaa465ba664da0a1ede6306 | simuvex/s_run.py | simuvex/s_run.py | #!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
| #!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
| Make SimRun a new-style Python class. | Make SimRun a new-style Python class.
| Python | bsd-2-clause | chubbymaggie/simuvex,iamahuman/angr,chubbymaggie/angr,chubbymaggie/simuvex,angr/angr,zhuyue1314/simuvex,schieb/angr,iamahuman/angr,angr/angr,tyb0807/angr,f-prettyland/angr,axt/angr,tyb0807/angr,f-prettyland/angr,chubbymaggie/angr,angr/angr,schieb/angr,iamahuman/angr,schieb/angr,tyb0807/angr,angr/simuvex,axt/angr,chubbymaggie/simuvex,chubbymaggie/angr,axt/angr,f-prettyland/angr | #!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun(object):
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
| Make SimRun a new-style Python class.
#!/usr/bin/env python
from .s_ref import RefTypes
import s_options as o
class SimRun:
def __init__(self, options = None, mode = None):
# the options and mode
if options is None:
options = o.default_options[mode]
self.options = options
self.mode = mode
self._exits = [ ]
self._refs = { }
self.options = options
for t in RefTypes:
self._refs[t] = [ ]
def refs(self):
return self._refs
def exits(self):
return self._exits
# Categorize and add a sequence of refs to this run
def add_refs(self, *refs):
for r in refs:
if o.SYMBOLIC not in self.options and r.is_symbolic():
continue
self._refs[type(r)].append(r)
# Categorize and add a sequence of exits to this run
def add_exits(self, *exits):
for e in exits:
if o.SYMBOLIC not in self.options and e.sim_value.is_symbolic():
continue
self._exits.append(e)
# Copy the references
def copy_refs(self, other):
for ref_list in other.refs().itervalues():
self.add_refs(*ref_list)
|
bda42a4630e8b9e720443b6785ff2e3435bfdfa6 | pybaseball/team_results.py | pybaseball/team_results.py | import pandas as pd
import requests
from bs4 import BeautifulSoup
# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)
def get_soup(season, team):
# get most recent year's schedule if year not specified
if(season is None):
season = datetime.datetime.today().strftime("%Y")
url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
s=requests.get(url).content
return BeautifulSoup(s, "html.parser")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")]
headings = headings[1:] # the "gm#" heading doesn't have a <td> element
headings[3] = "Home_Away"
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row_index in range(len(rows)-1): #last row is a description of column meanings
row = rows[row_index]
try:
cols = row.find_all('td')
#links = row.find_all('a')
if cols[3].text == "":
cols[3].string = 'Home' # this element only has an entry if it's an away game
if cols[12].text == "":
cols[12].string = "None" # tie games won't have a pitcher win or loss
if cols[13].text == "":
cols[13].string = "None"
if cols[14].text == "":
cols[14].string = "None" # games w/o saves have blank td entry
if cols[8].text == "":
cols[8].string = "9" # entry is blank if no extra innings
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols if ele])
except:
# two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
# if future games, grab the scheduling info. Otherwise do nothing.
if len(cols)>1:
cols = [ele.text.strip() for ele in cols][0:5]
data.append([ele for ele in cols if ele])
#convert to pandas dataframe. make first row the table's column names and reindex.
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def schedule_and_record(season=None, team=None):
# retrieve html from baseball reference
soup = get_soup(season, team)
table = get_table(soup)
return table
| Add code for getting team schedule and game outcomes | Add code for getting team schedule and game outcomes
| Python | mit | jldbc/pybaseball | import pandas as pd
import requests
from bs4 import BeautifulSoup
# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)
def get_soup(season, team):
# get most recent year's schedule if year not specified
if(season is None):
season = datetime.datetime.today().strftime("%Y")
url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
s=requests.get(url).content
return BeautifulSoup(s, "html.parser")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")]
headings = headings[1:] # the "gm#" heading doesn't have a <td> element
headings[3] = "Home_Away"
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row_index in range(len(rows)-1): #last row is a description of column meanings
row = rows[row_index]
try:
cols = row.find_all('td')
#links = row.find_all('a')
if cols[3].text == "":
cols[3].string = 'Home' # this element only has an entry if it's an away game
if cols[12].text == "":
cols[12].string = "None" # tie games won't have a pitcher win or loss
if cols[13].text == "":
cols[13].string = "None"
if cols[14].text == "":
cols[14].string = "None" # games w/o saves have blank td entry
if cols[8].text == "":
cols[8].string = "9" # entry is blank if no extra innings
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols if ele])
except:
# two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
# if future games, grab the scheduling info. Otherwise do nothing.
if len(cols)>1:
cols = [ele.text.strip() for ele in cols][0:5]
data.append([ele for ele in cols if ele])
#convert to pandas dataframe. make first row the table's column names and reindex.
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def schedule_and_record(season=None, team=None):
# retrieve html from baseball reference
soup = get_soup(season, team)
table = get_table(soup)
return table
| Add code for getting team schedule and game outcomes
|
|
bcd5ea69815405508d7f862754f910fe381172b9 | responsive/context_processors.py | responsive/context_processors.py | from django.core.exceptions import ImproperlyConfigured
from .conf import settings
from .utils import Device
def device(request):
responsive_middleware = 'responsive.middleware.ResponsiveMiddleware'
if responsive_middleware not in settings.MIDDLEWARE_CLASSES:
raise ImproperlyConfigured(
"responsive context_processors requires the responsive middleware to "
"be installed. Edit your MIDDLEWARE_CLASSES setting to insert"
"the 'responsive.middleware.ResponsiveMiddleware'")
device_obj = getattr(request, settings.RESPONSIVE_VARIABLE_NAME, None)
if not device_obj:
device_obj = Device()
return {
settings.RESPONSIVE_VARIABLE_NAME: device_obj
}
| from django.core.exceptions import ImproperlyConfigured
from .conf import settings
from .utils import Device
def device(request):
responsive_middleware = 'responsive.middleware.ResponsiveMiddleware'
if responsive_middleware not in settings.MIDDLEWARE_CLASSES:
raise ImproperlyConfigured(
"You must enable the 'ResponsiveMiddleware'. Edit your "
"MIDDLEWARE_CLASSES setting to insert"
"the 'responsive.middleware.ResponsiveMiddleware'")
device_obj = getattr(request, settings.RESPONSIVE_VARIABLE_NAME, None)
if not device_obj:
device_obj = Device()
return {
settings.RESPONSIVE_VARIABLE_NAME: device_obj
}
| Update message for missing ResponsiveMiddleware | Update message for missing ResponsiveMiddleware
| Python | bsd-3-clause | mishbahr/django-responsive2,mishbahr/django-responsive2 | from django.core.exceptions import ImproperlyConfigured
from .conf import settings
from .utils import Device
def device(request):
responsive_middleware = 'responsive.middleware.ResponsiveMiddleware'
if responsive_middleware not in settings.MIDDLEWARE_CLASSES:
raise ImproperlyConfigured(
"You must enable the 'ResponsiveMiddleware'. Edit your "
"MIDDLEWARE_CLASSES setting to insert"
"the 'responsive.middleware.ResponsiveMiddleware'")
device_obj = getattr(request, settings.RESPONSIVE_VARIABLE_NAME, None)
if not device_obj:
device_obj = Device()
return {
settings.RESPONSIVE_VARIABLE_NAME: device_obj
}
| Update message for missing ResponsiveMiddleware
from django.core.exceptions import ImproperlyConfigured
from .conf import settings
from .utils import Device
def device(request):
responsive_middleware = 'responsive.middleware.ResponsiveMiddleware'
if responsive_middleware not in settings.MIDDLEWARE_CLASSES:
raise ImproperlyConfigured(
"responsive context_processors requires the responsive middleware to "
"be installed. Edit your MIDDLEWARE_CLASSES setting to insert"
"the 'responsive.middleware.ResponsiveMiddleware'")
device_obj = getattr(request, settings.RESPONSIVE_VARIABLE_NAME, None)
if not device_obj:
device_obj = Device()
return {
settings.RESPONSIVE_VARIABLE_NAME: device_obj
}
|
89363fb720d259b60f9ec6d9872f59db1a28e14c | examples/Gauss_example.py | examples/Gauss_example.py | import sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
| Add script variant of the Gauss example | Add script variant of the Gauss example
| Python | bsd-3-clause | lintusj1/elfi,elfi-dev/elfi,elfi-dev/elfi,HIIT/elfi,lintusj1/elfi | import sys
import time
import numpy as np
from abcpy.core import *
from abcpy.distributions import *
from distributed import Client
from dask.dot import dot_graph
from functools import partial
import matplotlib
import matplotlib.pyplot as plt
def normal_simu(n, mu, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n)
u = mu + latents
y = u
return y
def mean(y):
mu = np.mean(y, axis=1, keepdims=True)
return mu
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
def main():
n = 1000
mu = 1.6
# Set up observed data y
latents = np.random.randn(n)
y = normal_simu(n, mu, latents=latents)
# Plot
plt.hist(y)
# Set up the simulator
simulator = partial(normal_simu, n)
# Specify the graphical model
mu = Prior('mu', 'uniform', 0, 4)
Y = Simulator('normal_simu', simulator, mu, observed=y)
S1 = Summary('S1', mean, Y)
d = Discrepancy('d', distance, S1)
# Specify the number of simulations
N = 1000000
# Time and run parallel
s = time.time()
dists = d.generate(N, batch_size=10000).compute()
print("Elapsed time %d sec" % (time.time() - s))
# Take the parameters
mu_sample = mu.generate(N).compute()
# Set threshold and reject to get posteriors
eps = 0.01
accepts = dists < eps
mu_post = mu_sample[accepts]
print("Number of accepted samples %d" % sum(accepts))
if len(mu_post) > 0:
print("Posterior for mu")
plt.hist(mu_post, bins=20)
else:
print("No accepted samples")
if __name__ == "__main__":
main()
| Add script variant of the Gauss example
|
|
37b175b6a6ac3f0fd7fdaa5c2ed6435c159a29c2 | py/optimal-division.py | py/optimal-division.py | from fractions import Fraction
class Solution(object):
def optimalDivision(self, nums):
"""
:type nums: List[int]
:rtype: str
"""
min_result, max_result = dict(), dict()
min_offset, max_offset = dict(), dict()
lnums = len(nums)
def print_ans(start, end, need_max=True):
if start + 1 == end:
return str(nums[start])
if need_max:
cut = max_offset[start, end]
else:
cut = min_offset[start, end]
ans = print_ans(start, cut, need_max) + "/"
if end - cut > 1:
ans += "("
ans += print_ans(cut, end, not need_max)
if end - cut > 1:
ans += ")"
return ans
for i, n in enumerate(nums):
min_result[i, i + 1] = max_result[i, i + 1] = Fraction(n)
for l in xrange(2, lnums + 1):
for i in xrange(lnums - l + 1):
m, M = None, None
mj, Mj = None, None
for j in xrange(1, l):
tm = min_result[i, i + j] / max_result[i + j, i + l]
tM = max_result[i, i + j] / min_result[i + j, i + l]
if m is None or m > tm:
m, mj = tm, i + j
if M is None or M < tM:
M, Mj = tM, i + j
min_result[i, i + l] = m
max_result[i, i + l] = M
min_offset[i, i + l] = mj
max_offset[i, i + l] = Mj
return print_ans(0, lnums)
| Add py solution for 553. Optimal Division | Add py solution for 553. Optimal Division
553. Optimal Division: https://leetcode.com/problems/optimal-division/
Approach1
Bottom-up DP
| Python | apache-2.0 | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | from fractions import Fraction
class Solution(object):
def optimalDivision(self, nums):
"""
:type nums: List[int]
:rtype: str
"""
min_result, max_result = dict(), dict()
min_offset, max_offset = dict(), dict()
lnums = len(nums)
def print_ans(start, end, need_max=True):
if start + 1 == end:
return str(nums[start])
if need_max:
cut = max_offset[start, end]
else:
cut = min_offset[start, end]
ans = print_ans(start, cut, need_max) + "/"
if end - cut > 1:
ans += "("
ans += print_ans(cut, end, not need_max)
if end - cut > 1:
ans += ")"
return ans
for i, n in enumerate(nums):
min_result[i, i + 1] = max_result[i, i + 1] = Fraction(n)
for l in xrange(2, lnums + 1):
for i in xrange(lnums - l + 1):
m, M = None, None
mj, Mj = None, None
for j in xrange(1, l):
tm = min_result[i, i + j] / max_result[i + j, i + l]
tM = max_result[i, i + j] / min_result[i + j, i + l]
if m is None or m > tm:
m, mj = tm, i + j
if M is None or M < tM:
M, Mj = tM, i + j
min_result[i, i + l] = m
max_result[i, i + l] = M
min_offset[i, i + l] = mj
max_offset[i, i + l] = Mj
return print_ans(0, lnums)
| Add py solution for 553. Optimal Division
553. Optimal Division: https://leetcode.com/problems/optimal-division/
Approach1
Bottom-up DP
|
|
689dd5cb67516fd091a69e39708b547c66f96750 | nap/dataviews/models.py | nap/dataviews/models.py |
from .fields import Field
from .views import DataView
from django.utils.six import with_metaclass
class MetaView(type):
def __new__(mcs, name, bases, attrs):
meta = attrs.get('Meta', None)
try:
model = meta.model
except AttributeError:
if name != 'ModelDataView':
raise
else:
include = getattr(meta, 'fields', None)
exclude = getattr(meta, 'exclude', [])
# XXX Does the top base have all fields?
for model_field in model._meta.fields:
if model_field.name in attrs:
continue
if model_field.name in exclude:
continue
if include != '__all__' and model_field.name not in include:
continue
# XXX Magic for field types
attrs[model_field.name] = Field(model_field.name)
attrs['_meta'] = meta
return super(MetaView, mcs).__new__(mcs, name, bases, attrs)
class ModelDataView(with_metaclass(MetaView, DataView)):
pass
|
from django.db.models.fields import NOT_PROVIDED
from django.utils.six import with_metaclass
from . import filters
from .fields import Field
from .views import DataView
# Map of ModelField name -> list of filters
FIELD_FILTERS = {
'DateField': [filters.DateFilter],
'TimeField': [filters.TimeFilter],
'DateTimeField': [filters.DateTimeFilter],
}
class Options(object):
def __init__(self, meta):
self.model = getattr(meta, 'model', None)
self.fields = getattr(meta, 'fields', [])
self.exclude = getattr(meta, 'exclude', [])
self.required = getattr(meta, 'required', {})
class MetaView(type):
def __new__(mcs, name, bases, attrs):
meta = Options(attrs.get('Meta', None))
if meta.model is None:
if name != 'ModelDataView':
raise ValueError('model not defined on class Meta')
else:
# XXX Does the top base have all fields?
for model_field in meta.model._meta.fields:
if model_field.name in attrs:
continue
if model_field.name in meta.exclude:
continue
if meta.fields != '__all__' and model_field.name not in meta.fields:
continue
# XXX Magic for field types
kwargs = {}
kwargs['default'] = model_field.default
kwargs['required'] = any([
not model_field.blank,
model_field.default is not NOT_PROVIDED,
])
kwargs['filters'] = FIELD_FILTERS.get(model_field.__class__.__name__, [])
attrs[model_field.name] = Field(model_field.name, **kwargs)
attrs['_meta'] = meta
return super(MetaView, mcs).__new__(mcs, name, bases, attrs)
class ModelDataView(with_metaclass(MetaView, DataView)):
pass
| Add Options class Add field filters lists Start proper model field introspection | Add Options class
Add field filters lists
Start proper model field introspection
| Python | bsd-3-clause | limbera/django-nap,MarkusH/django-nap |
from django.db.models.fields import NOT_PROVIDED
from django.utils.six import with_metaclass
from . import filters
from .fields import Field
from .views import DataView
# Map of ModelField name -> list of filters
FIELD_FILTERS = {
'DateField': [filters.DateFilter],
'TimeField': [filters.TimeFilter],
'DateTimeField': [filters.DateTimeFilter],
}
class Options(object):
def __init__(self, meta):
self.model = getattr(meta, 'model', None)
self.fields = getattr(meta, 'fields', [])
self.exclude = getattr(meta, 'exclude', [])
self.required = getattr(meta, 'required', {})
class MetaView(type):
def __new__(mcs, name, bases, attrs):
meta = Options(attrs.get('Meta', None))
if meta.model is None:
if name != 'ModelDataView':
raise ValueError('model not defined on class Meta')
else:
# XXX Does the top base have all fields?
for model_field in meta.model._meta.fields:
if model_field.name in attrs:
continue
if model_field.name in meta.exclude:
continue
if meta.fields != '__all__' and model_field.name not in meta.fields:
continue
# XXX Magic for field types
kwargs = {}
kwargs['default'] = model_field.default
kwargs['required'] = any([
not model_field.blank,
model_field.default is not NOT_PROVIDED,
])
kwargs['filters'] = FIELD_FILTERS.get(model_field.__class__.__name__, [])
attrs[model_field.name] = Field(model_field.name, **kwargs)
attrs['_meta'] = meta
return super(MetaView, mcs).__new__(mcs, name, bases, attrs)
class ModelDataView(with_metaclass(MetaView, DataView)):
pass
| Add Options class
Add field filters lists
Start proper model field introspection
from .fields import Field
from .views import DataView
from django.utils.six import with_metaclass
class MetaView(type):
def __new__(mcs, name, bases, attrs):
meta = attrs.get('Meta', None)
try:
model = meta.model
except AttributeError:
if name != 'ModelDataView':
raise
else:
include = getattr(meta, 'fields', None)
exclude = getattr(meta, 'exclude', [])
# XXX Does the top base have all fields?
for model_field in model._meta.fields:
if model_field.name in attrs:
continue
if model_field.name in exclude:
continue
if include != '__all__' and model_field.name not in include:
continue
# XXX Magic for field types
attrs[model_field.name] = Field(model_field.name)
attrs['_meta'] = meta
return super(MetaView, mcs).__new__(mcs, name, bases, attrs)
class ModelDataView(with_metaclass(MetaView, DataView)):
pass
|
189c7a7c982739cd7a3026e34a9969ea9278a12b | api/data/src/lib/middleware.py | api/data/src/lib/middleware.py | import os
import re
class SetBaseEnv(object):
"""
Figure out which port we are on if we are running and set it.
So that the links will be correct.
Not sure if we need this always...
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
if os.environ.get('HTTP_PORT'):
request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])
response = self.get_response(request)
return response
| import os
class SetBaseEnv(object):
"""
Figure out which port we are on if we are running and set it.
So that the links will be correct.
Not sure if we need this always...
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])
response = self.get_response(request)
return response
| Fix so we can do :5000 queries from api container | Fix so we can do :5000 queries from api container
| Python | mit | xeor/hohu,xeor/hohu,xeor/hohu,xeor/hohu | import os
class SetBaseEnv(object):
"""
Figure out which port we are on if we are running and set it.
So that the links will be correct.
Not sure if we need this always...
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])
response = self.get_response(request)
return response
| Fix so we can do :5000 queries from api container
import os
import re
class SetBaseEnv(object):
"""
Figure out which port we are on if we are running and set it.
So that the links will be correct.
Not sure if we need this always...
"""
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
if os.environ.get('HTTP_PORT'):
request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])
response = self.get_response(request)
return response
|
c8360831ab2fa4d5af2929a85beca4a1f33ef9d1 | travis_settings.py | travis_settings.py | # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| Use MySQL database backend in Travis CI. | Use MySQL database backend in Travis CI.
| Python | bsd-3-clause | Inter-Actief/alexia,Inter-Actief/alexia,Inter-Actief/alexia,Inter-Actief/alexia | # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| Use MySQL database backend in Travis CI.
# Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
|
9e9910346f7bacdc2a4fc2e92ecb8237bf38275e | plumbium/environment.py | plumbium/environment.py | """
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
| """
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
| Stop pylint complaining about bare-except | Stop pylint complaining about bare-except
| Python | mit | jstutters/Plumbium | """
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except: # pylint: disable=bare-except
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
| Stop pylint complaining about bare-except
"""
plumbium.environment
====================
Module containing the get_environment function.
"""
import os
try:
import pip
except ImportError:
pass
import socket
def get_environment():
"""Obtain information about the executing environment.
Captures:
* installed Python packages using pip (if available),
* hostname
* uname
* environment variables
Returns:
dict: a dict with the keys ``python_packages``, ``hostname``, ``uname`` and ``environ``
"""
env = {}
try:
env['python_packages'] = [str(p) for p in pip.get_installed_distributions()]
except:
pass
env['hostname'] = socket.gethostname()
env['uname'] = os.uname()
env['environ'] = dict(os.environ)
return env
|
bd243742f65a8fd92f4a773ce485cdc6f03f4a84 | kevin/leet/copy_list_with_random_pointers.py | kevin/leet/copy_list_with_random_pointers.py | """
https://leetcode.com/explore/challenge/card/february-leetcoding-challenge-2021/585/week-2-february-8th-february-14th/3635/
"""
class Node:
def __init__(self, x: int, next: 'Node'=None, random: 'Node'=None):
self.val = int(x)
self.next = next
self.random = random
class Solution:
def copy_random_list(self, head: Node) -> Node:
# I am aware this solution is not at all optimal in terms of
# memory consumption
# O(1) space is possible by cleverly linking old and new nodes in
# an alternating fashion
if head is None:
return None
# construct map from nodes to their random nodes
current = head
random = dict() # map from old node to old random node
while current is not None:
random[current] = current.random
current = current.next
# set up new nodes, with map from new to old
current = head
new = Node(current.val)
new_head = new
back = {new: head} # map back from new to old
forward = {head: new} # map forward from old to new
while current.next is not None:
current = current.next
new.next = Node(current.val)
new = new.next
back[new] = current
forward[current] = new
# add correct random pointers in new nodes
current = new_head
while current is not None:
old = back[current]
old_random = random[old]
current.random = forward.get(old_random, None)
current = current.next
return new_head
| Add Copy List with Random Pointer LeetCode problem | Add Copy List with Random Pointer LeetCode problem
- No tests though
| Python | mit | kalyons11/kevin,kalyons11/kevin | """
https://leetcode.com/explore/challenge/card/february-leetcoding-challenge-2021/585/week-2-february-8th-february-14th/3635/
"""
class Node:
def __init__(self, x: int, next: 'Node'=None, random: 'Node'=None):
self.val = int(x)
self.next = next
self.random = random
class Solution:
def copy_random_list(self, head: Node) -> Node:
# I am aware this solution is not at all optimal in terms of
# memory consumption
# O(1) space is possible by cleverly linking old and new nodes in
# an alternating fashion
if head is None:
return None
# construct map from nodes to their random nodes
current = head
random = dict() # map from old node to old random node
while current is not None:
random[current] = current.random
current = current.next
# set up new nodes, with map from new to old
current = head
new = Node(current.val)
new_head = new
back = {new: head} # map back from new to old
forward = {head: new} # map forward from old to new
while current.next is not None:
current = current.next
new.next = Node(current.val)
new = new.next
back[new] = current
forward[current] = new
# add correct random pointers in new nodes
current = new_head
while current is not None:
old = back[current]
old_random = random[old]
current.random = forward.get(old_random, None)
current = current.next
return new_head
| Add Copy List with Random Pointer LeetCode problem
- No tests though
|
|
52609334bdd25eb89dbc67b75921029c37babd63 | setup.py | setup.py | import os
import sys
from setuptools import setup
__author__ = 'Ryan McGrath <[email protected]>'
__version__ = '2.10.0'
packages = [
'twython',
'twython.streaming'
]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
setup(
# Basic package information.
name='twython',
version=__version__,
packages=packages,
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['requests==1.2.2', 'requests_oauthlib==0.3.2'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='[email protected]',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython',
description='An easy (and up to date) way to access Twitter data with Python.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| import os
import sys
from setuptools import setup
__author__ = 'Ryan McGrath <[email protected]>'
__version__ = '2.10.0'
packages = [
'twython',
'twython.streaming'
]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
setup(
# Basic package information.
name='twython',
version=__version__,
packages=packages,
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['requests==1.2.2', 'requests_oauthlib==0.3.2'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='[email protected]',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython stream',
description='Actively maintained, pure Python wrapper for the Twitter API. Supports both normal and streaming Twitter APIs',
long_description=open('README.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| Update description and long description | Update description and long description
[ci skip]
| Python | mit | joebos/twython,Oire/twython,fibears/twython,Devyani-Divs/twython,Hasimir/twython,akarambir/twython,ping/twython,vivek8943/twython,ryanmcgrath/twython,Fueled/twython | import os
import sys
from setuptools import setup
__author__ = 'Ryan McGrath <[email protected]>'
__version__ = '2.10.0'
packages = [
'twython',
'twython.streaming'
]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
setup(
# Basic package information.
name='twython',
version=__version__,
packages=packages,
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['requests==1.2.2', 'requests_oauthlib==0.3.2'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='[email protected]',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython stream',
description='Actively maintained, pure Python wrapper for the Twitter API. Supports both normal and streaming Twitter APIs',
long_description=open('README.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
| Update description and long description
[ci skip]
import os
import sys
from setuptools import setup
__author__ = 'Ryan McGrath <[email protected]>'
__version__ = '2.10.0'
packages = [
'twython',
'twython.streaming'
]
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
setup(
# Basic package information.
name='twython',
version=__version__,
packages=packages,
# Packaging options.
include_package_data=True,
# Package dependencies.
install_requires=['requests==1.2.2', 'requests_oauthlib==0.3.2'],
# Metadata for PyPI.
author='Ryan McGrath',
author_email='[email protected]',
license='MIT License',
url='http://github.com/ryanmcgrath/twython/tree/master',
keywords='twitter search api tweet twython',
description='An easy (and up to date) way to access Twitter data with Python.',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
e14ceda6370b506b80f65d45abd36c9f728e5699 | pitchfork/manage_globals/forms.py | pitchfork/manage_globals/forms.py | # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
| # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
| Rework imports so not having to specify every type of field. Alter field definitions to reflect change | Rework imports so not having to specify every type of field. Alter field definitions to reflect change
| Python | apache-2.0 | rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork | # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import fields, validators
class VerbSet(Form):
name = fields.TextField('Verb:', validators=[validators.required()])
active = fields.BooleanField('Active:')
submit = fields.SubmitField('Submit')
class DCSet(Form):
name = fields.TextField('Name:', validators=[validators.required()])
abbreviation = fields.TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = fields.SubmitField('Submit')
| Rework imports so not having to specify every type of field. Alter field definitions to reflect change
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask.ext.wtf import Form
from wtforms import TextField, SelectField, IntegerField, BooleanField,\
PasswordField, TextAreaField, SubmitField, HiddenField, RadioField
from wtforms import validators
class VerbSet(Form):
name = TextField('Verb:', validators=[validators.required()])
active = BooleanField('Active:')
submit = SubmitField('Submit')
class DCSet(Form):
name = TextField('Name:', validators=[validators.required()])
abbreviation = TextField(
'Abbreviation:',
validators=[validators.required()]
)
submit = SubmitField('Submit')
|
79a1f426e22f3c213bbb081f4ca23ccf1a6f61d7 | openedx/core/djangoapps/content/block_structure/migrations/0005_trim_leading_slashes_in_data_path.py | openedx/core/djangoapps/content/block_structure/migrations/0005_trim_leading_slashes_in_data_path.py | """
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
| Convert block_structure.data to relative paths (TNL-8335) | fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected.
| Python | agpl-3.0 | eduNEXT/edunext-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform | """
Data migration to convert absolute paths in block_structure.data to be relative.
This has only been tested with MySQL, though it should also work for Postgres as
well. This is necessary to manually correct absolute paths in the "data" field
of the block_structure table. For S3 storage, having a path that starts with
"/courses/" puts things in the same place as a path starting with "courses/",
but absolute paths are not permitted for FileFields.
These values would have always been broken in devstack (because it's not in
MEDIA_ROOT), but it used to work for the S3 storages option because the security
checking happened at the storage layer, and the path is equivalent in S3 because
we just append either value to the bucket's root.
However, in Django > 2.2.20, this checking against absolute paths has been added
to the FileField itself, and an upgrade attempt started causing write failures
to Block Structures.
There are separate PRs to fix the config values so that new writes start with a
"courses/" prefix. This migration to is fix old entries by removing any leading
"/" characters.
THIS MIGRATION MUST BE RUN BEFORE UPGRADING TO DJANGO > 2.2.20 IF YOU ARE
USING A STORAGE_CLASS IN BLOCK_STRUCTURES_SETTINGS. If you do not specify this
setting and only run Block Structures out of memcached, this should not affect
you.
"""
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('block_structure', '0004_blockstructuremodel_usagekeywithrun'),
]
operations = [
migrations.RunSQL(
"""
UPDATE block_structure
SET data = right(data, length(data) - 1)
WHERE data like '/%';
"""
)
]
| fix: Convert block_structure.data to relative paths (TNL-8335)
In order to upgrade to Django > 2.2.20, we can't continue to use
absolute paths in the block_structure's data FileField. This used to
work for S3, but it will not work going forward due to a security fix
in Django 2.2.21.
This data migration will remove the starting '/' from any paths in the
block_structure table. The resulting locations in S3 should be
unaffected.
|
|
e308575d9723c90d3a15e5e8de45b0232c5d0b75 | parse_ast.py | parse_ast.py | """Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain, count
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
filename = args[0]
if len(args) != 1 or filename.lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
| Add basic ast to json converter | Add basic ast to json converter
| Python | mit | RishiRamraj/wensleydale | """Parse python code into the abstract syntax tree and represent as JSON"""
from __future__ import print_function
import ast
from itertools import chain, count
import json
import sys
def dictify(obj):
if hasattr(obj, "__dict__"):
result = {k: dictify(v) for k, v in chain(obj.__dict__.items(), [("classname", obj.__class__.__name__)])}
return result
elif isinstance(obj, list):
return [dictify(x) for x in obj]
else:
return obj
def parse_file(filename):
with open(filename) as f:
source = f.read()
return ast.parse(source, filename=filename, mode="exec")
def main(args):
filename = args[0]
if len(args) != 1 or filename.lower() in ("help", "h", "-h", "--help"):
print(__doc__)
else:
ast_node = parse_file(filename)
ast_dict = dictify(ast_node)
ast_json = json.dumps(ast_dict, sort_keys=True, indent=4, separators=(',', ': '))
print(ast_json)
if __name__ == "__main__":
main(sys.argv[1:])
| Add basic ast to json converter
|
|
7f62587e099b9ef59731b6387030431b09f663f9 | bot_chucky/helpers.py | bot_chucky/helpers.py | """ Helper classes """
import facebook
import requests as r
class FacebookData:
def __init__(self, token):
"""
:param token: Facebook Page token
:param _api: Instance of the GraphAPI object
"""
self.token = token
self._api = facebook.GraphAPI(self.token)
def get_user_name(self, _id):
"""
:param _id: find user object by _id
:return: first name of user, type -> str
"""
if not isinstance(_id, str):
raise ValueError('id must be a str')
user = self._api.get_object(_id)
return user['first_name'] if user else None
class WeatherData:
"""
Class which collect weather data
"""
def __init__(self, api_token):
"""
:param api_token: Open Weather TOKEN
"""
self.token = api_token
def get_current_weather(self, city_name):
"""
:param city_name: Open weather API, find by city name
:return dictionary object with information
for example:
{'weather': [{'id': 800, 'main': 'Clear', 'description': 'clear sky'}]}
"""
api_url = f'http://api.openweathermap.org' \
f'/data/2.5/weather?q={city_name}&APPID={self.token}'
info = r.get(api_url).json()
return info
| """ Helper classes """
import facebook
import requests as r
class FacebookData:
def __init__(self, token):
"""
:param token: Facebook Page token
:param _api: Instance of the GraphAPI object
"""
self.token = token
self._api = facebook.GraphAPI(self.token)
def get_user_name(self, _id):
"""
:param _id: find user object by _id
:return: first name of user, type -> str
"""
if not isinstance(_id, str):
raise ValueError('id must be a str')
user = self._api.get_object(_id)
return user['first_name'] if user else None
class WeatherData:
"""
Class which collect weather data
"""
def __init__(self, api_token):
"""
:param api_token: Open Weather TOKEN
"""
self.token = api_token
def get_current_weather(self, city_name):
"""
:param city_name: Open weather API, find by city name
:return dictionary object with information
for example:
{'weather': [{'id': 800, 'main': 'Clear', 'description': 'clear sky'}]}
"""
api_url = f'http://api.openweathermap.org' \
f'/data/2.5/weather?q={city_name}&APPID={self.token}'
info = r.get(api_url).json()
return info
class StackOverFlowData:
params = {}
def get_answer_by_title(self, title):
pass
| Add StackOverFlowData, not completed yet | Add StackOverFlowData, not completed yet
| Python | mit | MichaelYusko/Bot-Chucky | """ Helper classes """
import facebook
import requests as r
class FacebookData:
def __init__(self, token):
"""
:param token: Facebook Page token
:param _api: Instance of the GraphAPI object
"""
self.token = token
self._api = facebook.GraphAPI(self.token)
def get_user_name(self, _id):
"""
:param _id: find user object by _id
:return: first name of user, type -> str
"""
if not isinstance(_id, str):
raise ValueError('id must be a str')
user = self._api.get_object(_id)
return user['first_name'] if user else None
class WeatherData:
"""
Class which collect weather data
"""
def __init__(self, api_token):
"""
:param api_token: Open Weather TOKEN
"""
self.token = api_token
def get_current_weather(self, city_name):
"""
:param city_name: Open weather API, find by city name
:return dictionary object with information
for example:
{'weather': [{'id': 800, 'main': 'Clear', 'description': 'clear sky'}]}
"""
api_url = f'http://api.openweathermap.org' \
f'/data/2.5/weather?q={city_name}&APPID={self.token}'
info = r.get(api_url).json()
return info
class StackOverFlowData:
params = {}
def get_answer_by_title(self, title):
pass
| Add StackOverFlowData, not completed yet
""" Helper classes """
import facebook
import requests as r
class FacebookData:
def __init__(self, token):
"""
:param token: Facebook Page token
:param _api: Instance of the GraphAPI object
"""
self.token = token
self._api = facebook.GraphAPI(self.token)
def get_user_name(self, _id):
"""
:param _id: find user object by _id
:return: first name of user, type -> str
"""
if not isinstance(_id, str):
raise ValueError('id must be a str')
user = self._api.get_object(_id)
return user['first_name'] if user else None
class WeatherData:
"""
Class which collect weather data
"""
def __init__(self, api_token):
"""
:param api_token: Open Weather TOKEN
"""
self.token = api_token
def get_current_weather(self, city_name):
"""
:param city_name: Open weather API, find by city name
:return dictionary object with information
for example:
{'weather': [{'id': 800, 'main': 'Clear', 'description': 'clear sky'}]}
"""
api_url = f'http://api.openweathermap.org' \
f'/data/2.5/weather?q={city_name}&APPID={self.token}'
info = r.get(api_url).json()
return info
|
e42c2f6607d59706358fbd0a81163d793d1bebfb | plumeria/plugins/server_control.py | plumeria/plugins/server_control.py | import asyncio
import io
import re
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.image import read_image
from plumeria.perms import server_admins_only
from plumeria.transport.transport import ForbiddenError
@commands.register('icon set', category='Management')
@server_admins_only
async def set_icon(message: Message):
"""
Set the server icon to the given image.
Example::
/drawtext Hello there! | set icon
Requires an input image.
"""
attachment = await read_image(message)
if not attachment:
raise CommandError("No image is available to process.")
def execute():
width, height = attachment.image.size
if width < 128 or height < 128:
raise CommandError("Image is too small (128x128 minimum size).")
buffer = io.BytesIO()
attachment.image.save(buffer, "png")
return buffer.getvalue()
image_data = await asyncio.get_event_loop().run_in_executor(None, execute)
try:
await message.server.update(icon=image_data)
return "Server icon updated."
except ForbiddenError as e:
raise CommandError("The bot doesn't have the permissions to do this: {}".format(str(e)))
| import asyncio
import io
import re
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.image import read_image
from plumeria.perms import server_admins_only
from plumeria.transport.transport import ForbiddenError
@commands.register('icon set', category='Management')
@server_admins_only
async def set_icon(message: Message):
"""
Set the server icon to the given image.
Example::
/drawtext Hello there! | icon set
Requires an input image.
"""
attachment = await read_image(message)
if not attachment:
raise CommandError("No image is available to process.")
def execute():
width, height = attachment.image.size
if width < 128 or height < 128:
raise CommandError("Image is too small (128x128 minimum size).")
buffer = io.BytesIO()
attachment.image.save(buffer, "png")
return buffer.getvalue()
image_data = await asyncio.get_event_loop().run_in_executor(None, execute)
try:
await message.server.update(icon=image_data)
return "Server icon updated."
except ForbiddenError as e:
raise CommandError("The bot doesn't have the permissions to do this: {}".format(str(e)))
| Fix typo in docs for /icon set. | Fix typo in docs for /icon set.
| Python | mit | sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria | import asyncio
import io
import re
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.image import read_image
from plumeria.perms import server_admins_only
from plumeria.transport.transport import ForbiddenError
@commands.register('icon set', category='Management')
@server_admins_only
async def set_icon(message: Message):
"""
Set the server icon to the given image.
Example::
/drawtext Hello there! | icon set
Requires an input image.
"""
attachment = await read_image(message)
if not attachment:
raise CommandError("No image is available to process.")
def execute():
width, height = attachment.image.size
if width < 128 or height < 128:
raise CommandError("Image is too small (128x128 minimum size).")
buffer = io.BytesIO()
attachment.image.save(buffer, "png")
return buffer.getvalue()
image_data = await asyncio.get_event_loop().run_in_executor(None, execute)
try:
await message.server.update(icon=image_data)
return "Server icon updated."
except ForbiddenError as e:
raise CommandError("The bot doesn't have the permissions to do this: {}".format(str(e)))
| Fix typo in docs for /icon set.
import asyncio
import io
import re
from plumeria.command import commands, CommandError
from plumeria.message import Message
from plumeria.message.image import read_image
from plumeria.perms import server_admins_only
from plumeria.transport.transport import ForbiddenError
@commands.register('icon set', category='Management')
@server_admins_only
async def set_icon(message: Message):
"""
Set the server icon to the given image.
Example::
/drawtext Hello there! | set icon
Requires an input image.
"""
attachment = await read_image(message)
if not attachment:
raise CommandError("No image is available to process.")
def execute():
width, height = attachment.image.size
if width < 128 or height < 128:
raise CommandError("Image is too small (128x128 minimum size).")
buffer = io.BytesIO()
attachment.image.save(buffer, "png")
return buffer.getvalue()
image_data = await asyncio.get_event_loop().run_in_executor(None, execute)
try:
await message.server.update(icon=image_data)
return "Server icon updated."
except ForbiddenError as e:
raise CommandError("The bot doesn't have the permissions to do this: {}".format(str(e)))
|
964da81ef5a90130a47ff726839798a7a7b716ef | buildcert.py | buildcert.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = '[email protected]', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
with app.app_context():
msg = Message('Freifunk Vpn03 Key', sender = '[email protected]', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
| Add app.context() to populate context for render_template | Add app.context() to populate context for render_template
| Python | mit | freifunk-berlin/ca.berlin.freifunk.net,freifunk-berlin/ca.berlin.freifunk.net,freifunk-berlin/ca.berlin.freifunk.net | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
with app.app_context():
msg = Message('Freifunk Vpn03 Key', sender = '[email protected]', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
| Add app.context() to populate context for render_template
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = '[email protected]', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
08a7389e5be0d3f7a9e6c4e12b13f82da50480b1 | reference/gittaggers.py | reference/gittaggers.py | from setuptools.command.egg_info import egg_info
import subprocess
import time
class EggInfoFromGit(egg_info):
"""Tag the build with git commit timestamp.
If a build tag has already been set (e.g., "egg_info -b", building
from source package), leave it alone.
"""
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
'--format=format:%ct', '..']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self):
if self.tag_build is None:
self.tag_build = self.git_timestamp_tag()
return egg_info.tags(self)
| from setuptools.command.egg_info import egg_info
import subprocess
import time
class EggInfoFromGit(egg_info):
"""Tag the build with git commit timestamp.
If a build tag has already been set (e.g., "egg_info -b", building
from source package), leave it alone.
"""
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
'--format=format:%ct']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self):
if self.tag_build is None:
self.tag_build = self.git_timestamp_tag()
return egg_info.tags(self)
| Fix Python packaging to use correct git log for package time/version stamps (2nd try) | Fix Python packaging to use correct git log for package time/version stamps (2nd try)
| Python | apache-2.0 | chapmanb/cwltool,brainstorm/common-workflow-language,curoverse/common-workflow-language,chapmanb/cwltool,hmenager/common-workflow-language,common-workflow-language/cwltool,foreveremain/common-workflow-language,StarvingMarvin/common-workflow-language,jeremiahsavage/cwltool,foreveremain/common-workflow-language,satra/common-workflow-language,common-workflow-language/common-workflow-language,curoverse/common-workflow-language,SciDAP/cwltool,mr-c/common-workflow-language,foreveremain/common-workflow-language,chapmanb/cwltool,mr-c/common-workflow-language,SciDAP/cwltool,dleehr/common-workflow-language,common-workflow-language/common-workflow-language,dleehr/common-workflow-language,chapmanb/cwltool,stain/common-workflow-language,slnovak/common-workflow-language,common-workflow-language/common-workflow-language,slnovak/common-workflow-language,dleehr/cwltool,stain/common-workflow-language,satra/common-workflow-language,guillermo-carrasco/common-workflow-language,hmenager/common-workflow-language,SciDAP/cwltool,dleehr/cwltool,StarvingMarvin/common-workflow-language,dleehr/common-workflow-language,dleehr/common-workflow-language,guillermo-carrasco/common-workflow-language,stain/common-workflow-language,jeremiahsavage/cwltool,slnovak/common-workflow-language,hmenager/common-workflow-language,jeremiahsavage/cwltool,stain/common-workflow-language,guillermo-carrasco/common-workflow-language,jeremiahsavage/cwltool,SciDAP/cwltool,hmenager/common-workflow-language,ohsu-computational-biology/common-workflow-language,brainstorm/common-workflow-language,common-workflow-language/common-workflow-language,common-workflow-language/cwltool,StarvingMarvin/common-workflow-language,ohsu-computational-biology/common-workflow-language,StarvingMarvin/common-workflow-language,brainstorm/common-workflow-language,dleehr/cwltool,satra/common-workflow-language,common-workflow-language/cwltool,mr-c/common-workflow-language,dleehr/cwltool,ohsu-computational-biology/common-workf
low-language | from setuptools.command.egg_info import egg_info
import subprocess
import time
class EggInfoFromGit(egg_info):
"""Tag the build with git commit timestamp.
If a build tag has already been set (e.g., "egg_info -b", building
from source package), leave it alone.
"""
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
'--format=format:%ct']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self):
if self.tag_build is None:
self.tag_build = self.git_timestamp_tag()
return egg_info.tags(self)
| Fix Python packaging to use correct git log for package time/version stamps (2nd try)
from setuptools.command.egg_info import egg_info
import subprocess
import time
class EggInfoFromGit(egg_info):
"""Tag the build with git commit timestamp.
If a build tag has already been set (e.g., "egg_info -b", building
from source package), leave it alone.
"""
def git_timestamp_tag(self):
gitinfo = subprocess.check_output(
['git', 'log', '--first-parent', '--max-count=1',
'--format=format:%ct', '..']).strip()
return time.strftime('.%Y%m%d%H%M%S', time.gmtime(int(gitinfo)))
def tags(self):
if self.tag_build is None:
self.tag_build = self.git_timestamp_tag()
return egg_info.tags(self)
|
299aa432b3183e9db418f0735511330763c8141b | botbot/fileinfo.py | botbot/fileinfo.py | """File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
"""Get md5 hash of a file"""
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
| """File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
def get_file_hash(path):
"""Get md5 hash of a file"""
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
| Move reader() generator out of file hasher | Move reader() generator out of file hasher
| Python | mit | jackstanek/BotBot,jackstanek/BotBot | """File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
def get_file_hash(path):
"""Get md5 hash of a file"""
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
| Move reader() generator out of file hasher
"""File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
"""Get md5 hash of a file"""
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
|
84acc00a3f6d09b4212b6728667af583b45e5a99 | km_api/know_me/tests/serializers/test_profile_list_serializer.py | km_api/know_me/tests/serializers/test_profile_list_serializer.py | from know_me import serializers
def test_serialize(profile_factory):
"""
Test serializing a profile.
"""
profile = profile_factory()
serializer = serializers.ProfileListSerializer(profile)
expected = {
'id': profile.id,
'name': profile.name,
'quote': profile.quote,
'welcome_message': profile.welcome_message,
}
assert serializer.data == expected
| from know_me import serializers
def test_create(user_factory):
"""
Saving a serializer containing valid data should create a new
profile.
"""
user = user_factory()
data = {
'name': 'John',
'quote': "Hi, I'm John",
'welcome_message': 'This is my profile.',
}
serializer = serializers.ProfileListSerializer(data=data)
assert serializer.is_valid()
serializer.save(user=user)
profile = user.profile
assert profile.name == data['name']
assert profile.quote == data['quote']
assert profile.welcome_message == data['welcome_message']
assert profile.user == user
def test_serialize(profile_factory):
"""
Test serializing a profile.
"""
profile = profile_factory()
serializer = serializers.ProfileListSerializer(profile)
expected = {
'id': profile.id,
'name': profile.name,
'quote': profile.quote,
'welcome_message': profile.welcome_message,
}
assert serializer.data == expected
| Add test for creating profile from serializer. | Add test for creating profile from serializer.
| Python | apache-2.0 | knowmetools/km-api,knowmetools/km-api,knowmetools/km-api,knowmetools/km-api | from know_me import serializers
def test_create(user_factory):
"""
Saving a serializer containing valid data should create a new
profile.
"""
user = user_factory()
data = {
'name': 'John',
'quote': "Hi, I'm John",
'welcome_message': 'This is my profile.',
}
serializer = serializers.ProfileListSerializer(data=data)
assert serializer.is_valid()
serializer.save(user=user)
profile = user.profile
assert profile.name == data['name']
assert profile.quote == data['quote']
assert profile.welcome_message == data['welcome_message']
assert profile.user == user
def test_serialize(profile_factory):
"""
Test serializing a profile.
"""
profile = profile_factory()
serializer = serializers.ProfileListSerializer(profile)
expected = {
'id': profile.id,
'name': profile.name,
'quote': profile.quote,
'welcome_message': profile.welcome_message,
}
assert serializer.data == expected
| Add test for creating profile from serializer.
from know_me import serializers
def test_serialize(profile_factory):
"""
Test serializing a profile.
"""
profile = profile_factory()
serializer = serializers.ProfileListSerializer(profile)
expected = {
'id': profile.id,
'name': profile.name,
'quote': profile.quote,
'welcome_message': profile.welcome_message,
}
assert serializer.data == expected
|
9f95715cc7260d02d88781c208f6a6a167496015 | aiohttp_json_api/jsonpointer/__init__.py | aiohttp_json_api/jsonpointer/__init__.py | """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| Fix bug with JSONPointer if part passed via __truediv__ is integer | Fix bug with JSONPointer if part passed via __truediv__ is integer
| Python | mit | vovanbo/aiohttp_json_api | """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| Fix bug with JSONPointer if part passed via __truediv__ is integer
"""
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
|
a3e537dc7e91785bb45bfe4d5a788c26d52653b1 | command_line/make_sphinx_html.py | command_line/make_sphinx_html.py | # LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html
from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys
if (__name__ == "__main__") :
xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
assert (xia2_dir is not None)
dest_dir = op.join(xia2_dir, "html")
if op.exists(dest_dir):
shutil.rmtree(dest_dir)
os.chdir(op.join(xia2_dir, "doc", "sphinx"))
easy_run.call("make clean")
easy_run.call("make html")
print "Moving HTML pages to", dest_dir
shutil.move("build/html", dest_dir)
| # LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html
from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os
if (__name__ == "__main__") :
xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
assert (xia2_dir is not None)
dest_dir = os.path.join(xia2_dir, "html")
if os.path.exists(dest_dir):
shutil.rmtree(dest_dir)
os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
result = run_process(["make", "clean"])
assert result['exitcode'] == 0, \
'make clean failed with exit code %d' % result['exitcode']
result = run_process(["make", "html"])
assert result['exitcode'] == 0, \
'make html failed with exit code %d' % result['exitcode']
print "Moving HTML pages to", dest_dir
shutil.move("build/html", dest_dir)
| Check make exit codes and stop on error | Check make exit codes and stop on error
| Python | bsd-3-clause | xia2/xia2,xia2/xia2 | # LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html
from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os
if (__name__ == "__main__") :
xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
assert (xia2_dir is not None)
dest_dir = os.path.join(xia2_dir, "html")
if os.path.exists(dest_dir):
shutil.rmtree(dest_dir)
os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
result = run_process(["make", "clean"])
assert result['exitcode'] == 0, \
'make clean failed with exit code %d' % result['exitcode']
result = run_process(["make", "html"])
assert result['exitcode'] == 0, \
'make html failed with exit code %d' % result['exitcode']
print "Moving HTML pages to", dest_dir
shutil.move("build/html", dest_dir)
| Check make exit codes and stop on error
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html
from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys
if (__name__ == "__main__") :
xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
assert (xia2_dir is not None)
dest_dir = op.join(xia2_dir, "html")
if op.exists(dest_dir):
shutil.rmtree(dest_dir)
os.chdir(op.join(xia2_dir, "doc", "sphinx"))
easy_run.call("make clean")
easy_run.call("make html")
print "Moving HTML pages to", dest_dir
shutil.move("build/html", dest_dir)
|
5997e30e05d51996345e3154c5495683e3229410 | app/taskqueue/celeryconfig.py | app/taskqueue/celeryconfig.py | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
| # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
| Increase ack on broker to 4 hours. | Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd
| Python | lgpl-2.1 | kernelci/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
| Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
9658033dab279828975183f94f8c8641891f4ea9 | froide/helper/api_utils.py | froide/helper/api_utils.py | from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
| from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
max_limit = 50
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
| Add max limit to api pagination | Add max limit to api pagination | Python | mit | fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide | from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
max_limit = 50
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
| Add max limit to api pagination
from collections import OrderedDict
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.response import Response
from rest_framework.serializers import ListSerializer
from rest_framework.utils.serializer_helpers import ReturnDict
class CustomLimitOffsetPagination(LimitOffsetPagination):
def get_paginated_response(self, data):
return Response(OrderedDict([
('meta', OrderedDict([
('limit', self.limit),
('next', self.get_next_link()),
('offset', self.offset),
('previous', self.get_previous_link()),
('total_count', self.count),
])),
('objects', data),
]))
class SearchFacetListSerializer(ListSerializer):
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnDict(ret, serializer=self)
def to_representation(self, instance):
ret = super(SearchFacetListSerializer, self).to_representation(instance)
ret = OrderedDict([
('results', ret),
('facets', self._context.get('facets', {'fields': {}})),
])
return ret
|
94d6ac50b4ce48aec51d5f32989d8d4aea938868 | {{cookiecutter.repo_name}}/setup.py | {{cookiecutter.repo_name}}/setup.py | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
| import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
| Set up console script for main | Set up console script for main
| Python | mit | hackebrot/cookiedozer,hackebrot/cookiedozer | import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
| Set up console script for main
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
48edfcddca89c506107035bd804fa536d3dec84d | geotrek/signage/migrations/0013_auto_20200423_1255.py | geotrek/signage/migrations/0013_auto_20200423_1255.py | # Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
| # Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
| Remove element with deleted=true before removefield | Remove element with deleted=true before removefield
| Python | bsd-2-clause | makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek | # Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RunSQL(sql=[("DELETE FROM geotrek.signage_blade WHERE deleted=TRUE;", )]),
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
| Remove element with deleted=true before removefield
# Generated by Django 2.0.13 on 2020-04-23 12:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('signage', '0012_auto_20200406_1411'),
]
operations = [
migrations.RemoveField(
model_name='blade',
name='deleted',
),
migrations.RemoveField(
model_name='blade',
name='structure',
),
migrations.RemoveField(
model_name='line',
name='structure',
),
migrations.AlterField(
model_name='line',
name='blade',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='lines',
to='signage.Blade', verbose_name='Blade'),
),
]
|
5e5e58b705d30df62423ec8bb6018c6807114580 | providers/io/osf/registrations/apps.py | providers/io/osf/registrations/apps.py | from share.provider import ProviderAppConfig
from .harvester import OSFRegistrationsHarvester
class AppConfig(ProviderAppConfig):
name = 'providers.io.osf.registrations'
version = '0.0.1'
title = 'osf_registrations'
long_title = 'Open Science Framework Registrations'
home_page = 'http://api.osf.io/registrations/'
harvester = OSFRegistrationsHarvester
| Add the app config for osf registrations | Add the app config for osf registrations
| Python | apache-2.0 | laurenbarker/SHARE,aaxelb/SHARE,aaxelb/SHARE,zamattiac/SHARE,zamattiac/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,aaxelb/SHARE,zamattiac/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE | from share.provider import ProviderAppConfig
from .harvester import OSFRegistrationsHarvester
class AppConfig(ProviderAppConfig):
name = 'providers.io.osf.registrations'
version = '0.0.1'
title = 'osf_registrations'
long_title = 'Open Science Framework Registrations'
home_page = 'http://api.osf.io/registrations/'
harvester = OSFRegistrationsHarvester
| Add the app config for osf registrations
|
|
08d1db2f6031d3496309ae290e4d760269706d26 | meinberlin/config/settings/dev.py | meinberlin/config/settings/dev.py | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
| from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
LOGGING = {
'version': 1,
'handlers': {
'console': {
'class': 'logging.StreamHandler'},
},
'loggers': {'background_task': {'handlers': ['console'], 'level': 'INFO'}}}
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
| Print tracebacks that happened in tasks | Print tracebacks that happened in tasks
| Python | agpl-3.0 | liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
LOGGING = {
'version': 1,
'handlers': {
'console': {
'class': 'logging.StreamHandler'},
},
'loggers': {'background_task': {'handlers': ['console'], 'level': 'INFO'}}}
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
| Print tracebacks that happened in tasks
from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
for template_engine in TEMPLATES:
template_engine['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'qid$h1o8&wh#p(j)lifis*5-rf@lbiy8%^3l4x%@b$z(tli@ab'
try:
import debug_toolbar
except ImportError:
pass
else:
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1', 'localhost')
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': '',
}
try:
from .local import *
except ImportError:
pass
try:
from .polygons import *
except ImportError:
pass
try:
INSTALLED_APPS += tuple(ADDITIONAL_APPS)
except NameError:
pass
|
0de3ca1439acec9191932a51e222aabc8b957047 | mosql/__init__.py | mosql/__init__.py | # -*- coding: utf-8 -*-
VERSION = (0, 11,)
__author__ = 'Mosky <http://mosky.tw>'
__version__ = '.'.join(str(v) for v in VERSION)
| # -*- coding: utf-8 -*-
VERSION = (0, 12,)
__author__ = 'Mosky <http://mosky.tw>'
__version__ = '.'.join(str(v) for v in VERSION)
| Change the version to v0.12 | Change the version to v0.12
| Python | mit | moskytw/mosql | # -*- coding: utf-8 -*-
VERSION = (0, 12,)
__author__ = 'Mosky <http://mosky.tw>'
__version__ = '.'.join(str(v) for v in VERSION)
| Change the version to v0.12
# -*- coding: utf-8 -*-
VERSION = (0, 11,)
__author__ = 'Mosky <http://mosky.tw>'
__version__ = '.'.join(str(v) for v in VERSION)
|
ee4faf2e1a81fe400d818a5a7337cf562c968d2e | quantecon/common_messages.py | quantecon/common_messages.py | """
Warnings Module
===============
Contains a collection of warning messages for consistent package wide notifications
"""
#-Numba-#
numba_import_fail_message = "Numba import failed. Falling back to non-optimized routine." | """
Warnings Module
===============
Contains a collection of warning messages for consistent package wide notifications
"""
#-Numba-#
numba_import_fail_message = ("Numba import failed. Falling back to non-optimized routines.\n"
"This will reduce the overall performance of this package.\n"
"To install please use the anaconda distribution.\n"
"http://continuum.io/downloads") | Update warning message if numba import fails | Update warning message if numba import fails
| Python | bsd-3-clause | gxxjjj/QuantEcon.py,agutieda/QuantEcon.py,andybrnr/QuantEcon.py,oyamad/QuantEcon.py,QuantEcon/QuantEcon.py,andybrnr/QuantEcon.py,gxxjjj/QuantEcon.py,QuantEcon/QuantEcon.py,dingliumath/quant-econ,mgahsan/QuantEcon.py,jviada/QuantEcon.py,dingliumath/quant-econ,agutieda/QuantEcon.py,jviada/QuantEcon.py,mgahsan/QuantEcon.py,oyamad/QuantEcon.py | """
Warnings Module
===============
Contains a collection of warning messages for consistent package wide notifications
"""
#-Numba-#
numba_import_fail_message = ("Numba import failed. Falling back to non-optimized routines.\n"
"This will reduce the overall performance of this package.\n"
"To install please use the anaconda distribution.\n"
"http://continuum.io/downloads") | Update warning message if numba import fails
"""
Warnings Module
===============
Contains a collection of warning messages for consistent package wide notifications
"""
#-Numba-#
numba_import_fail_message = "Numba import failed. Falling back to non-optimized routine." |
3e0b015da6a2c9ef648e54959e6f3aab1509a036 | kippt_reader/settings/production.py | kippt_reader/settings/production.py | from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
| from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
| Add SECURE_REDIRECT_EXEMPT to old HTTP callbacks | Add SECURE_REDIRECT_EXEMPT to old HTTP callbacks | Python | mit | jpadilla/feedleap,jpadilla/feedleap | from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_REDIRECT_EXEMPT = [
'^(?!hub/).*'
]
| Add SECURE_REDIRECT_EXEMPT to old HTTP callbacks
from os import environ
import dj_database_url
from .base import *
INSTALLED_APPS += (
'djangosecure',
)
PRODUCTION_MIDDLEWARE_CLASSES = (
'djangosecure.middleware.SecurityMiddleware',
)
MIDDLEWARE_CLASSES = PRODUCTION_MIDDLEWARE_CLASSES + MIDDLEWARE_CLASSES
DATABASES = {'default': dj_database_url.config()}
SECRET_KEY = environ.get('SECRET_KEY')
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ALLOWED_HOSTS = [DOMAIN]
# django-secure
SESSION_COOKIE_SECURE = True
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 15
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
SECURE_FRAME_DENY = True
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
0d80c81bbc6280e13d1702a9df210980e5852174 | utils/clear_redis.py | utils/clear_redis.py | """Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis()
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
| Add utility for clearing out redis keys from tests. | Add utility for clearing out redis keys from tests.
| Python | bsd-3-clause | harrissoerja/vumi,TouK/vumi,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix | """Utility for clearing all keys out of redis -- do not use in production!"""
import sys
from optparse import OptionParser
import redis
def option_parser():
parser = OptionParser()
parser.add_option("-f", "--force",
action="store_true", dest="force", default=False,
help="Don't ask for confirmation.")
return parser
def main():
parser = option_parser()
options, args = parser.parse_args()
if args:
parser.print_help()
return 1
if not options.force:
confirm = raw_input("About to delete ALL redis keys. "
"Press Y to confirm, N to exit: ")
if confirm.lower() != 'y':
return 1
r_server = redis.Redis()
keys = r_server.keys()
for key in keys:
r_server.delete(key)
print "Deleted %i keys." % len(keys)
return 0
if __name__ == "__main__":
sys.exit(main())
| Add utility for clearing out redis keys from tests.
|
|
5c000543ce943619ea89b2443395a2ee10c49ee0 | solutions/beecrowd/1010/1010.py | solutions/beecrowd/1010/1010.py | import sys
s = 0.0
for line in sys.stdin:
a, b, c = line.split()
a, b, c = int(a), int(b), float(c)
s += c * b
print(f'VALOR A PAGAR: R$ {s:.2f}')
| import sys
s = 0.0
for line in sys.stdin:
_, b, c = line.split()
b, c = int(b), float(c)
s += c * b
print(f'VALOR A PAGAR: R$ {s:.2f}')
| Refactor python version of Simple Calculate | Refactor python version of Simple Calculate
| Python | mit | deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground | import sys
s = 0.0
for line in sys.stdin:
_, b, c = line.split()
b, c = int(b), float(c)
s += c * b
print(f'VALOR A PAGAR: R$ {s:.2f}')
| Refactor python version of Simple Calculate
import sys
s = 0.0
for line in sys.stdin:
a, b, c = line.split()
a, b, c = int(a), int(b), float(c)
s += c * b
print(f'VALOR A PAGAR: R$ {s:.2f}')
|
79cb9edf45ed77cdaa851e45d71f10c69db41221 | benchexec/tools/yogar-cbmc-parallel.py | benchexec/tools/yogar-cbmc-parallel.py | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| Add forgotten program file for deployment | Add forgotten program file for deployment
| Python | apache-2.0 | ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
REQUIRED_PATHS = [
"yogar-cbmc"
]
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
| Add forgotten program file for deployment
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
yogar_cbmc = __import__("benchexec.tools.yogar-cbmc", fromlist=["Tool"])
class Tool(yogar_cbmc.Tool):
def executable(self):
return util.find_executable('yogar-cbmc-parallel')
def name(self):
return 'Yogar-CBMC-Parallel'
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
return [executable] + options + tasks
|
f4c8f003a4ffdd8e64468d261aa2cd34d58f1b9d | src/compdb/__init__.py | src/compdb/__init__.py | import warnings
from signac import *
msg = "compdb was renamed to signac. Please import signac in the future."
warnings.warn(DeprecationWarning, msg)
| import warnings
from signac import *
__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
print('Warning!',msg)
warnings.warn(msg, DeprecationWarning)
| Add surrogate compdb package, linking to signac. | Add surrogate compdb package, linking to signac.
Provided to guarantee compatibility.
Prints warning on import.
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | import warnings
from signac import *
__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
print('Warning!',msg)
warnings.warn(msg, DeprecationWarning)
| Add surrogate compdb package, linking to signac.
Provided to guarantee compatibility.
Prints warning on import.
import warnings
from signac import *
msg = "compdb was renamed to signac. Please import signac in the future."
warnings.warn(DeprecationWarning, msg)
|
11c22561bd0475f9b58befd8bb47068c7c3a652a | api/players/management/commands/update_all_player_mmrs.py | api/players/management/commands/update_all_player_mmrs.py | import time
from datetime import timedelta
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from django.utils import timezone
from players.models import Player
class Command(BaseCommand):
def handle(self, *args, **options):
start_date = timezone.now() - timedelta(days=7)
players = Player.objects.filter(Q(mmr_last_updated__isnull=True) | Q(mmr_last_updated__lt=start_date))
self.stdout.write('Updating MMR for {} players.'.format(players.count()))
for player in players:
player.update_mmr()
time.sleep(1)
self.stdout.write(self.style.SUCCESS('Finished updating all player MMRs.'))
| Add management command to update all player MMRs | Add management command to update all player MMRs
| Python | apache-2.0 | prattl/teamfinder,prattl/teamfinder,prattl/teamfinder,prattl/teamfinder | import time
from datetime import timedelta
from django.core.management.base import BaseCommand, CommandError
from django.db.models import Q
from django.utils import timezone
from players.models import Player
class Command(BaseCommand):
def handle(self, *args, **options):
start_date = timezone.now() - timedelta(days=7)
players = Player.objects.filter(Q(mmr_last_updated__isnull=True) | Q(mmr_last_updated__lt=start_date))
self.stdout.write('Updating MMR for {} players.'.format(players.count()))
for player in players:
player.update_mmr()
time.sleep(1)
self.stdout.write(self.style.SUCCESS('Finished updating all player MMRs.'))
| Add management command to update all player MMRs
|
|
ac9d87bf486f8062d1c2d8122e2dc5660546a22f | menpofit/clm/expert/base.py | menpofit/clm/expert/base.py | import numpy as np
from menpofit.math.correlationfilter import mccf, imccf
# TODO: document me!
class IncrementalCorrelationFilterThinWrapper(object):
r"""
"""
def __init__(self, cf_callable=mccf, icf_callable=imccf):
self.cf_callable = cf_callable
self.icf_callable = icf_callable
def increment(self, A, B, n_x, Z, t):
r"""
"""
# Turn list of X into ndarray
if isinstance(Z, list):
Z = np.asarray(Z)
return self.icf_callable(A, B, n_x, Z, t)
def train(self, X, t):
r"""
"""
# Turn list of X into ndarray
if isinstance(X, list):
X = np.asarray(X)
# Return linear svm filter and bias
return self.cf_callable(X, t)
| Add dummy wrapper for correlation filters | Add dummy wrapper for correlation filters
| Python | bsd-3-clause | grigorisg9gr/menpofit,yuxiang-zhou/menpofit,yuxiang-zhou/menpofit,grigorisg9gr/menpofit | import numpy as np
from menpofit.math.correlationfilter import mccf, imccf
# TODO: document me!
class IncrementalCorrelationFilterThinWrapper(object):
r"""
"""
def __init__(self, cf_callable=mccf, icf_callable=imccf):
self.cf_callable = cf_callable
self.icf_callable = icf_callable
def increment(self, A, B, n_x, Z, t):
r"""
"""
# Turn list of X into ndarray
if isinstance(Z, list):
Z = np.asarray(Z)
return self.icf_callable(A, B, n_x, Z, t)
def train(self, X, t):
r"""
"""
# Turn list of X into ndarray
if isinstance(X, list):
X = np.asarray(X)
# Return linear svm filter and bias
return self.cf_callable(X, t)
| Add dummy wrapper for correlation filters
|
|
be2d33152c07594465c9b838c060edeaa8bc6ddc | tests/main.py | tests/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from ParseArticleReferenceTest import ParseArticleReferenceTest
from SortReferencesVisitorTest import SortReferencesVisitorTest
from ParseEditTest import ParseEditTest
from ParseAlineaReferenceTest import ParseAlineaReferenceTest
from ParseAlineaDefinitionTest import ParseAlineaDefinitionTest
from ParseSentenceDefinitionTest import ParseSentenceDefinitionTest
from ParseHeader2ReferenceTest import ParseHeader2ReferenceTest
from ParseHeader2DefinitionTest import ParseHeader2DefinitionTest
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from ParseArticleReferenceTest import ParseArticleReferenceTest
from SortReferencesVisitorTest import SortReferencesVisitorTest
from ParseEditTest import ParseEditTest
from ParseAlineaReferenceTest import ParseAlineaReferenceTest
from ParseAlineaDefinitionTest import ParseAlineaDefinitionTest
from ParseHeader2ReferenceTest import ParseHeader2ReferenceTest
from ParseHeader2DefinitionTest import ParseHeader2DefinitionTest
if __name__ == '__main__':
unittest.main()
| Fix broken reference to ParseSentenceDefinitionTest. | Fix broken reference to ParseSentenceDefinitionTest.
| Python | mit | Legilibre/duralex | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from ParseArticleReferenceTest import ParseArticleReferenceTest
from SortReferencesVisitorTest import SortReferencesVisitorTest
from ParseEditTest import ParseEditTest
from ParseAlineaReferenceTest import ParseAlineaReferenceTest
from ParseAlineaDefinitionTest import ParseAlineaDefinitionTest
from ParseHeader2ReferenceTest import ParseHeader2ReferenceTest
from ParseHeader2DefinitionTest import ParseHeader2DefinitionTest
if __name__ == '__main__':
unittest.main()
| Fix broken reference to ParseSentenceDefinitionTest.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from ParseArticleReferenceTest import ParseArticleReferenceTest
from SortReferencesVisitorTest import SortReferencesVisitorTest
from ParseEditTest import ParseEditTest
from ParseAlineaReferenceTest import ParseAlineaReferenceTest
from ParseAlineaDefinitionTest import ParseAlineaDefinitionTest
from ParseSentenceDefinitionTest import ParseSentenceDefinitionTest
from ParseHeader2ReferenceTest import ParseHeader2ReferenceTest
from ParseHeader2DefinitionTest import ParseHeader2DefinitionTest
if __name__ == '__main__':
unittest.main()
|
fb986717d5016b1cb3c6b953020ff2aff037b3dc | call_server/extensions.py | call_server/extensions.py | # define flask extensions in separate file, to resolve import dependencies
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from flask_caching import Cache
cache = Cache()
from flask_assets import Environment
assets = Environment()
from flask_babel import Babel
babel = Babel()
from flask_mail import Mail
mail = Mail()
from flask_login import LoginManager
login_manager = LoginManager()
from flask_restless import APIManager
rest = APIManager()
from flask_wtf.csrf import CSRFProtect
csrf = CSRFProtect()
from flask_store import Store
store = Store()
from flask_rq2 import RQ
rq = RQ()
from flask_talisman import Talisman
CALLPOWER_CSP = {
'default-src':'\'self\'',
'script-src':['\'self\'', '\'unsafe-inline\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'],
'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'],
'font-src': ['\'self\'', 'fonts.gstatic.com'],
}
talisman = Talisman() | # define flask extensions in separate file, to resolve import dependencies
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from flask_caching import Cache
cache = Cache()
from flask_assets import Environment
assets = Environment()
from flask_babel import Babel
babel = Babel()
from flask_mail import Mail
mail = Mail()
from flask_login import LoginManager
login_manager = LoginManager()
from flask_restless import APIManager
rest = APIManager()
from flask_wtf.csrf import CSRFProtect
csrf = CSRFProtect()
from flask_store import Store
store = Store()
from flask_rq2 import RQ
rq = RQ()
from flask_talisman import Talisman
CALLPOWER_CSP = {
'default-src':'\'self\'',
'script-src':['\'self\'', '\'unsafe-inline\'', '\'unsafe-eval\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'],
'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'],
'font-src': ['\'self\'', 'fonts.gstatic.com'],
}
# unsafe-inline needed to render <script> tags without nonce
# unsafe-eval needed to run bootstrap templates
talisman = Talisman() | Include script-src unsafe-eval to allow underscore templating Long term, we should pre-compile with webpack to avoid needing this | Include script-src unsafe-eval to allow underscore templating
Long term, we should pre-compile with webpack to avoid needing this
| Python | agpl-3.0 | OpenSourceActivismTech/call-power,spacedogXYZ/call-power,18mr/call-congress,spacedogXYZ/call-power,spacedogXYZ/call-power,18mr/call-congress,OpenSourceActivismTech/call-power,OpenSourceActivismTech/call-power,spacedogXYZ/call-power,OpenSourceActivismTech/call-power,18mr/call-congress,18mr/call-congress | # define flask extensions in separate file, to resolve import dependencies
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from flask_caching import Cache
cache = Cache()
from flask_assets import Environment
assets = Environment()
from flask_babel import Babel
babel = Babel()
from flask_mail import Mail
mail = Mail()
from flask_login import LoginManager
login_manager = LoginManager()
from flask_restless import APIManager
rest = APIManager()
from flask_wtf.csrf import CSRFProtect
csrf = CSRFProtect()
from flask_store import Store
store = Store()
from flask_rq2 import RQ
rq = RQ()
from flask_talisman import Talisman
CALLPOWER_CSP = {
'default-src':'\'self\'',
'script-src':['\'self\'', '\'unsafe-inline\'', '\'unsafe-eval\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'],
'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'],
'font-src': ['\'self\'', 'fonts.gstatic.com'],
}
# unsafe-inline needed to render <script> tags without nonce
# unsafe-eval needed to run bootstrap templates
talisman = Talisman() | Include script-src unsafe-eval to allow underscore templating
Long term, we should pre-compile with webpack to avoid needing this
# define flask extensions in separate file, to resolve import dependencies
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from flask_caching import Cache
cache = Cache()
from flask_assets import Environment
assets = Environment()
from flask_babel import Babel
babel = Babel()
from flask_mail import Mail
mail = Mail()
from flask_login import LoginManager
login_manager = LoginManager()
from flask_restless import APIManager
rest = APIManager()
from flask_wtf.csrf import CSRFProtect
csrf = CSRFProtect()
from flask_store import Store
store = Store()
from flask_rq2 import RQ
rq = RQ()
from flask_talisman import Talisman
CALLPOWER_CSP = {
'default-src':'\'self\'',
'script-src':['\'self\'', '\'unsafe-inline\'', 'cdnjs.cloudflare.com', 'media.twiliocdn.com'],
'style-src': ['\'self\'', '\'unsafe-inline\'', 'fonts.googleapis.com'],
'font-src': ['\'self\'', 'fonts.gstatic.com'],
}
talisman = Talisman() |
0e98d0fae4a81deec57ae162b8db5bcf950b3ea3 | cnxarchive/sql/migrations/20160128110515_mimetype_on_files_table.py | cnxarchive/sql/migrations/20160128110515_mimetype_on_files_table.py | # -*- coding: utf-8 -*-
"""\
- Add a ``media_type`` column to the ``files`` table.
- Move the mimetype value from ``module_files`` to ``files``.
"""
from __future__ import print_function
import sys
def up(cursor):
# Add a ``media_type`` column to the ``files`` table.
cursor.execute("ALTER TABLE files ADD COLUMN media_type TEXT")
# Move the mimetype value from ``module_files`` to ``files``.
cursor.execute("UPDATE files AS f SET media_type = mf.mimetype "
"FROM module_files AS mf "
"WHERE mf.fileid = f.fileid")
# Warn about missing mimetype.
cursor.execute("SELECT fileid, sha1 "
"FROM files AS f "
"WHERE f.fileid NOT IN (SELECT fileid FROM module_files)")
rows = '\n'.join(['{}, {}'.format(fid, sha1)
for fid, sha1 in cursor.fetchall()])
print("These files (fileid, sha1) do not have a corresponding "
"module_files entry:\n{}\n".format(rows),
file=sys.stderr)
def down(cursor):
# Remove the ``mimetype`` column from the ``files`` table.
cursor.execute("ALTER TABLE files DROP COLUMN media_type")
| Move mimetype column from module_files to files | Move mimetype column from module_files to files
| Python | agpl-3.0 | Connexions/cnx-archive,Connexions/cnx-archive | # -*- coding: utf-8 -*-
"""\
- Add a ``media_type`` column to the ``files`` table.
- Move the mimetype value from ``module_files`` to ``files``.
"""
from __future__ import print_function
import sys
def up(cursor):
# Add a ``media_type`` column to the ``files`` table.
cursor.execute("ALTER TABLE files ADD COLUMN media_type TEXT")
# Move the mimetype value from ``module_files`` to ``files``.
cursor.execute("UPDATE files AS f SET media_type = mf.mimetype "
"FROM module_files AS mf "
"WHERE mf.fileid = f.fileid")
# Warn about missing mimetype.
cursor.execute("SELECT fileid, sha1 "
"FROM files AS f "
"WHERE f.fileid NOT IN (SELECT fileid FROM module_files)")
rows = '\n'.join(['{}, {}'.format(fid, sha1)
for fid, sha1 in cursor.fetchall()])
print("These files (fileid, sha1) do not have a corresponding "
"module_files entry:\n{}\n".format(rows),
file=sys.stderr)
def down(cursor):
# Remove the ``mimetype`` column from the ``files`` table.
cursor.execute("ALTER TABLE files DROP COLUMN media_type")
| Move mimetype column from module_files to files
|
|
4cb1b6b8656d4e3893b3aa8fe5766b507afa6d24 | cmsplugin_rt/button/cms_plugins.py | cmsplugin_rt/button/cms_plugins.py | from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.models.pluginmodel import CMSPlugin
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from models import *
bootstrap_module_name = _("Widgets")
layout_module_name = _("Layout elements")
generic_module_name = _("Generic")
meta_module_name = _("Meta elements")
class ButtonPlugin(CMSPluginBase):
model = ButtonPluginModel
name = _("Button")
#module = bootstrap_module_name
render_template = "button_plugin.html"
def render(self, context, instance, placeholder):
context['instance'] = instance
if instance.page_link:
context['link'] = instance.page_link.get_absolute_url()
else:
context['link'] = instance.button_link
return context
plugin_pool.register_plugin(ButtonPlugin)
| from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.models.pluginmodel import CMSPlugin
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from models import *
bootstrap_module_name = _("Widgets")
layout_module_name = _("Layout elements")
generic_module_name = _("Generic")
meta_module_name = _("Meta elements")
class ButtonPlugin(CMSPluginBase):
model = ButtonPluginModel
name = _("Button")
#module = bootstrap_module_name
render_template = "button_plugin.html"
text_enabled = True
def render(self, context, instance, placeholder):
context['instance'] = instance
if instance.page_link:
context['link'] = instance.page_link.get_absolute_url()
else:
context['link'] = instance.button_link
return context
plugin_pool.register_plugin(ButtonPlugin)
| Make Button plugin usable inside Text plugin | Make Button plugin usable inside Text plugin
| Python | bsd-3-clause | RacingTadpole/cmsplugin-rt | from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.models.pluginmodel import CMSPlugin
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from models import *
bootstrap_module_name = _("Widgets")
layout_module_name = _("Layout elements")
generic_module_name = _("Generic")
meta_module_name = _("Meta elements")
class ButtonPlugin(CMSPluginBase):
model = ButtonPluginModel
name = _("Button")
#module = bootstrap_module_name
render_template = "button_plugin.html"
text_enabled = True
def render(self, context, instance, placeholder):
context['instance'] = instance
if instance.page_link:
context['link'] = instance.page_link.get_absolute_url()
else:
context['link'] = instance.button_link
return context
plugin_pool.register_plugin(ButtonPlugin)
| Make Button plugin usable inside Text plugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.models.pluginmodel import CMSPlugin
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from models import *
bootstrap_module_name = _("Widgets")
layout_module_name = _("Layout elements")
generic_module_name = _("Generic")
meta_module_name = _("Meta elements")
class ButtonPlugin(CMSPluginBase):
model = ButtonPluginModel
name = _("Button")
#module = bootstrap_module_name
render_template = "button_plugin.html"
def render(self, context, instance, placeholder):
context['instance'] = instance
if instance.page_link:
context['link'] = instance.page_link.get_absolute_url()
else:
context['link'] = instance.button_link
return context
plugin_pool.register_plugin(ButtonPlugin)
|
ee8acd5a476b0dcce9b79f70e4c70186ea4d5dc0 | miniutils.py | miniutils.py | import __builtin__
def any(it):
for obj in it:
if obj:
return True
def all(it):
for obj in it:
if not obj:
return False
return True
def max(it, key=None):
if key is not None:
k, value = max((key(value), value) for value in it)
return value
return max(it)
def min(it, key=None):
if key is not None:
k, value = min((key(value), value) for value in it)
return value
return min(it)
class Condition(object):
"""
This wraps a condition so that it can be shared by everyone and modified
by whomever wants to.
"""
def __init__(self, value):
self.value = value
def __nonzero__(self):
return self.value
class ComparableObjectMixin(object):
def __hash__(self):
"Implement in subclasses"
raise NotImplementedError
def __eq__(self, other):
"Implement in subclasses"
return NotImplemented | import __builtin__
def any(it):
for obj in it:
if obj:
return True
return False
def all(it):
for obj in it:
if not obj:
return False
return True
def max(it, key=None):
if key is not None:
k, value = max((key(value), value) for value in it)
return value
return max(it)
def min(it, key=None):
if key is not None:
k, value = min((key(value), value) for value in it)
return value
return min(it)
class Condition(object):
"""
This wraps a condition so that it can be shared by everyone and modified
by whomever wants to.
"""
def __init__(self, value):
self.value = value
def __nonzero__(self):
return self.value
class ComparableObjectMixin(object):
def __hash__(self):
"Implement in subclasses"
raise NotImplementedError
def __eq__(self, other):
"Implement in subclasses"
return NotImplemented | Return an actual bool from any() | Return an actual bool from any()
| Python | bsd-2-clause | markflorisson/minivect,markflorisson/minivect | import __builtin__
def any(it):
for obj in it:
if obj:
return True
return False
def all(it):
for obj in it:
if not obj:
return False
return True
def max(it, key=None):
if key is not None:
k, value = max((key(value), value) for value in it)
return value
return max(it)
def min(it, key=None):
if key is not None:
k, value = min((key(value), value) for value in it)
return value
return min(it)
class Condition(object):
"""
This wraps a condition so that it can be shared by everyone and modified
by whomever wants to.
"""
def __init__(self, value):
self.value = value
def __nonzero__(self):
return self.value
class ComparableObjectMixin(object):
def __hash__(self):
"Implement in subclasses"
raise NotImplementedError
def __eq__(self, other):
"Implement in subclasses"
return NotImplemented | Return an actual bool from any()
import __builtin__
def any(it):
for obj in it:
if obj:
return True
def all(it):
for obj in it:
if not obj:
return False
return True
def max(it, key=None):
if key is not None:
k, value = max((key(value), value) for value in it)
return value
return max(it)
def min(it, key=None):
if key is not None:
k, value = min((key(value), value) for value in it)
return value
return min(it)
class Condition(object):
"""
This wraps a condition so that it can be shared by everyone and modified
by whomever wants to.
"""
def __init__(self, value):
self.value = value
def __nonzero__(self):
return self.value
class ComparableObjectMixin(object):
def __hash__(self):
"Implement in subclasses"
raise NotImplementedError
def __eq__(self, other):
"Implement in subclasses"
return NotImplemented |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.