commit
stringlengths 40
40
| old_file
stringlengths 5
117
| new_file
stringlengths 5
117
| old_contents
stringlengths 0
1.93k
| new_contents
stringlengths 19
3.3k
| subject
stringlengths 17
320
| message
stringlengths 18
3.28k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
42.4k
| completion
stringlengths 19
3.3k
| prompt
stringlengths 21
3.65k
|
---|---|---|---|---|---|---|---|---|---|---|---|
dc755e07516e1cbbcd01f01e8be59abf8f1a6329 | humfrey/update/management/commands/update_dataset.py | humfrey/update/management/commands/update_dataset.py | import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': 'manual',
'queued_at': datetime.datetime.now(),
})))
| import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
trigger = args[1] if len(args) > 1 else 'manual'
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': trigger,
'queued_at': datetime.datetime.now(),
})))
if __name__ == '__main__':
import sys
Command().handle(*sys.argv[1:]) | Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script. | Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
| Python | bsd-3-clause | ox-it/humfrey,ox-it/humfrey,ox-it/humfrey | import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
trigger = args[1] if len(args) > 1 else 'manual'
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': trigger,
'queued_at': datetime.datetime.now(),
})))
if __name__ == '__main__':
import sys
Command().handle(*sys.argv[1:]) | Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
import base64
import datetime
import os
import pickle
from lxml import etree
import redis
from django.core.management.base import BaseCommand
from django.conf import settings
from humfrey.update.longliving.updater import Updater
class Command(BaseCommand):
def handle(self, *args, **options):
config_filename = os.path.abspath(args[0])
with open(config_filename, 'r') as f:
config_file = etree.parse(f)
dataset_name = config_file.xpath('meta/name')[0].text
client = redis.client.Redis(**settings.REDIS_PARAMS)
client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({
'config_filename': config_filename,
'name': dataset_name,
'trigger': 'manual',
'queued_at': datetime.datetime.now(),
})))
|
dbc7ad0dad6161d19f65bbf186d84d23628cfd16 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
'coverage',
]
setup(
name='pic2map',
version='0.1.0',
description="Display pictures location in a map",
long_description=readme + '\n\n' + history,
author="Javier Collado",
author_email='[email protected]',
url='https://github.com/jcollado/pic2map',
packages=[
'pic2map',
],
package_dir={'pic2map':
'pic2map'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='picture map location',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=test_requirements
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
'coverage',
]
setup(
name='pic2map',
version='0.1.0',
description="Display pictures location in a map",
long_description=readme + '\n\n' + history,
author="Javier Collado",
author_email='[email protected]',
url='https://github.com/jcollado/pic2map',
packages=[
'pic2map',
],
package_dir={'pic2map':
'pic2map'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='picture map location',
entry_points={
'console_scripts': [
'pic2map = pic2map.cli:main',
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=test_requirements
)
| Add entry point for the CLI script | Add entry point for the CLI script
| Python | mit | jcollado/pic2map,jcollado/pic2map,jcollado/pic2map | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
'coverage',
]
setup(
name='pic2map',
version='0.1.0',
description="Display pictures location in a map",
long_description=readme + '\n\n' + history,
author="Javier Collado",
author_email='[email protected]',
url='https://github.com/jcollado/pic2map',
packages=[
'pic2map',
],
package_dir={'pic2map':
'pic2map'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='picture map location',
entry_points={
'console_scripts': [
'pic2map = pic2map.cli:main',
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=test_requirements
)
| Add entry point for the CLI script
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
'coverage',
]
setup(
name='pic2map',
version='0.1.0',
description="Display pictures location in a map",
long_description=readme + '\n\n' + history,
author="Javier Collado",
author_email='[email protected]',
url='https://github.com/jcollado/pic2map',
packages=[
'pic2map',
],
package_dir={'pic2map':
'pic2map'},
include_package_data=True,
install_requires=requirements,
license="BSD",
zip_safe=False,
keywords='picture map location',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=test_requirements
)
|
08ea2c257780f23cec5dfb923e80966fdf9c5ac8 | IPython/zmq/zmqshell.py | IPython/zmq/zmqshell.py | import sys
from subprocess import Popen, PIPE
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
class ZMQInteractiveShell(InteractiveShell):
"""A subclass of InteractiveShell for ZMQ."""
def system(self, cmd):
cmd = self.var_expand(cmd, depth=2)
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
for line in p.stdout.read().split('\n'):
if len(line) > 0:
print line
for line in p.stderr.read().split('\n'):
if len(line) > 0:
print line
return p.wait()
InteractiveShellABC.register(ZMQInteractiveShell)
| import sys
from subprocess import Popen, PIPE
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
class ZMQInteractiveShell(InteractiveShell):
"""A subclass of InteractiveShell for ZMQ."""
def system(self, cmd):
cmd = self.var_expand(cmd, depth=2)
sys.stdout.flush()
sys.stderr.flush()
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
for line in p.stdout.read().split('\n'):
if len(line) > 0:
print line
for line in p.stderr.read().split('\n'):
if len(line) > 0:
print line
return p.wait()
InteractiveShellABC.register(ZMQInteractiveShell)
| Add flushing to stdout/stderr in system calls. | Add flushing to stdout/stderr in system calls.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | import sys
from subprocess import Popen, PIPE
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
class ZMQInteractiveShell(InteractiveShell):
"""A subclass of InteractiveShell for ZMQ."""
def system(self, cmd):
cmd = self.var_expand(cmd, depth=2)
sys.stdout.flush()
sys.stderr.flush()
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
for line in p.stdout.read().split('\n'):
if len(line) > 0:
print line
for line in p.stderr.read().split('\n'):
if len(line) > 0:
print line
return p.wait()
InteractiveShellABC.register(ZMQInteractiveShell)
| Add flushing to stdout/stderr in system calls.
import sys
from subprocess import Popen, PIPE
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
class ZMQInteractiveShell(InteractiveShell):
"""A subclass of InteractiveShell for ZMQ."""
def system(self, cmd):
cmd = self.var_expand(cmd, depth=2)
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE)
for line in p.stdout.read().split('\n'):
if len(line) > 0:
print line
for line in p.stderr.read().split('\n'):
if len(line) > 0:
print line
return p.wait()
InteractiveShellABC.register(ZMQInteractiveShell)
|
7dbdae4cbf8e4d78f84c2b8163cd62c7935d3890 | bandicoot/tests/generate_regressions.py | bandicoot/tests/generate_regressions.py | import bandicoot as bc
from os.path import dirname, abspath, join
if __name__ == '__main__':
empty_user = bc.User()
empty_user.attributes['empty'] = True
empty_path = join(dirname(abspath(__file__)), 'samples/empty_user.json')
bc.io.to_json(bc.utils.all(empty_user, summary='extended', flatten=True), empty_path)
sample_user = bc.tests.generate_user.sample_user()
sample_path = join(dirname(abspath(__file__)), 'samples/sample_user_all_metrics.json')
bc.io.to_json(bc.utils.all(sample_user, summary='extended', groupby=None, flatten=True), sample_path)
| Add a simple command to generate automatic regressions | Add a simple command to generate automatic regressions
| Python | mit | ulfaslak/bandicoot,yvesalexandre/bandicoot,econandrew/bandicoot,econandrew/bandicoot,yvesalexandre/bandicoot,econandrew/bandicoot,ulfaslak/bandicoot,yvesalexandre/bandicoot,ulfaslak/bandicoot | import bandicoot as bc
from os.path import dirname, abspath, join
if __name__ == '__main__':
empty_user = bc.User()
empty_user.attributes['empty'] = True
empty_path = join(dirname(abspath(__file__)), 'samples/empty_user.json')
bc.io.to_json(bc.utils.all(empty_user, summary='extended', flatten=True), empty_path)
sample_user = bc.tests.generate_user.sample_user()
sample_path = join(dirname(abspath(__file__)), 'samples/sample_user_all_metrics.json')
bc.io.to_json(bc.utils.all(sample_user, summary='extended', groupby=None, flatten=True), sample_path)
| Add a simple command to generate automatic regressions
|
|
5c447d46a8a62407549650ada98131968ace9921 | spyc/scheduler.py | spyc/scheduler.py | from spyc.graph import Vertex, find_cycle, topological_sort
class Scheduler(object):
def __init__(self):
self.specs = {}
def ensure(self, spec):
"""Require that ``spec`` is satisfied."""
if spec.key() in self.specs:
self.specs[spec.key()].data.merge(spec)
else:
self.specs[spec.key()] = Vertex(spec)
def depend(self, first, next):
"""Specify that ``first`` depends on ``next``.
This also has the effect of invoking ``ensure`` on both resources.
"""
first.schedule(self)
next.schedule(self)
self.specs[first.key()].edges.add(self.specs[next.key()])
def apply(self):
verticies = set(self.specs.values())
cycle = find_cycle(verticies)
if cycle is not None:
assert False # TODO proper checking
for v in topological_sort(verticies):
v.data.apply()
| from spyc.graph import Vertex, find_cycle, topological_sort
class CircularDependency(Exception):
pass
class Scheduler(object):
def __init__(self):
self.specs = {}
def ensure(self, spec):
"""Require that ``spec`` is satisfied."""
if spec.key() in self.specs:
self.specs[spec.key()].data.merge(spec)
else:
self.specs[spec.key()] = Vertex(spec)
def depend(self, first, next):
"""Specify that ``first`` depends on ``next``.
This also has the effect of invoking ``ensure`` on both resources.
"""
first.schedule(self)
next.schedule(self)
self.specs[first.key()].edges.add(self.specs[next.key()])
def apply(self):
verticies = set(self.specs.values())
cycle = find_cycle(verticies)
if cycle is not None:
raise CircularDependency(cycle)
for v in topological_sort(verticies):
v.data.apply()
| Raise a more useful error for circular deps. | Raise a more useful error for circular deps.
| Python | lgpl-2.1 | zenhack/spyc | from spyc.graph import Vertex, find_cycle, topological_sort
class CircularDependency(Exception):
pass
class Scheduler(object):
def __init__(self):
self.specs = {}
def ensure(self, spec):
"""Require that ``spec`` is satisfied."""
if spec.key() in self.specs:
self.specs[spec.key()].data.merge(spec)
else:
self.specs[spec.key()] = Vertex(spec)
def depend(self, first, next):
"""Specify that ``first`` depends on ``next``.
This also has the effect of invoking ``ensure`` on both resources.
"""
first.schedule(self)
next.schedule(self)
self.specs[first.key()].edges.add(self.specs[next.key()])
def apply(self):
verticies = set(self.specs.values())
cycle = find_cycle(verticies)
if cycle is not None:
raise CircularDependency(cycle)
for v in topological_sort(verticies):
v.data.apply()
| Raise a more useful error for circular deps.
from spyc.graph import Vertex, find_cycle, topological_sort
class Scheduler(object):
def __init__(self):
self.specs = {}
def ensure(self, spec):
"""Require that ``spec`` is satisfied."""
if spec.key() in self.specs:
self.specs[spec.key()].data.merge(spec)
else:
self.specs[spec.key()] = Vertex(spec)
def depend(self, first, next):
"""Specify that ``first`` depends on ``next``.
This also has the effect of invoking ``ensure`` on both resources.
"""
first.schedule(self)
next.schedule(self)
self.specs[first.key()].edges.add(self.specs[next.key()])
def apply(self):
verticies = set(self.specs.values())
cycle = find_cycle(verticies)
if cycle is not None:
assert False # TODO proper checking
for v in topological_sort(verticies):
v.data.apply()
|
fb7e771646946637824b06eaf6d21b8c1b2be164 | main.py | main.py | # -*- coding: utf-8 -*-
'''
url-shortener
==============
An application for generating and storing shorter aliases for
requested urls. Uses `spam-lists`__ to prevent generating a short url
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
'''
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "[email protected]"
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016 Piotr Rusin'
if not app.debug:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'])
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
| # -*- coding: utf-8 -*-
'''
url-shortener
==============
An application for generating and storing shorter aliases for
requested urls. Uses `spam-lists`__ to prevent generating a short url
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
'''
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "[email protected]"
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'])
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
| Make application use log file if its name is not None | Make application use log file if its name is not None
| Python | mit | piotr-rusin/url-shortener,piotr-rusin/url-shortener | # -*- coding: utf-8 -*-
'''
url-shortener
==============
An application for generating and storing shorter aliases for
requested urls. Uses `spam-lists`__ to prevent generating a short url
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
'''
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "[email protected]"
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'])
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
| Make application use log file if its name is not None
# -*- coding: utf-8 -*-
'''
url-shortener
==============
An application for generating and storing shorter aliases for
requested urls. Uses `spam-lists`__ to prevent generating a short url
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
'''
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "[email protected]"
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2016 Piotr Rusin'
if not app.debug:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'])
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
9704602f26b4a9aab15caf00795d283c5f6e4ae4 | src/fiona/tool.py | src/fiona/tool.py | # The Fiona data tool.
if __name__ == '__main__':
import argparse
import fiona
import json
import pprint
import sys
parser = argparse.ArgumentParser(
description="Serialize a file to GeoJSON or view its description")
parser.add_argument('-i', '--info',
action='store_true',
help='View pretty printed description information only')
parser.add_argument('-j', '--json',
action='store_true',
help='Output description as indented JSON')
parser.add_argument('filename', help="data file name")
args = parser.parse_args()
with fiona.open(args.filename, 'r') as col:
if args.info:
if args.json:
meta = col.meta.copy()
meta.update(name=args.filename)
print(json.dumps(meta, indent=2))
else:
print("\nDescription of: %r" % col)
print("\nCoordinate reference system (col.crs):")
pprint.pprint(meta['crs'])
print("\nFormat driver (col.driver):")
pprint.pprint(meta['driver'])
print("\nData description (col.schema):")
pprint.pprint(meta['schema'])
else:
print(json.dumps(list(col), indent=2))
| # The Fiona data tool.
if __name__ == '__main__':
import argparse
import fiona
import json
import pprint
import sys
parser = argparse.ArgumentParser(
description="Serialize a file to GeoJSON or view its description")
parser.add_argument('-i', '--info',
action='store_true',
help='View pretty printed description information only')
parser.add_argument('-j', '--json',
action='store_true',
help='Output description as indented JSON')
parser.add_argument('filename', help="data file name")
args = parser.parse_args()
with fiona.open(args.filename, 'r') as col:
if args.info:
if args.json:
meta = col.meta.copy()
meta.update(name=args.filename)
print(json.dumps(meta, indent=2))
else:
print("\nDescription of: %r" % col)
print("\nCoordinate reference system (col.crs):")
pprint.pprint(meta['crs'])
print("\nFormat driver (col.driver):")
pprint.pprint(meta['driver'])
print("\nData description (col.schema):")
pprint.pprint(meta['schema'])
else:
collection = {'type': 'FeatureCollection'}
collection['features'] = list(col)
print(json.dumps(collection, indent=2))
| Change record output to strict GeoJSON. | Change record output to strict GeoJSON.
Meaning features in a FeatureCollection.
| Python | bsd-3-clause | rbuffat/Fiona,Toblerity/Fiona,sgillies/Fiona,johanvdw/Fiona,perrygeo/Fiona,Toblerity/Fiona,perrygeo/Fiona,rbuffat/Fiona | # The Fiona data tool.
if __name__ == '__main__':
import argparse
import fiona
import json
import pprint
import sys
parser = argparse.ArgumentParser(
description="Serialize a file to GeoJSON or view its description")
parser.add_argument('-i', '--info',
action='store_true',
help='View pretty printed description information only')
parser.add_argument('-j', '--json',
action='store_true',
help='Output description as indented JSON')
parser.add_argument('filename', help="data file name")
args = parser.parse_args()
with fiona.open(args.filename, 'r') as col:
if args.info:
if args.json:
meta = col.meta.copy()
meta.update(name=args.filename)
print(json.dumps(meta, indent=2))
else:
print("\nDescription of: %r" % col)
print("\nCoordinate reference system (col.crs):")
pprint.pprint(meta['crs'])
print("\nFormat driver (col.driver):")
pprint.pprint(meta['driver'])
print("\nData description (col.schema):")
pprint.pprint(meta['schema'])
else:
collection = {'type': 'FeatureCollection'}
collection['features'] = list(col)
print(json.dumps(collection, indent=2))
| Change record output to strict GeoJSON.
Meaning features in a FeatureCollection.
# The Fiona data tool.
if __name__ == '__main__':
import argparse
import fiona
import json
import pprint
import sys
parser = argparse.ArgumentParser(
description="Serialize a file to GeoJSON or view its description")
parser.add_argument('-i', '--info',
action='store_true',
help='View pretty printed description information only')
parser.add_argument('-j', '--json',
action='store_true',
help='Output description as indented JSON')
parser.add_argument('filename', help="data file name")
args = parser.parse_args()
with fiona.open(args.filename, 'r') as col:
if args.info:
if args.json:
meta = col.meta.copy()
meta.update(name=args.filename)
print(json.dumps(meta, indent=2))
else:
print("\nDescription of: %r" % col)
print("\nCoordinate reference system (col.crs):")
pprint.pprint(meta['crs'])
print("\nFormat driver (col.driver):")
pprint.pprint(meta['driver'])
print("\nData description (col.schema):")
pprint.pprint(meta['schema'])
else:
print(json.dumps(list(col), indent=2))
|
cd62369097feba54172c0048c4ef0ec0713be6d3 | linter.py | linter.py |
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ivan Sobolev
# Copyright (c) 2016 Ivan Sobolev
#
# License: MIT
#
"""This module exports the Bemlint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Bemlint(NodeLinter):
"""Provides an interface to bemlint."""
syntax = ('html', 'html+tt2', 'html+tt3')
cmd = ('bemlint', '@', '--format', 'compact')
version_args = '--version'
version_re = r'v(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.4.5'
config_file = ('--config', '.bemlint.json')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
multiline = False
line_col_base = (1, 1)
error_stream = util.STREAM_BOTH
tempfile_suffix = 'bem'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ivan Sobolev
# Copyright (c) 2016 Ivan Sobolev
#
# License: MIT
#
"""This module exports the Bemlint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Bemlint(NodeLinter):
"""Provides an interface to bemlint."""
name = 'bemlint'
cmd = 'bemlint @ ${args}'
config_file = ('--config', '.bemlint.json')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
multiline = False
line_col_base = (1, 1)
error_stream = util.STREAM_BOTH
tempfile_suffix = 'bem'
defaults = {
'selector': 'text.html',
'--format': 'compact',
}
# the following attributes are marked useless for SL4
version_args = '--version'
version_re = r'v(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.4.5'
| Fix compatibility for SublimeLinter 4.12.0 | Fix compatibility for SublimeLinter 4.12.0
Signed-off-by: Jack Cherng <[email protected]>
| Python | mit | DesTincT/SublimeLinter-contrib-bemlint |
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ivan Sobolev
# Copyright (c) 2016 Ivan Sobolev
#
# License: MIT
#
"""This module exports the Bemlint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Bemlint(NodeLinter):
"""Provides an interface to bemlint."""
name = 'bemlint'
cmd = 'bemlint @ ${args}'
config_file = ('--config', '.bemlint.json')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
multiline = False
line_col_base = (1, 1)
error_stream = util.STREAM_BOTH
tempfile_suffix = 'bem'
defaults = {
'selector': 'text.html',
'--format': 'compact',
}
# the following attributes are marked useless for SL4
version_args = '--version'
version_re = r'v(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.4.5'
| Fix compatibility for SublimeLinter 4.12.0
Signed-off-by: Jack Cherng <[email protected]>
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ivan Sobolev
# Copyright (c) 2016 Ivan Sobolev
#
# License: MIT
#
"""This module exports the Bemlint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Bemlint(NodeLinter):
"""Provides an interface to bemlint."""
syntax = ('html', 'html+tt2', 'html+tt3')
cmd = ('bemlint', '@', '--format', 'compact')
version_args = '--version'
version_re = r'v(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.4.5'
config_file = ('--config', '.bemlint.json')
regex = (
r'^.+?: line (?P<line>\d+), col (?P<col>\d+), '
r'(?:(?P<error>Error)|(?P<warning>Warning)) - '
r'(?P<message>.+)'
)
multiline = False
line_col_base = (1, 1)
error_stream = util.STREAM_BOTH
tempfile_suffix = 'bem'
|
20f7b0296f0e7139a69f94ca8c80c9ad1b73c011 | tests/test_package.py | tests/test_package.py | import argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def teardown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
| Add a test for package generation | Add a test for package generation
| Python | mit | jboss-dockerfiles/dogen,goldmann/dogen,jboss-container-images/concreate,jboss-container-images/concreate,goldmann/dogen,goldmann/dogen,jboss-container-images/concreate,jboss-dockerfiles/dogen,jboss-dockerfiles/dogen | import argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def teardown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
| Add a test for package generation
|
|
460580ff585fa76cebc5e2e9cb1d49550db9f68d | components/item_lock.py | components/item_lock.py | from models.item import ItemModel
from models.base_model import ETAG
from superdesk import SuperdeskError
LOCK_USER = 'lock_user'
STATUS = '_status'
class ItemLock():
def __init__(self, data_layer):
self.data_layer = data_layer
def lock(self, filter, user, etag):
item_model = ItemModel(self.data_layer)
item = item_model.find_one(filter)
if item and self._can_lock(item, user):
# filter[ETAG] = etag
updates = {LOCK_USER: user}
item_model.update(filter, updates)
item[LOCK_USER] = user
else:
raise SuperdeskError('Item locked by another user')
return item
def unlock(self, filter, user, etag):
item_model = ItemModel()
filter[LOCK_USER] = user
filter[ETAG] = etag
item = item_model.find_one(filter)
if item:
update = {LOCK_USER: None}
item_model.update(filter, update)
def _can_lock(self, item, user):
# TODO: implement
return True
| from models.item import ItemModel
from models.base_model import ETAG
from superdesk import SuperdeskError
from superdesk.utc import utcnow
LOCK_USER = 'lock_user'
STATUS = '_status'
class ItemLock():
def __init__(self, data_layer):
self.data_layer = data_layer
def lock(self, filter, user, etag):
item_model = ItemModel(self.data_layer)
item = item_model.find_one(filter)
if item and self._can_lock(item, user):
# filter[ETAG] = etag
updates = {LOCK_USER: user, 'lock_time': utcnow()}
item_model.update(filter, updates)
item[LOCK_USER] = user
else:
raise SuperdeskError('Item locked by another user')
return item
def unlock(self, filter, user, etag):
item_model = ItemModel()
filter[LOCK_USER] = user
filter[ETAG] = etag
item = item_model.find_one(filter)
if item:
update = {LOCK_USER: None}
item_model.update(filter, update)
def _can_lock(self, item, user):
# TODO: implement
return True
| Set timestamp on item lock operation | Set timestamp on item lock operation
| Python | agpl-3.0 | akintolga/superdesk,superdesk/superdesk-ntb,verifiedpixel/superdesk,Aca-jov/superdesk,amagdas/superdesk,hlmnrmr/superdesk,verifiedpixel/superdesk,pavlovicnemanja/superdesk,ioanpocol/superdesk-ntb,liveblog/superdesk,marwoodandrew/superdesk-aap,sivakuna-aap/superdesk,mugurrus/superdesk,akintolga/superdesk-aap,pavlovicnemanja/superdesk,petrjasek/superdesk-ntb,superdesk/superdesk,fritzSF/superdesk,fritzSF/superdesk,marwoodandrew/superdesk,fritzSF/superdesk,vied12/superdesk,thnkloud9/superdesk,verifiedpixel/superdesk,marwoodandrew/superdesk-aap,superdesk/superdesk-aap,marwoodandrew/superdesk,liveblog/superdesk,akintolga/superdesk-aap,petrjasek/superdesk,plamut/superdesk,petrjasek/superdesk,superdesk/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,ancafarcas/superdesk,verifiedpixel/superdesk,plamut/superdesk,akintolga/superdesk,vied12/superdesk,ancafarcas/superdesk,marwoodandrew/superdesk,superdesk/superdesk-ntb,marwoodandrew/superdesk,vied12/superdesk,mdhaman/superdesk,mdhaman/superdesk,sivakuna-aap/superdesk,liveblog/superdesk,liveblog/superdesk,superdesk/superdesk-aap,hlmnrmr/superdesk,hlmnrmr/superdesk,superdesk/superdesk,darconny/superdesk,akintolga/superdesk-aap,pavlovicnemanja92/superdesk,vied12/superdesk,petrjasek/superdesk-server,petrjasek/superdesk,ioanpocol/superdesk-ntb,superdesk/superdesk-aap,mdhaman/superdesk,liveblog/superdesk,pavlovicnemanja92/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,sivakuna-aap/superdesk,akintolga/superdesk,mdhaman/superdesk-aap,petrjasek/superdesk-server,pavlovicnemanja92/superdesk,ioanpocol/superdesk,petrjasek/superdesk,plamut/superdesk,mdhaman/superdesk-aap,amagdas/superdesk,fritzSF/superdesk,mdhaman/superdesk-aap,akintolga/superdesk-aap,amagdas/superdesk,petrjasek/superdesk-ntb,fritzSF/superdesk,thnkloud9/superdesk,plamut/superdesk,ioanpocol/superdesk,superdesk/superdesk-aap,amagdas/superdesk,ioanpocol/superdesk-ntb,sjunaid/superdesk,vied12/superdesk,mugurrus/superdesk,sjunaid/superdesk,Aca-jov/superdesk,marwoodandrew/superdesk-aap,gbbr/superdesk,pavlovicnemanja92/superdesk,superdesk/superdesk,thnkloud9/superdesk,amagdas/superdesk,superdesk/superdesk-ntb,petrjasek/superdesk-ntb,marwoodandrew/superdesk-aap,darconny/superdesk,petrjasek/superdesk-ntb,plamut/superdesk,darconny/superdesk,akintolga/superdesk,gbbr/superdesk,superdesk/superdesk-ntb,pavlovicnemanja/superdesk,marwoodandrew/superdesk,mugurrus/superdesk,pavlovicnemanja/superdesk,ancafarcas/superdesk,ioanpocol/superdesk,verifiedpixel/superdesk,gbbr/superdesk,sjunaid/superdesk,Aca-jov/superdesk,sivakuna-aap/superdesk | from models.item import ItemModel
from models.base_model import ETAG
from superdesk import SuperdeskError
from superdesk.utc import utcnow
LOCK_USER = 'lock_user'
STATUS = '_status'
class ItemLock():
def __init__(self, data_layer):
self.data_layer = data_layer
def lock(self, filter, user, etag):
item_model = ItemModel(self.data_layer)
item = item_model.find_one(filter)
if item and self._can_lock(item, user):
# filter[ETAG] = etag
updates = {LOCK_USER: user, 'lock_time': utcnow()}
item_model.update(filter, updates)
item[LOCK_USER] = user
else:
raise SuperdeskError('Item locked by another user')
return item
def unlock(self, filter, user, etag):
item_model = ItemModel()
filter[LOCK_USER] = user
filter[ETAG] = etag
item = item_model.find_one(filter)
if item:
update = {LOCK_USER: None}
item_model.update(filter, update)
def _can_lock(self, item, user):
# TODO: implement
return True
| Set timestamp on item lock operation
from models.item import ItemModel
from models.base_model import ETAG
from superdesk import SuperdeskError
LOCK_USER = 'lock_user'
STATUS = '_status'
class ItemLock():
def __init__(self, data_layer):
self.data_layer = data_layer
def lock(self, filter, user, etag):
item_model = ItemModel(self.data_layer)
item = item_model.find_one(filter)
if item and self._can_lock(item, user):
# filter[ETAG] = etag
updates = {LOCK_USER: user}
item_model.update(filter, updates)
item[LOCK_USER] = user
else:
raise SuperdeskError('Item locked by another user')
return item
def unlock(self, filter, user, etag):
item_model = ItemModel()
filter[LOCK_USER] = user
filter[ETAG] = etag
item = item_model.find_one(filter)
if item:
update = {LOCK_USER: None}
item_model.update(filter, update)
def _can_lock(self, item, user):
# TODO: implement
return True
|
08300895dc8d2abb740dd71b027e9acda8bb84dd | chatterbot/ext/django_chatterbot/views.py | chatterbot/ext/django_chatterbot/views.py | from django.views.generic import View
from django.http import JsonResponse
from django.conf import settings
class ChatterBotView(View):
def post(self, request, *args, **kwargs):
input_statement = request.POST.get('text')
response_data = settings.CHATTERBOT.get_response(input_statement)
return JsonResponse(response_data, status=200)
def get(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def patch(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def delete(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
| from django.views.generic import View
from django.http import JsonResponse
from django.conf import settings
from chatterbot import ChatBot
class ChatterBotView(View):
chatterbot = ChatBot(
settings.CHATTERBOT['NAME'],
storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter',
input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter',
output_adapter='chatterbot.adapters.output.OutputFormatAdapter',
output_format='json'
)
def post(self, request, *args, **kwargs):
input_statement = request.POST.get('text')
response_data = self.chatterbot.get_response(input_statement)
return JsonResponse(response_data, status=200)
def get(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def patch(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def delete(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
| Initialize ChatterBot in django view module instead of settings. | Initialize ChatterBot in django view module instead of settings.
| Python | bsd-3-clause | Reinaesaya/OUIRL-ChatBot,Reinaesaya/OUIRL-ChatBot,vkosuri/ChatterBot,davizucon/ChatterBot,gunthercox/ChatterBot,maclogan/VirtualPenPal,Gustavo6046/ChatterBot | from django.views.generic import View
from django.http import JsonResponse
from django.conf import settings
from chatterbot import ChatBot
class ChatterBotView(View):
chatterbot = ChatBot(
settings.CHATTERBOT['NAME'],
storage_adapter='chatterbot.adapters.storage.DjangoStorageAdapter',
input_adapter='chatterbot.adapters.input.VariableInputTypeAdapter',
output_adapter='chatterbot.adapters.output.OutputFormatAdapter',
output_format='json'
)
def post(self, request, *args, **kwargs):
input_statement = request.POST.get('text')
response_data = self.chatterbot.get_response(input_statement)
return JsonResponse(response_data, status=200)
def get(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def patch(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def delete(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
| Initialize ChatterBot in django view module instead of settings.
from django.views.generic import View
from django.http import JsonResponse
from django.conf import settings
class ChatterBotView(View):
def post(self, request, *args, **kwargs):
input_statement = request.POST.get('text')
response_data = settings.CHATTERBOT.get_response(input_statement)
return JsonResponse(response_data, status=200)
def get(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def patch(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
def delete(self, request, *args, **kwargs):
data = {
'detail': 'You should make a POST request to this endpoint.'
}
# Return a method not allowed response
return JsonResponse(data, status=405)
|
6425c20b536e8952b062ccb8b470ea615ebc0fa2 | conman/routes/migrations/0002_simplify_route_slug_help_text.py | conman/routes/migrations/0002_simplify_route_slug_help_text.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('routes', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='route',
name='slug',
field=models.SlugField(max_length=255, help_text='The url fragment at this point in the Route hierarchy.', default=''),
),
]
| Add missing migration to routes app | Add missing migration to routes app
| Python | bsd-2-clause | meshy/django-conman,Ian-Foote/django-conman,meshy/django-conman | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('routes', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='route',
name='slug',
field=models.SlugField(max_length=255, help_text='The url fragment at this point in the Route hierarchy.', default=''),
),
]
| Add missing migration to routes app
|
|
44b311eade20016f613311a98d666b2463826ad1 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='[email protected]',
version='1.0.3',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.21',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
],
extras_require={
'testing': [
'mock>=2.0,<2.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='[email protected]',
version='1.0.3',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.22',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
],
extras_require={
'testing': [
'mock>=2.0,<2.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| Update requests requirement from <2.21,>=2.4.2 to >=2.4.2,<2.22 | Update requests requirement from <2.21,>=2.4.2 to >=2.4.2,<2.22
Updates the requirements on [requests](https://github.com/requests/requests) to permit the latest version.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/requests/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/commits/v2.21.0)
Signed-off-by: dependabot[bot] <[email protected]> | Python | apache-2.0 | zooniverse/panoptes-python-client | from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='[email protected]',
version='1.0.3',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.22',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
],
extras_require={
'testing': [
'mock>=2.0,<2.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
| Update requests requirement from <2.21,>=2.4.2 to >=2.4.2,<2.22
Updates the requirements on [requests](https://github.com/requests/requests) to permit the latest version.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/requests/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/commits/v2.21.0)
Signed-off-by: dependabot[bot] <[email protected]>
from setuptools import setup, find_packages
setup(
name='panoptes_client',
url='https://github.com/zooniverse/panoptes-python-client',
author='Adam McMaster',
author_email='[email protected]',
version='1.0.3',
packages=find_packages(),
include_package_data=True,
install_requires=[
'requests>=2.4.2,<2.21',
'future>=0.16,<0.18',
'python-magic>=0.4,<0.5',
'redo>=1.7',
],
extras_require={
'testing': [
'mock>=2.0,<2.1',
],
'docs': [
'sphinx',
],
':python_version == "2.7"': ['futures'],
}
)
|
5e2111a5ccc0bcbe7b9af4fec09b9b46eb03ebd3 | GenNowPlayingMovieID.py | GenNowPlayingMovieID.py | #!/usr/bin/python
#coding: utf-8
import requests
import re
if __name__=="__main__":
page = requests.get('https://movie.douban.com/nowplaying/beijing/')
content=page.text.encode("utf-8")
pattern=re.compile(r'(?<=id=")\d+(?="\n)')
result=pattern.findall(content)
for iterm in result:
print iterm
| #!/usr/bin/python
#coding: utf-8
import requests
import re
import time
from time import gmtime, strftime
class GenNowPlayingID(object):
"""docstring for ClassName"""
def __init__(self):
#super(ClassName, self).__init__()
# self.arg = arg
pass
def GenNowPlayingIdList(self):
page = requests.get('https://movie.douban.com/nowplaying/beijing/')
content=page.text.encode("utf-8")
pattern=re.compile(r'(?<=id=")\d+(?="\n)')
result=pattern.findall(content)
currentTime = strftime("%Y%m%d_%H:%M:%S", time.localtime(time.time()))
print currentTime
filename = './nowplaying_id/' + currentTime + '.id'
fp = open(filename,'w')
for iterm in result:
fp.write(iterm+"\n")
fp.close()
if __name__=="__main__":
genNowPlayingID = GenNowPlayingID()
genNowPlayingID.GenNowPlayingIdList()
| Write the nowplaying movie id to file | Write the nowplaying movie id to file
| Python | apache-2.0 | ModernKings/MKMovieCenter,ModernKings/MKMovieCenter,ModernKings/MKMovieCenter | #!/usr/bin/python
#coding: utf-8
import requests
import re
import time
from time import gmtime, strftime
class GenNowPlayingID(object):
"""docstring for ClassName"""
def __init__(self):
#super(ClassName, self).__init__()
# self.arg = arg
pass
def GenNowPlayingIdList(self):
page = requests.get('https://movie.douban.com/nowplaying/beijing/')
content=page.text.encode("utf-8")
pattern=re.compile(r'(?<=id=")\d+(?="\n)')
result=pattern.findall(content)
currentTime = strftime("%Y%m%d_%H:%M:%S", time.localtime(time.time()))
print currentTime
filename = './nowplaying_id/' + currentTime + '.id'
fp = open(filename,'w')
for iterm in result:
fp.write(iterm+"\n")
fp.close()
if __name__=="__main__":
genNowPlayingID = GenNowPlayingID()
genNowPlayingID.GenNowPlayingIdList()
| Write the nowplaying movie id to file
#!/usr/bin/python
#coding: utf-8
import requests
import re
if __name__=="__main__":
page = requests.get('https://movie.douban.com/nowplaying/beijing/')
content=page.text.encode("utf-8")
pattern=re.compile(r'(?<=id=")\d+(?="\n)')
result=pattern.findall(content)
for iterm in result:
print iterm
|
631665a8aeee54d5094480ddf4140a61dce4a960 | ostinato/blog/apps.py | ostinato/blog/apps.py | from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
| from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
| Correct app label of ostinato_blog | Correct app label of ostinato_blog
| Python | mit | andrewebdev/django-ostinato,andrewebdev/django-ostinato,andrewebdev/django-ostinato | from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
| Correct app label of ostinato_blog
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
|
a7ba6ece76e768e642a6ed264791e3987f7c7629 | apps/user_app/forms.py | apps/user_app/forms.py | from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
| from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
| Implement validation to the username field. | Implement validation to the username field.
| Python | mit | pedrolinhares/po-po-modoro,pedrolinhares/po-po-modoro | from django import forms
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
def isValidUserName(username):
try:
User.objects.get(username=username)
except User.DoesNotExist:
return
raise ValidationError('The username "%s" is already taken.' % username)
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True, validators=[isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
| Implement validation to the username field.
from django import forms
from django.core import validators
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
class RegistrationForm(UserCreationForm):
username = forms.CharField(label='username',
max_length=30,
required=True,) #validators=[self.isValidUserName])
class Meta:
model = User
fields = ('username','first_name', 'last_name', 'email',)
# def isValidUserName(self, field_data, all_data):
# try:
# User.objects.get(username=field_data)
# except User.DoesNotExist:
# return
# raise validators.ValidationError('The username "%s" is already taken.' % field_data)
def save(self, commit=True):
new_user = super(RegistrationForm, self).save(commit=False)
new_user.is_active = False
if commit:
new_user.save()
return new_user
|
b0dbd7029f003538ddef9f3a5f8035f8691bf4d7 | shuup/core/utils/shops.py | shuup/core/utils/shops.py | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.core.models import Shop
def get_shop_from_host(host):
"""
Try to find a shop that matches a `host`
e.g: shop.domain.com, domain.com, localhost:8000
:type host str
"""
shop = Shop.objects.filter(domain=host).first()
if not shop:
subdomain = host.split(".")[0]
shop = Shop.objects.filter(domain=subdomain).first()
return shop
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.core.models import Shop
def get_shop_from_host(host):
"""
Try to find a shop that matches a `host`
e.g: shop.domain.com, domain.com, localhost:8000
:type host str
"""
shop = Shop.objects.filter(domain=host).first()
if not shop and ":" in host:
shop = Shop.objects.filter(domain=host.rsplit(":")[0]).first()
if not shop:
subdomain = host.split(".")[0]
shop = Shop.objects.filter(domain=subdomain).first()
return shop
| Make handle ports better when choosing shop | Make handle ports better when choosing shop
Refs EE-235
| Python | agpl-3.0 | shoopio/shoop,shoopio/shoop,shoopio/shoop | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.core.models import Shop
def get_shop_from_host(host):
"""
Try to find a shop that matches a `host`
e.g: shop.domain.com, domain.com, localhost:8000
:type host str
"""
shop = Shop.objects.filter(domain=host).first()
if not shop and ":" in host:
shop = Shop.objects.filter(domain=host.rsplit(":")[0]).first()
if not shop:
subdomain = host.split(".")[0]
shop = Shop.objects.filter(domain=subdomain).first()
return shop
| Make handle ports better when choosing shop
Refs EE-235
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.core.models import Shop
def get_shop_from_host(host):
"""
Try to find a shop that matches a `host`
e.g: shop.domain.com, domain.com, localhost:8000
:type host str
"""
shop = Shop.objects.filter(domain=host).first()
if not shop:
subdomain = host.split(".")[0]
shop = Shop.objects.filter(domain=subdomain).first()
return shop
|
5945b27aa6b5ae43470738dd6638ffa4617f7be1 | poradnia/users/migrations/0014_auto_20170317_1927.py | poradnia/users/migrations/0014_auto_20170317_1927.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
| Fix backward compatibility of migrations | Fix backward compatibility of migrations
| Python | mit | watchdogpolska/poradnia,rwakulszowa/poradnia,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
from django.db import migrations, models
try:
import django.contrib.auth.validators
extra_kwargs = {'validators': [django.contrib.auth.validators.ASCIIUsernameValidator()]}
except ImportError:
extra_kwargs = {}
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'},
help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.',
max_length=150, unique=True, verbose_name='username', **extra_kwargs),
),
]
| Fix backward compatibility of migrations
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-17 18:27
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0013_profile_event_reminder_time'),
]
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.ASCIIUsernameValidator()], verbose_name='username'),
),
]
|
2f26197d10a1c7cfc010074576c7e1a2c2a31e78 | data_structures/bitorrent/torrent.py | data_structures/bitorrent/torrent.py | import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| Use __getitem__ to improve readbility | Use __getitem__ to improve readbility
| Python | apache-2.0 | vtemian/university_projects,vtemian/university_projects,vtemian/university_projects | import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
def __getitem__(self, item):
return self.decoded[item]
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
| Use __getitem__ to improve readbility
import hashlib
import urllib
import bencode
class Torrent(object):
def __init__(self, path):
self.encoded = self._get_meta(path)
self.decoded = bencode.bdecode(self.encoded)
def _get_meta(self, path):
with open(path) as f:
return f.read()
@property
def hash(self):
info_hash = hashlib.sha1(bencode.bencode(self.decoded['info'])).digest()
return urllib.quote(info_hash)
@property
def urls(self):
urls = [self.decoded['announce']]
urls += [announce[0] for announce in self.decoded['announce-list']]
return urls
|
3e6f835a88183182b6ebba25c61666735a69fc81 | tests/vaultshell.py | tests/vaultshell.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
class VaultShellTests(unittest.TestCase):
def test_basic(self):
print "test basic. Pass"
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import vault_shell.vault_commandhelper as VaultHelper
class VaultShellTests(unittest.TestCase):
def test_basic(self):
print "test basic. Pass"
vaulthelper = VaultHelper.VaultCommandHelper()
self.failUnless(vaulthelper is not None)
def test_execute_vault_commands(self):
vaulthelper = VaultHelper.VaultCommandHelper()
output = vaulthelper.execute_vault_commands(['vault'])
self.failUnless(output is not None)
def test_get_commandkey_from_cmdlist(self):
vaulthelper = VaultHelper.VaultCommandHelper()
cmdkey = vaulthelper.get_commandkey_from_cmdlist(["token-create"])
self.assertEqual(cmdkey,
"vault_token-create",
msg="cmdkey did not match")
| Add more tests for the vault commandhelper | Add more tests for the vault commandhelper
| Python | apache-2.0 | bdastur/vault-shell | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import vault_shell.vault_commandhelper as VaultHelper
class VaultShellTests(unittest.TestCase):
def test_basic(self):
print "test basic. Pass"
vaulthelper = VaultHelper.VaultCommandHelper()
self.failUnless(vaulthelper is not None)
def test_execute_vault_commands(self):
vaulthelper = VaultHelper.VaultCommandHelper()
output = vaulthelper.execute_vault_commands(['vault'])
self.failUnless(output is not None)
def test_get_commandkey_from_cmdlist(self):
vaulthelper = VaultHelper.VaultCommandHelper()
cmdkey = vaulthelper.get_commandkey_from_cmdlist(["token-create"])
self.assertEqual(cmdkey,
"vault_token-create",
msg="cmdkey did not match")
| Add more tests for the vault commandhelper
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
class VaultShellTests(unittest.TestCase):
def test_basic(self):
print "test basic. Pass"
|
8605b07f2f5951f8a0b85d3d77baa1758723fb64 | auth0/v2/authentication/users.py | auth0/v2/authentication/users.py | from .base import AuthenticationBase
class Users(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
| from .base import AuthenticationBase
class Users(AuthenticationBase):
"""Userinfo related endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
"""Returns the user information based on the Auth0 access token.
Args:
access_token (str): Auth0 access token (obtained during login).
Returns:
The user profile.
"""
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
"""Returns user profile based on the user's jwt
Validates a JSON Web Token (signature and expiration) and returns the
user information associated with the user id (sub property) of
the token.
Args:
jwt (str): User's jwt
Returns:
The user profile.
"""
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
| Add docstrings to Users class | Add docstrings to Users class
| Python | mit | auth0/auth0-python,auth0/auth0-python | from .base import AuthenticationBase
class Users(AuthenticationBase):
"""Userinfo related endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
"""Returns the user information based on the Auth0 access token.
Args:
access_token (str): Auth0 access token (obtained during login).
Returns:
The user profile.
"""
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
"""Returns user profile based on the user's jwt
Validates a JSON Web Token (signature and expiration) and returns the
user information associated with the user id (sub property) of
the token.
Args:
jwt (str): User's jwt
Returns:
The user profile.
"""
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
| Add docstrings to Users class
from .base import AuthenticationBase
class Users(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def userinfo(self, access_token):
return self.get(
url='https://%s/userinfo' % self.domain,
headers={'Authorization': 'Bearer %s' % access_token}
)
def tokeninfo(self, jwt):
return self.post(
url='https://%s/tokeninfo' % self.domain,
data={'id_token': jwt},
headers={'Content-Type: application/json'}
)
|
c7e65db27da59ddf221d1720362434581ef30311 | test/unit/locale/test_locale.py | test/unit/locale/test_locale.py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
| #!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
| Make test_translations test our tree | Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24
| Python | apache-2.0 | swiftstack/swift,rackerlabs/swift,zackmdavis/swift,williamthegrey/swift,eatbyte/Swift,matthewoliver/swift,Seagate/swift,anishnarang/gswift,clayg/swift,shibaniahegde/OpenStak_swift,openstack/swift,prashanthpai/swift,matthewoliver/swift,psachin/swift,nadeemsyed/swift,AfonsoFGarcia/swift,prashanthpai/swift,smerritt/swift,levythu/swift,hbhdytf/mac,mjzmjz/swift,Khushbu27/Tutorial,redhat-openstack/swift,Seagate/swift,takeshineshiro/swift,notmyname/swift,dpgoetz/swift,nadeemsyed/swift,matthewoliver/swift,tipabu/swift,tipabu/swift,bkolli/swift,gold3bear/swift,mjwtom/swift,swiftstack/swift,IPVL/swift-kilo,Em-Pan/swift,hbhdytf/mac,scality/ScalitySproxydSwift,thiagodasilva/swift,Khushbu27/Tutorial,xiaoguoai/ec-dev-swift,nadeemsyed/swift,williamthegrey/swift,openstack/swift,sarvesh-ranjan/swift,anishnarang/gswift,smerritt/swift,hurricanerix/swift,mjwtom/swift,bradleypj823/swift,dpgoetz/swift,eatbyte/Swift,Akanoa/swift,clayg/swift,gold3bear/swift,psachin/swift,swiftstack/swift,hbhdytf/mac2,openstack/swift,psachin/swift,wenhuizhang/swift,hurricanerix/swift,hbhdytf/mac2,wenhuizhang/swift,aerwin3/swift,maginatics/swift,NeCTAR-RC/swift,bkolli/swift,sarvesh-ranjan/swift,bradleypj823/swift,daasbank/swift,revoer/keystone-8.0.0,smerritt/swift,notmyname/swift,shibaniahegde/OpenStak_swift,NeCTAR-RC/swift,Em-Pan/swift,psachin/swift,openstack/swift,thiagodasilva/swift,notmyname/swift,iostackproject/IO-Bandwidth-Differentiation,maginatics/swift,tipabu/swift,larsbutler/swift,hurricanerix/swift,bouncestorage/swift,redbo/swift,AfonsoFGarcia/swift,notmyname/swift,clayg/swift,revoer/keystone-8.0.0,iostackproject/IO-Bandwidth-Differentiation,bouncestorage/swift,xiaoguoai/ec-dev-swift,hbhdytf/mac2,redhat-openstack/swift,hbhdytf/mac2,dencaval/swift,levythu/swift,nadeemsyed/swift,takeshineshiro/swift,mjzmjz/swift,scality/ScalitySproxydSwift,larsbutler/swift,zackmdavis/swift,dencaval/swift,daasbank/swift,matthewoliver/swift,aerwin3/swift,rackerlabs/swift,IPVL/swift-kilo,smerritt/swift,Akanoa/swift,clayg/swift,redbo/swift,tipabu/swift,hurricanerix/swift | #!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
| Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
|
73a9ba740d446e19c0428ffc29bf5bb5b033d7fe | PynamoDB/persistence_engine.py | PynamoDB/persistence_engine.py | """
persistence_engine.py
~~~~~~~~~~~~
Implements put, get, delete methods for PersistenceStage. Using an actual persistence engine (i.e. MySQL, BDB), one would implement the three methods themselves.
"""
class PersistenceEngine(object):
""" Basic persistence engine implemented as a regular Python dict."""
def __init__(self):
self._persistence = dict()
def keys():
return self._persistence.keys()
def put(self, key, value):
""" Put key value pair into storage"""
self._persistence[key] = value
def get(self, key):
""" Get key's value """
return self._persistence[key]
def delete(self, key):
""" Delete key value pair """
del self._persistence[key]
| """
persistence_engine.py
~~~~~~~~~~~~
Implements put, get, delete methods for PersistenceStage. Using an actual persistence engine (i.e. MySQL, BDB), one would implement the three methods themselves.
"""
class PersistenceEngine(object):
""" Basic persistence engine implemented as a regular Python dict."""
def __init__(self):
self._persistence = dict()
def keys():
return self._persistence.keys()
def put(self, key, value, timestamp):
""" Put key value pair into storage"""
self._persistence[key] = {'value': value, 'timestamp': timestamp}
return True
def get(self, key):
""" Get key's value """
return self._persistence[key]['value'], self._persistence[key]['timestamp']
def delete(self, key):
""" Delete key value pair """
del self._persistence[key]
return True
| Remove use of timestamped value. | Remove use of timestamped value.
Thought it was dumb/inelegant to have a Value() object floating around
with value and timestamp . Instead, now all messages are sent around
as json dicts.
The request enters the system as json, flows through to an endpoint
where it becomes a reply message, then flows back to the client.
| Python | mit | samuelwu90/PynamoDB | """
persistence_engine.py
~~~~~~~~~~~~
Implements put, get, delete methods for PersistenceStage. Using an actual persistence engine (i.e. MySQL, BDB), one would implement the three methods themselves.
"""
class PersistenceEngine(object):
""" Basic persistence engine implemented as a regular Python dict."""
def __init__(self):
self._persistence = dict()
def keys():
return self._persistence.keys()
def put(self, key, value, timestamp):
""" Put key value pair into storage"""
self._persistence[key] = {'value': value, 'timestamp': timestamp}
return True
def get(self, key):
""" Get key's value """
return self._persistence[key]['value'], self._persistence[key]['timestamp']
def delete(self, key):
""" Delete key value pair """
del self._persistence[key]
return True
| Remove use of timestamped value.
Thought it was dumb/inelegant to have a Value() object floating around
with value and timestamp . Instead, now all messages are sent around
as json dicts.
The request enters the system as json, flows through to an endpoint
where it becomes a reply message, then flows back to the client.
"""
persistence_engine.py
~~~~~~~~~~~~
Implements put, get, delete methods for PersistenceStage. Using an actual persistence engine (i.e. MySQL, BDB), one would implement the three methods themselves.
"""
class PersistenceEngine(object):
""" Basic persistence engine implemented as a regular Python dict."""
def __init__(self):
self._persistence = dict()
def keys():
return self._persistence.keys()
def put(self, key, value):
""" Put key value pair into storage"""
self._persistence[key] = value
def get(self, key):
""" Get key's value """
return self._persistence[key]
def delete(self, key):
""" Delete key value pair """
del self._persistence[key]
|
85f8bb3e46c5c79af6ba1e246ad5938642feadcc | test/test_i18n.py | test/test_i18n.py | # -*- coding: utf8 -*-
###
# Copyright (c) 2012, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
from supybot.i18n import PluginInternationalization, internationalizeDocstring
import supybot.conf as conf
import supybot.i18n as i18n
msg_en = 'The operation succeeded.'
msg_fr = 'Opération effectuée avec succès.'
_ = PluginInternationalization()
@internationalizeDocstring
def foo():
'The operation succeeded.'
pass
class I18nTestCase(SupyTestCase):
def testPluginInternationalization(self):
self.assertEqual(_(msg_en), msg_en)
with conf.supybot.language.context('fr'):
self.assertEqual(_(msg_en), msg_fr)
conf.supybot.language.setValue('en')
self.assertEqual(_(msg_en), msg_en)
def testDocstring(self):
self.assertEqual(foo.__doc__, msg_en)
with conf.supybot.language.context('fr'):
self.assertEqual(foo.__doc__, msg_fr)
i18n.reloadLocales()
self.assertEqual(foo.__doc__, msg_en)
| Add unit tests for i18n. | Add unit tests for i18n.
| Python | bsd-3-clause | Ban3/Limnoria,Ban3/Limnoria,ProgVal/Limnoria-test,ProgVal/Limnoria-test | # -*- coding: utf8 -*-
###
# Copyright (c) 2012, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
from supybot.i18n import PluginInternationalization, internationalizeDocstring
import supybot.conf as conf
import supybot.i18n as i18n
msg_en = 'The operation succeeded.'
msg_fr = 'Opération effectuée avec succès.'
_ = PluginInternationalization()
@internationalizeDocstring
def foo():
'The operation succeeded.'
pass
class I18nTestCase(SupyTestCase):
def testPluginInternationalization(self):
self.assertEqual(_(msg_en), msg_en)
with conf.supybot.language.context('fr'):
self.assertEqual(_(msg_en), msg_fr)
conf.supybot.language.setValue('en')
self.assertEqual(_(msg_en), msg_en)
def testDocstring(self):
self.assertEqual(foo.__doc__, msg_en)
with conf.supybot.language.context('fr'):
self.assertEqual(foo.__doc__, msg_fr)
i18n.reloadLocales()
self.assertEqual(foo.__doc__, msg_en)
| Add unit tests for i18n.
|
|
c28127941ed88fdedc084c6227da3b921a5e15ab | jsk_apc2015_common/scripts/test_bof_object_recognition.py | jsk_apc2015_common/scripts/test_bof_object_recognition.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import cPickle as pickle
import gzip
import sys
import cv2
from imagesift import get_sift_keypoints
import numpy as np
from sklearn.datasets import load_files
from sklearn.metrics import accuracy_score, classification_report
from sklearn.preprocessing import normalize
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
parser.add_argument('bof_path')
parser.add_argument('clf_path')
args = parser.parse_args()
container_path = args.container_path
bof_path = args.bof_path
clf_path = args.clf_path
bunch_files = load_files(container_path=container_path,
description='images',
shuffle=False,
load_content=False)
with gzip.open(bof_path, 'rb') as f:
bof = pickle.load(f)
with gzip.open(clf_path, 'rb') as f:
clf = pickle.load(f)
descs = []
for fname in bunch_files.filenames:
img = cv2.imread(fname, 0)
_, desc = get_sift_keypoints(img)
descs.append(desc)
X = bof.transform(descs)
normalize(X, copy=False)
y_pred = clf.predict(X)
y = bunch_files.target
print accuracy_score(y, y_pred)
print classification_report(y, y_pred, target_names=clf.target_names_)
| Add bof object recognition test script | [jsk_2015_apc_common] Add bof object recognition test script
| Python | bsd-3-clause | pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import cPickle as pickle
import gzip
import sys
import cv2
from imagesift import get_sift_keypoints
import numpy as np
from sklearn.datasets import load_files
from sklearn.metrics import accuracy_score, classification_report
from sklearn.preprocessing import normalize
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
parser.add_argument('bof_path')
parser.add_argument('clf_path')
args = parser.parse_args()
container_path = args.container_path
bof_path = args.bof_path
clf_path = args.clf_path
bunch_files = load_files(container_path=container_path,
description='images',
shuffle=False,
load_content=False)
with gzip.open(bof_path, 'rb') as f:
bof = pickle.load(f)
with gzip.open(clf_path, 'rb') as f:
clf = pickle.load(f)
descs = []
for fname in bunch_files.filenames:
img = cv2.imread(fname, 0)
_, desc = get_sift_keypoints(img)
descs.append(desc)
X = bof.transform(descs)
normalize(X, copy=False)
y_pred = clf.predict(X)
y = bunch_files.target
print accuracy_score(y, y_pred)
print classification_report(y, y_pred, target_names=clf.target_names_)
| [jsk_2015_apc_common] Add bof object recognition test script
|
|
a9c6e045631103fe8508fd1b60d6076c05092fe1 | tests/examples/customnode/nodes.py | tests/examples/customnode/nodes.py | from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
| from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
| Add undo to custom node sample | Add undo to custom node sample
| Python | agpl-3.0 | ribeiro-ucl/viewflow,codingjoe/viewflow,pombredanne/viewflow,pombredanne/viewflow,codingjoe/viewflow,codingjoe/viewflow,viewflow/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow,viewflow/viewflow,ribeiro-ucl/viewflow | from viewflow.activation import AbstractGateActivation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin,
base.UndoViewMixin,
base.CancelViewMixin,
base.PerformViewMixin,
base.DetailsViewMixin,
base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
| Add undo to custom node sample
from viewflow.activation import AbstractGateActivation, Activation
from viewflow.flow import base
from viewflow.token import Token
class DynamicSplitActivation(AbstractGateActivation):
def calculate_next(self):
self._split_count = self.flow_task._task_count_callback(self.process)
@Activation.status.super()
def activate_next(self):
if self._split_count:
token_source = Token.split_token_source(self.task.token, self.task.pk)
for _ in range(self._split_count):
self.flow_task._next.activate(prev_activation=self, token=next(token_source))
class DynamicSplit(base.NextNodeMixin, base.DetailsViewMixin, base.Gateway):
"""
Activates several outgoing task instances depends on callback value
Example::
spit_on_decision = flow.DynamicSplit(lambda p: 4) \\
.Next(this.make_decision)
make_decision = flow.View(MyView) \\
.Next(this.join_on_decision)
join_on_decision = flow.Join() \\
.Next(this.end)
"""
task_type = 'SPLIT'
activation_cls = DynamicSplitActivation
def __init__(self, callback):
super(DynamicSplit, self).__init__()
self._task_count_callback = callback
|
75a8d2ed6a3fa03ca132388182b1e7876fb6413e | setup.py | setup.py | #!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.36.0",
"flask",
"httpretty==0.8.10",
"requests",
"xmltodict",
"six",
"werkzeug",
"sure",
"freezegun"
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
setup(
name='moto',
version='0.4.28',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
| #!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.36.0",
"flask",
"httpretty==0.8.10",
"requests",
"xmltodict",
"six",
"werkzeug",
"sure",
"freezegun"
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
setup(
name='moto',
version='0.4.27',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
| Revert "Bumping the version reflecting the bugfix" | Revert "Bumping the version reflecting the bugfix"
This reverts commit 7f3daf4755aff19d04acf865df39f7d188655b15.
| Python | apache-2.0 | okomestudio/moto,spulec/moto,spulec/moto,heddle317/moto,Affirm/moto,okomestudio/moto,whummer/moto,2rs2ts/moto,Affirm/moto,ZuluPro/moto,rocky4570/moto,Brett55/moto,spulec/moto,gjtempleton/moto,2rs2ts/moto,okomestudio/moto,2rs2ts/moto,gjtempleton/moto,Brett55/moto,heddle317/moto,botify-labs/moto,spulec/moto,2rs2ts/moto,heddle317/moto,whummer/moto,okomestudio/moto,gjtempleton/moto,rocky4570/moto,kefo/moto,dbfr3qs/moto,Brett55/moto,ZuluPro/moto,silveregg/moto,botify-labs/moto,whummer/moto,dbfr3qs/moto,dbfr3qs/moto,whummer/moto,spulec/moto,heddle317/moto,Brett55/moto,ZuluPro/moto,rocky4570/moto,Affirm/moto,Affirm/moto,dbfr3qs/moto,william-richard/moto,william-richard/moto,braintreeps/moto,botify-labs/moto,kefo/moto,Affirm/moto,botify-labs/moto,william-richard/moto,whummer/moto,kefo/moto,gjtempleton/moto,william-richard/moto,botify-labs/moto,Brett55/moto,kefo/moto,ZuluPro/moto,rocky4570/moto,dbfr3qs/moto,william-richard/moto,2rs2ts/moto,spulec/moto,kefo/moto,whummer/moto,rocky4570/moto,heddle317/moto,william-richard/moto,ZuluPro/moto,gjtempleton/moto,ZuluPro/moto,okomestudio/moto,Affirm/moto,botify-labs/moto,Brett55/moto,dbfr3qs/moto,rocky4570/moto,okomestudio/moto | #!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.36.0",
"flask",
"httpretty==0.8.10",
"requests",
"xmltodict",
"six",
"werkzeug",
"sure",
"freezegun"
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
setup(
name='moto',
version='0.4.27',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
| Revert "Bumping the version reflecting the bugfix"
This reverts commit 7f3daf4755aff19d04acf865df39f7d188655b15.
#!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.36.0",
"flask",
"httpretty==0.8.10",
"requests",
"xmltodict",
"six",
"werkzeug",
"sure",
"freezegun"
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
setup(
name='moto',
version='0.4.28',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
|
681cc0a4160373fe82de59946b52e0e21611af84 | linkLister.py | linkLister.py | import requests
import re
url = raw_input("Enter URL with http or https prefix : " )
print url
website= requests.get(url)
html = website.text
print html
linklist = re.findall('"((http|ftp)s?://.*?)"',html)
print linklist
for link in linklist:
print link[0]
| Print out all links on a page | Print out all links on a page
| Python | mit | NilanjanaLodh/PyScripts,NilanjanaLodh/PyScripts | import requests
import re
url = raw_input("Enter URL with http or https prefix : " )
print url
website= requests.get(url)
html = website.text
print html
linklist = re.findall('"((http|ftp)s?://.*?)"',html)
print linklist
for link in linklist:
print link[0]
| Print out all links on a page
|
|
52610add5ae887dcbc06f1435fdff34f182d78c9 | go/campaigns/forms.py | go/campaigns/forms.py | from django import forms
class CampaignGeneralForm(forms.Form):
TYPE_CHOICES = (
('', 'Select campaign type'),
('B', 'Bulk Message'),
('C', 'Conversation'),
)
name = forms.CharField(label="Campaign name", max_length=100)
type = forms.ChoiceField(label="Which kind of campaign would you like?",
widget=forms.Select(), choices=TYPE_CHOICES)
class CampaignConfigurationForm(forms.Form):
COUNTRY_CHOICES = (
('.za', 'South Africa'),
)
CHANNEL_CHOICES = (
('ussd', 'USSD'),
)
# more than likely a many to many field, or something similair in the riak
# world. Whom I kidding, this is probably just a modelform?
countries = forms.MultipleChoiceField(label="Destinations",
widget=forms.Select(),
choices=COUNTRY_CHOICES)
channels = forms.MultipleChoiceField(label="Channels",
widget=forms.Select(),
choices=CHANNEL_CHOICES)
keyword = forms.CharField(label="Keyword", max_length=100)
class CampaignBulkMessageForm(forms.Form):
message = forms.CharField(label="Bulk message text", widget=forms.Textarea)
| from django import forms
class CampaignGeneralForm(forms.Form):
TYPE_CHOICES = (
('', 'Select campaign type'),
('B', 'Bulk Message'),
('D', 'Dialogue'),
)
name = forms.CharField(label="Campaign name", max_length=100)
type = forms.ChoiceField(label="Which kind of campaign would you like?",
widget=forms.Select(), choices=TYPE_CHOICES)
class CampaignConfigurationForm(forms.Form):
COUNTRY_CHOICES = (
('.za', 'South Africa'),
)
CHANNEL_CHOICES = (
('ussd', 'USSD'),
)
# more than likely a many to many field, or something similair in the riak
# world. Whom I kidding, this is probably just a modelform?
countries = forms.MultipleChoiceField(label="Destinations",
widget=forms.Select(),
choices=COUNTRY_CHOICES)
channels = forms.MultipleChoiceField(label="Channels",
widget=forms.Select(),
choices=CHANNEL_CHOICES)
keyword = forms.CharField(label="Keyword", max_length=100)
class CampaignBulkMessageForm(forms.Form):
message = forms.CharField(label="Bulk message text", widget=forms.Textarea)
| Use dialogue terminology in menu | Use dialogue terminology in menu
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | from django import forms
class CampaignGeneralForm(forms.Form):
TYPE_CHOICES = (
('', 'Select campaign type'),
('B', 'Bulk Message'),
('D', 'Dialogue'),
)
name = forms.CharField(label="Campaign name", max_length=100)
type = forms.ChoiceField(label="Which kind of campaign would you like?",
widget=forms.Select(), choices=TYPE_CHOICES)
class CampaignConfigurationForm(forms.Form):
COUNTRY_CHOICES = (
('.za', 'South Africa'),
)
CHANNEL_CHOICES = (
('ussd', 'USSD'),
)
# more than likely a many to many field, or something similair in the riak
# world. Whom I kidding, this is probably just a modelform?
countries = forms.MultipleChoiceField(label="Destinations",
widget=forms.Select(),
choices=COUNTRY_CHOICES)
channels = forms.MultipleChoiceField(label="Channels",
widget=forms.Select(),
choices=CHANNEL_CHOICES)
keyword = forms.CharField(label="Keyword", max_length=100)
class CampaignBulkMessageForm(forms.Form):
message = forms.CharField(label="Bulk message text", widget=forms.Textarea)
| Use dialogue terminology in menu
from django import forms
class CampaignGeneralForm(forms.Form):
TYPE_CHOICES = (
('', 'Select campaign type'),
('B', 'Bulk Message'),
('C', 'Conversation'),
)
name = forms.CharField(label="Campaign name", max_length=100)
type = forms.ChoiceField(label="Which kind of campaign would you like?",
widget=forms.Select(), choices=TYPE_CHOICES)
class CampaignConfigurationForm(forms.Form):
COUNTRY_CHOICES = (
('.za', 'South Africa'),
)
CHANNEL_CHOICES = (
('ussd', 'USSD'),
)
# more than likely a many to many field, or something similair in the riak
# world. Whom I kidding, this is probably just a modelform?
countries = forms.MultipleChoiceField(label="Destinations",
widget=forms.Select(),
choices=COUNTRY_CHOICES)
channels = forms.MultipleChoiceField(label="Channels",
widget=forms.Select(),
choices=CHANNEL_CHOICES)
keyword = forms.CharField(label="Keyword", max_length=100)
class CampaignBulkMessageForm(forms.Form):
message = forms.CharField(label="Bulk message text", widget=forms.Textarea)
|
fd47b9235a95146fc0ccbaf10f4b5c2c217fe401 | libsrc/test/TestXdmfPythonArray.py | libsrc/test/TestXdmfPythonArray.py | import Xdmf
from Xdmf import *
if __name__ == '__main__':
array = Xdmf.XdmfArray()
array.SetNumberType(Xdmf.XDMF_INT64_TYPE)
assert(array.GetNumberType() == Xdmf.XDMF_INT64_TYPE)
array.SetShapeFromString("3 3")
assert(array.GetShapeAsString() == "3 3")
assert(array.GetNumberOfElements() == 9)
toWrite = [0,1,2,3,4,500,5000,500000,1000000000000]
i = 0;
for element in toWrite:
array.SetValueFromInt64(i,element)
i += 1
i=0
for element in toWrite:
assert(array.GetValueAsInt64(i) == element)
i += 1
assert (array.GetMaxAsInt64() == 1000000000000)
assert (array.GetMinAsInt64() == 0)
| Add Xdmf Python Test that writes values to an XdmfArray | ENH: Add Xdmf Python Test that writes values to an XdmfArray
| Python | bsd-3-clause | cjh1/Xdmf2,cjh1/Xdmf2,cjh1/Xdmf2 | import Xdmf
from Xdmf import *
if __name__ == '__main__':
array = Xdmf.XdmfArray()
array.SetNumberType(Xdmf.XDMF_INT64_TYPE)
assert(array.GetNumberType() == Xdmf.XDMF_INT64_TYPE)
array.SetShapeFromString("3 3")
assert(array.GetShapeAsString() == "3 3")
assert(array.GetNumberOfElements() == 9)
toWrite = [0,1,2,3,4,500,5000,500000,1000000000000]
i = 0;
for element in toWrite:
array.SetValueFromInt64(i,element)
i += 1
i=0
for element in toWrite:
assert(array.GetValueAsInt64(i) == element)
i += 1
assert (array.GetMaxAsInt64() == 1000000000000)
assert (array.GetMinAsInt64() == 0)
| ENH: Add Xdmf Python Test that writes values to an XdmfArray
|
|
00922099d6abb03a0dbcca19781eb586d367eab0 | skimage/measure/__init__.py | skimage/measure/__init__.py | from .find_contours import find_contours
from ._regionprops import regionprops
from .find_contours import find_contours
from ._structural_similarity import ssim
| from .find_contours import find_contours
from ._regionprops import regionprops
from ._structural_similarity import ssim
| Remove double import of find contours. | BUG: Remove double import of find contours.
| Python | bsd-3-clause | robintw/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,SamHames/scikit-image,chintak/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,dpshelio/scikit-image,chintak/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,michaelaye/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,Britefury/scikit-image,dpshelio/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,Hiyorimi/scikit-image,emon10005/scikit-image,emmanuelle/scikits.image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,warmspringwinds/scikit-image,Midafi/scikit-image,youprofit/scikit-image,chintak/scikit-image,newville/scikit-image,Britefury/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,jwiggins/scikit-image,chriscrosscutler/scikit-image,michaelpacer/scikit-image,emmanuelle/scikits.image,juliusbierk/scikit-image,SamHames/scikit-image,robintw/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,Midafi/scikit-image,emmanuelle/scikits.image,vighneshbirodkar/scikit-image,newville/scikit-image,blink1073/scikit-image,michaelpacer/scikit-image,emmanuelle/scikits.image,oew1v07/scikit-image,emon10005/scikit-image,youprofit/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,rjeli/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,keflavich/scikit-image,ClinicalGraphics/scikit-image,GaZ3ll3/scikit-image,SamHames/scikit-image | from .find_contours import find_contours
from ._regionprops import regionprops
from ._structural_similarity import ssim
| BUG: Remove double import of find contours.
from .find_contours import find_contours
from ._regionprops import regionprops
from .find_contours import find_contours
from ._structural_similarity import ssim
|
57a58893a2ba94b174b06e7f5f63478dff1e879e | providers/popularity/netflix.py | providers/popularity/netflix.py | from providers.popularity.provider import PopularityProvider
from utils.torrent_util import remove_bad_torrent_matches, torrent_to_movie
IDENTIFIER = "netflix"
class Provider(PopularityProvider):
def get_popular(self):
country = "se"
url = f"https://www.finder.com/{country}/netflix-movies"
data = self.parse_html(url, 'tbody td[data-title="Title"] b, tbody td[data-title="Year of release"]', cache=False)
movies = [
{
"name": movie,
"is_bad": False,
"year": year,
}
for movie, year in zip(data[::2], data[1::2])
]
return movies
| Add provider for (Swedish) Netflix. | Add provider for (Swedish) Netflix.
Change "country" inside the provider for the movies available in a different country.
| Python | mit | EmilStenstrom/nephele | from providers.popularity.provider import PopularityProvider
from utils.torrent_util import remove_bad_torrent_matches, torrent_to_movie
IDENTIFIER = "netflix"
class Provider(PopularityProvider):
def get_popular(self):
country = "se"
url = f"https://www.finder.com/{country}/netflix-movies"
data = self.parse_html(url, 'tbody td[data-title="Title"] b, tbody td[data-title="Year of release"]', cache=False)
movies = [
{
"name": movie,
"is_bad": False,
"year": year,
}
for movie, year in zip(data[::2], data[1::2])
]
return movies
| Add provider for (Swedish) Netflix.
Change "country" inside the provider for the movies available in a different country.
|
|
318ebb141ebb50010964821145811aa36e46877f | temba/flows/migrations/0030_auto_20150825_1406.py | temba/flows/migrations/0030_auto_20150825_1406.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
| Make modified_on and org no longer be nullable | Make modified_on and org no longer be nullable
| Python | agpl-3.0 | tsotetsi/textily-web,reyrodrigues/EU-SMS,praekelt/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,reyrodrigues/EU-SMS,reyrodrigues/EU-SMS,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,praekelt/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,praekelt/rapidpro,praekelt/rapidpro | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('flows', '0029_populate_run_modified_on'),
]
operations = [
migrations.AlterField(
model_name='flowrun',
name='modified_on',
field=models.DateTimeField(help_text='When this flow run was last updated', auto_now=True),
preserve_default=True,
),
migrations.AlterField(
model_name='flowrun',
name='org',
field=models.ForeignKey(related_name='runs', to='orgs.Org', db_index=False),
preserve_default=True,
),
]
| Make modified_on and org no longer be nullable
|
|
cd2b628ca118ffae8090004e845e399110aada21 | disk/datadog_checks/disk/__init__.py | disk/datadog_checks/disk/__init__.py | # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .disk import Disk
__all__ = ['Disk']
| # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__
from .disk import Disk
all = [
'__version__', 'Disk'
]
| Allow Agent to properly pull version info | [Disk] Allow Agent to properly pull version info | Python | bsd-3-clause | DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core | # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__
from .disk import Disk
all = [
'__version__', 'Disk'
]
| [Disk] Allow Agent to properly pull version info
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .disk import Disk
__all__ = ['Disk']
|
7629afde2627457b4f4b19e1542a87e695c1837d | tests/events/test_models.py | tests/events/test_models.py | """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
| """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
| Make sure slug gets updated on date change | Make sure slug gets updated on date change
| Python | mit | FlowFX/reggae-cdmx,FlowFX/reggae-cdmx | """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
| Make sure slug gets updated on date change
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
|
479f1792aabc9220a489445979b48781a8cf7ff9 | tests/pytests/unit/states/test_influxdb_continuous_query.py | tests/pytests/unit/states/test_influxdb_continuous_query.py | import pytest
import salt.modules.influxdbmod as influx_mod
import salt.states.influxdb_continuous_query as influx
from tests.support.mock import create_autospec, patch
@pytest.fixture
def configure_loader_modules():
return {influx: {"__salt__": {}, "__opts__": {"test": False}}}
@pytest.mark.xfail
@pytest.mark.parametrize(
"expected_kwargs",
(
{},
{"something": "extra"},
{"something": "extra", "even": "more"},
{"something": "extra", "still": "more and more and more", "and": "more"},
{
"something": "extra",
"what": "in tarnation",
"do": "you want",
"to": "add here?",
},
),
)
def test_when_present_is_called_it_should_pass_client_args_to_create_module(
expected_kwargs,
):
influx_module = create_autospec(influx_mod)
influx_module.continuous_query_exists.return_value = False
with patch.dict(
influx.__salt__,
{
"influxdb.continuous_query_exists": influx_module.continuous_query_exists,
"influxdb.create_continuous_query": influx_module.create_continuous_query,
},
):
influx.present(
name="foo",
database="fnord",
query="fnord",
resample_time="whatever",
coverage_period="fnord",
**expected_kwargs
)
actual_kwargs = influx_module.create_continuous_query.mock_calls[0].kwargs
assert actual_kwargs == expected_kwargs
| Add tests for influxdb create_continuous_query | Add tests for influxdb create_continuous_query
Currently marked as xfail, since we'll pull the existing changes into
here.
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | import pytest
import salt.modules.influxdbmod as influx_mod
import salt.states.influxdb_continuous_query as influx
from tests.support.mock import create_autospec, patch
@pytest.fixture
def configure_loader_modules():
return {influx: {"__salt__": {}, "__opts__": {"test": False}}}
@pytest.mark.xfail
@pytest.mark.parametrize(
"expected_kwargs",
(
{},
{"something": "extra"},
{"something": "extra", "even": "more"},
{"something": "extra", "still": "more and more and more", "and": "more"},
{
"something": "extra",
"what": "in tarnation",
"do": "you want",
"to": "add here?",
},
),
)
def test_when_present_is_called_it_should_pass_client_args_to_create_module(
expected_kwargs,
):
influx_module = create_autospec(influx_mod)
influx_module.continuous_query_exists.return_value = False
with patch.dict(
influx.__salt__,
{
"influxdb.continuous_query_exists": influx_module.continuous_query_exists,
"influxdb.create_continuous_query": influx_module.create_continuous_query,
},
):
influx.present(
name="foo",
database="fnord",
query="fnord",
resample_time="whatever",
coverage_period="fnord",
**expected_kwargs
)
actual_kwargs = influx_module.create_continuous_query.mock_calls[0].kwargs
assert actual_kwargs == expected_kwargs
| Add tests for influxdb create_continuous_query
Currently marked as xfail, since we'll pull the existing changes into
here.
|
|
3c30166378d37c812cecb505a3d9023b079d24be | app/__init__.py | app/__init__.py | # Gevent needed for sockets
from gevent import monkey
monkey.patch_all()
# Imports
import os
from flask import Flask, render_template
from flask_socketio import SocketIO
import boto3
# Configure app
socketio = SocketIO()
app = Flask(__name__)
app.config.from_object(os.environ["APP_SETTINGS"])
import nltk
try:
nltk.data.find('tokenizers/punkt')
except LookupError:
nltk.download("punkt")
# DB
db = boto3.resource("dynamodb",
region_name=app.config["DYNAMO_REGION"],
endpoint_url=app.config["DYNAMO_DATABASE_URI"])
s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"])
from app import models
models.initialize()
# Initialize the controllers
from app import controllers
# Initialize app w/SocketIO
socketio.init_app(app)
# HTTP error handling
@app.errorhandler(404)
def not_found(error):
return render_template("404.html"), 404
| # Gevent needed for sockets
from gevent import monkey
monkey.patch_all()
# Imports
import os
from flask import Flask, render_template
from flask_socketio import SocketIO
import boto3
# Configure app
socketio = SocketIO()
app = Flask(__name__)
app.config.from_object(os.environ["APP_SETTINGS"])
import nltk
nltk.download("punkt")
# DB
db = boto3.resource("dynamodb",
region_name=app.config["DYNAMO_REGION"],
endpoint_url=app.config["DYNAMO_DATABASE_URI"])
s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"])
from app import models
models.initialize()
# Initialize the controllers
from app import controllers
# Initialize app w/SocketIO
socketio.init_app(app)
# HTTP error handling
@app.errorhandler(404)
def not_found(error):
return render_template("404.html"), 404
| Fix stupid nltk data download thing | Fix stupid nltk data download thing
| Python | mit | PapaCharlie/SteamyReviews,PapaCharlie/SteamyReviews,PapaCharlie/SteamyReviews,PapaCharlie/SteamyReviews | # Gevent needed for sockets
from gevent import monkey
monkey.patch_all()
# Imports
import os
from flask import Flask, render_template
from flask_socketio import SocketIO
import boto3
# Configure app
socketio = SocketIO()
app = Flask(__name__)
app.config.from_object(os.environ["APP_SETTINGS"])
import nltk
nltk.download("punkt")
# DB
db = boto3.resource("dynamodb",
region_name=app.config["DYNAMO_REGION"],
endpoint_url=app.config["DYNAMO_DATABASE_URI"])
s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"])
from app import models
models.initialize()
# Initialize the controllers
from app import controllers
# Initialize app w/SocketIO
socketio.init_app(app)
# HTTP error handling
@app.errorhandler(404)
def not_found(error):
return render_template("404.html"), 404
| Fix stupid nltk data download thing
# Gevent needed for sockets
from gevent import monkey
monkey.patch_all()
# Imports
import os
from flask import Flask, render_template
from flask_socketio import SocketIO
import boto3
# Configure app
socketio = SocketIO()
app = Flask(__name__)
app.config.from_object(os.environ["APP_SETTINGS"])
import nltk
try:
nltk.data.find('tokenizers/punkt')
except LookupError:
nltk.download("punkt")
# DB
db = boto3.resource("dynamodb",
region_name=app.config["DYNAMO_REGION"],
endpoint_url=app.config["DYNAMO_DATABASE_URI"])
s3 = boto3.resource("s3", region_name=app.config["DYNAMO_REGION"])
from app import models
models.initialize()
# Initialize the controllers
from app import controllers
# Initialize app w/SocketIO
socketio.init_app(app)
# HTTP error handling
@app.errorhandler(404)
def not_found(error):
return render_template("404.html"), 404
|
1046157fa2e062f12123e110c82851c2484216be | gallery_plugins/plugin_gfycat.py | gallery_plugins/plugin_gfycat.py | import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| Update gfycat plugin for python3 support | Update gfycat plugin for python3 support
| Python | mit | regosen/gallery_get | import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read().decode("utf8")
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read().decode("utf8")
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
| Update gfycat plugin for python3 support
import re
try:
import urllib.request as urllib
except:
import urllib # Python 2
def title(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
link = 'https://gfycat.com/cajax/get/' + gfyId
respond = urllib.urlopen(link).read()
username = re.findall(r'\"userName\":\"(.+?)\",' ,respond)[0]
return username if username != "anonymous" else "gfycat " + gfyId
def redirect(source):
gfyId = re.findall(r'href=\".*gfycat.com/(\w+).*\">', source)[-1]
respond = urllib.urlopen('https://gfycat.com/cajax/get/' + gfyId).read()
webmurl = re.findall(r'\"webmUrl\":\"(.+?)\",' ,respond)[0]
# delete escape characters
webmurl = webmurl.replace("\\","")
# for some reason we can not connect via https
webmurl = webmurl.replace("https", "http")
return webmurl
same_filename = True
|
389d7e5d131188d5b8a3f9111d9a6a7a96ce8af8 | dmoj/executors/ICK.py | dmoj/executors/ICK.py | from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
| from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
| Make Intercal executor not fail to start at times. | Make Intercal executor not fail to start at times.
| Python | agpl-3.0 | DMOJ/judge,DMOJ/judge,DMOJ/judge | from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
flags = [self.get_command(), '-O', self._code]
if self.problem == self.test_name:
# Do not fail self-test to random compiler bug.
flags.insert(1, '-b')
return flags
| Make Intercal executor not fail to start at times.
from .base_executor import CompiledExecutor
class Executor(CompiledExecutor):
ext = '.i'
name = 'ICK'
command = 'ick'
test_program = '''\
PLEASE DO ,1 <- #1
DO .4 <- #0
DO .5 <- #0
DO COME FROM (30)
DO WRITE IN ,1
DO .1 <- ,1SUB#1
DO (10) NEXT
PLEASE GIVE UP
(20) PLEASE RESUME '?.1$#256'~'#256$#256'
(10) DO (20) NEXT
DO FORGET #1
PLEASE DO .2 <- .4
DO (1000) NEXT
DO .4 <- .3~#255
PLEASE DO .3 <- !3~#15'$!3~#240'
DO .3 <- !3~#15'$!3~#240'
DO .2 <- !3~#15'$!3~#240'
PLEASE DO .1 <- .5
DO (1010) NEXT
DO .5 <- .2
DO ,1SUB#1 <- .3
(30) PLEASE READ OUT ,1
'''
def get_compile_args(self):
return [self.get_command(), '-O', self._code]
|
4dc2d5710f5f34a0611c8d38a84ee3c5ecf79463 | uliweb/contrib/rbac/tags.py | uliweb/contrib/rbac/tags.py | from uliweb.core.template import BaseBlockNode
from uliweb import functions
class PermissionNode(BaseBlockNode):
def __init__(self, name='', content=None):
super(PermissionNode, self).__init__(name, content)
self.nodes = ['if functions.has_permission(request.user, "%s"):\n' % self.name]
def end(self):
self.nodes.append('pass\n')
class RoleNode(PermissionNode):
def __init__(self, name='', content=None):
super(RoleNode, self).__init__(name, content)
self.nodes = ['if functions.has_role(request.user, "%s"):\n' % self.name]
| from uliweb.core.template import BaseBlockNode
from uliweb import functions
class PermissionNode(BaseBlockNode):
def __init__(self, name='', content=None):
super(PermissionNode, self).__init__(name, content)
self.nodes = ['if functions.has_permission(request.user, %s):\n' % self.name]
def end(self):
self.nodes.append('pass\n')
class RoleNode(PermissionNode):
def __init__(self, name='', content=None):
super(RoleNode, self).__init__(name, content)
self.nodes = ['if functions.has_role(request.user, %s):\n' % self.name]
| Change role and permission tag parameter format, no need "" | Change role and permission tag parameter format, no need ""
| Python | bsd-2-clause | wwfifi/uliweb,wwfifi/uliweb,wwfifi/uliweb,limodou/uliweb,limodou/uliweb,wwfifi/uliweb,limodou/uliweb,limodou/uliweb | from uliweb.core.template import BaseBlockNode
from uliweb import functions
class PermissionNode(BaseBlockNode):
def __init__(self, name='', content=None):
super(PermissionNode, self).__init__(name, content)
self.nodes = ['if functions.has_permission(request.user, %s):\n' % self.name]
def end(self):
self.nodes.append('pass\n')
class RoleNode(PermissionNode):
def __init__(self, name='', content=None):
super(RoleNode, self).__init__(name, content)
self.nodes = ['if functions.has_role(request.user, %s):\n' % self.name]
| Change role and permission tag parameter format, no need ""
from uliweb.core.template import BaseBlockNode
from uliweb import functions
class PermissionNode(BaseBlockNode):
def __init__(self, name='', content=None):
super(PermissionNode, self).__init__(name, content)
self.nodes = ['if functions.has_permission(request.user, "%s"):\n' % self.name]
def end(self):
self.nodes.append('pass\n')
class RoleNode(PermissionNode):
def __init__(self, name='', content=None):
super(RoleNode, self).__init__(name, content)
self.nodes = ['if functions.has_role(request.user, "%s"):\n' % self.name]
|
415e34fb913feaf623320827fb7680ee56c9d335 | gunicorn.conf.py | gunicorn.conf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
bind = '127.0.0.1:8001'
workers = 6
proc_name = 'lastuser'
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
bind = '127.0.0.1:8002'
workers = 6
proc_name = 'lastuser'
| Use a different port for lastuser | Use a different port for lastuser
| Python | bsd-2-clause | hasgeek/lastuser,hasgeek/lastuser,sindhus/lastuser,hasgeek/lastuser,sindhus/lastuser,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/lastuser,sindhus/lastuser,sindhus/lastuser,sindhus/lastuser,hasgeek/funnel,hasgeek/lastuser | #!/usr/bin/env python
# -*- coding: utf-8 -*-
bind = '127.0.0.1:8002'
workers = 6
proc_name = 'lastuser'
| Use a different port for lastuser
#!/usr/bin/env python
# -*- coding: utf-8 -*-
bind = '127.0.0.1:8001'
workers = 6
proc_name = 'lastuser'
|
dd31ff9372f587cf2fd7e634f3c6886fa9beedc0 | examples/pywapi-example.py | examples/pywapi-example.py | #!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
| #!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
| Fix error in example script | Fix error in example script | Python | mit | kheuton/python-weather-api | #!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + weather_com_result['current_conditions']['text'].lower() + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York."
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
| Fix error in example script
#!/usr/bin/env python
import pywapi
weather_com_result = pywapi.get_weather_from_weather_com('10001')
yahoo_result = pywapi.get_weather_from_yahoo('10001')
noaa_result = pywapi.get_weather_from_noaa('KJFK')
print "Weather.com says: It is " + string.lower(weather_com_result['current_conditions']['text']) + " and " + weather_com_result['current_conditions']['temperature'] + "C now in New York.\n\n"
print("Yahoo says: It is " + yahoo_result['condition']['text'].lower() + " and " + yahoo_result['condition']['temp'] + "C now in New York.")
print("NOAA says: It is " + noaa_result['weather'].lower() + " and " + noaa_result['temp_c'] + "C now in New York.")
|
97e60ffa741bafbd34bcee18d0dce9f323b0132a | project/settings/prod.py | project/settings/prod.py | # Local
from .base import *
# Heroku Settings
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
'.barberscore.com',
'.herokuapp.com',
]
DATABASES['default']['TEST'] = {
'NAME': DATABASES['default']['NAME'],
}
# Email
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = get_env_variable("SENDGRID_USERNAME")
EMAIL_HOST_PASSWORD = get_env_variable("SENDGRID_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = "[Barberscore] "
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
| # Local
from .base import *
# Heroku Settings
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
'testserver',
'.barberscore.com',
'.herokuapp.com',
]
DATABASES['default']['TEST'] = {
'NAME': DATABASES['default']['NAME'],
}
# Email
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = get_env_variable("SENDGRID_USERNAME")
EMAIL_HOST_PASSWORD = get_env_variable("SENDGRID_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = "[Barberscore] "
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
| TEst adding test server to allowed hosts | TEst adding test server to allowed hosts
| Python | bsd-2-clause | dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django | # Local
from .base import *
# Heroku Settings
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
'testserver',
'.barberscore.com',
'.herokuapp.com',
]
DATABASES['default']['TEST'] = {
'NAME': DATABASES['default']['NAME'],
}
# Email
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = get_env_variable("SENDGRID_USERNAME")
EMAIL_HOST_PASSWORD = get_env_variable("SENDGRID_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = "[Barberscore] "
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
| TEst adding test server to allowed hosts
# Local
from .base import *
# Heroku Settings
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
'.barberscore.com',
'.herokuapp.com',
]
DATABASES['default']['TEST'] = {
'NAME': DATABASES['default']['NAME'],
}
# Email
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = get_env_variable("SENDGRID_USERNAME")
EMAIL_HOST_PASSWORD = get_env_variable("SENDGRID_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = "[Barberscore] "
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
|
1e10fa30998f63359ddd26d9804bd32a837c2cab | armstrong/esi/tests/_utils.py | armstrong/esi/tests/_utils.py | from django.conf import settings
from django.test import TestCase as DjangoTestCase
import fudge
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
| from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
import fudge
def with_fake_request(func):
def inner(self, *args, **kwargs):
request = fudge.Fake(HttpRequest)
fudge.clear_calls()
result = func(self, request, *args, **kwargs)
fudge.verify()
fudge.clear_expectations()
return result
return inner
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
| Add in a decorator for generating fake request objects for test cases | Add in a decorator for generating fake request objects for test cases
| Python | bsd-3-clause | armstrong/armstrong.esi | from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase as DjangoTestCase
import fudge
def with_fake_request(func):
def inner(self, *args, **kwargs):
request = fudge.Fake(HttpRequest)
fudge.clear_calls()
result = func(self, request, *args, **kwargs)
fudge.verify()
fudge.clear_expectations()
return result
return inner
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
| Add in a decorator for generating fake request objects for test cases
from django.conf import settings
from django.test import TestCase as DjangoTestCase
import fudge
class TestCase(DjangoTestCase):
def setUp(self):
self._original_settings = settings
def tearDown(self):
settings = self._original_settings
|
6d15230f46c22226f6a2e84ac41fc39e6c5c190b | linode/objects/linode/backup.py | linode/objects/linode/backup.py | from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| Fix datetime fields in Backup and SupportTicket | Fix datetime fields in Backup and SupportTicket
This closes #23.
| Python | bsd-3-clause | linode/python-linode-api,jo-tez/python-linode-api | from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'created': Property(is_datetime=True),
'duration': Property(),
'finished': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
| Fix datetime fields in Backup and SupportTicket
This closes #23.
from .. import DerivedBase, Property, Base
class Backup(DerivedBase):
api_name = 'backups'
api_endpoint = '/linode/instances/{linode_id}/backups/{id}'
derived_url_path = 'backups'
parent_id_name='linode_id'
properties = {
'id': Property(identifier=True),
'create_dt': Property(is_datetime=True),
'duration': Property(),
'finish_dt': Property(is_datetime=True),
'message': Property(),
'status': Property(volatile=True),
'type': Property(),
'linode_id': Property(identifier=True),
'label': Property(),
'configs': Property(),
'disks': Property(),
'availability': Property(),
}
def restore_to(self, linode, **kwargs):
d = {
"linode": linode.id if issubclass(type(linode), Base) else linode,
}
d.update(kwargs)
result = self._client.post("{}/restore".format(Backup.api_endpoint), model=self,
data=d)
return True
|
1663cb8557de85b1f3ccf5822fa01758e679ccd7 | TestScript/multi_client.py | TestScript/multi_client.py | # -*- coding: utf-8 -*-
__author__ = 'sm9'
import asyncore, socket
import string, random
import struct, time
HOST = '192.168.0.11'
PORT = 9001
PKT_CS_LOGIN = 1
PKT_SC_LOGIN = 2
PKT_CS_CHAT = 3
PKT_SC_CHAT = 4
def str_generator(size=128, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
class PacketDecoder(object):
def __init__(self):
self._stream = ''
def feed(self, buff):
self._stream += buff
def decode(self):
while len(self._stream) > 4:
pkt_size = struct.unpack('h', self._stream[:2])[0]
if pkt_size > len(self._stream):
break
packet = self._stream[:pkt_size]
yield packet
self._stream = self._stream[pkt_size:]
class Client(asyncore.dispatcher):
def __init__(self, pid):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect( (HOST, PORT) )
self.decoder = PacketDecoder()
self.islogin = False
self.pid = pid
self.buffer = struct.pack('hhi', 8, PKT_CS_LOGIN, pid)
def handle_connect(self):
pass
def handle_close(self):
self.close()
def handle_read(self):
data = self.recv(8192)
if not data:
#print 'NOT DATA'
self.close()
else:
self.decoder.feed(data)
for packet in self.decoder.decode():
self.process(packet)
if self.islogin:
self.buffer = struct.pack('<hhi1024s', 1032, PKT_CS_CHAT, self.pid, str_generator(1000))
def writable(self):
return (len(self.buffer) > 0)
def handle_write(self):
sent = self.send(self.buffer)
self.buffer = self.buffer[sent:]
def process(self, packet):
pkt_len, pkt_type = struct.unpack('hh', packet[:4])
if pkt_type == PKT_SC_LOGIN:
self.islogin = True
playerId, posX, posY, posZ, playerName = struct.unpack('<iddd30s', packet[4:])
print playerId, posX, posY, posZ, playerName
elif pkt_type == PKT_SC_CHAT:
playerId, playerName, chat = struct.unpack('<i30s1024s', packet[4:])
#print playerId, playerName, chat
else:
print 'PKT_TYPE ERROR'
for pid in range(1000, 1100):
client = Client(pid)
time.sleep(0.02)
asyncore.loop()
| Test script for the stress test | *added: Test script for the stress test | Python | mit | zrma/EasyGameServer,zeliard/EasyGameServer,zeliard/EasyGameServer,zrma/EasyGameServer,Lt-Red/EasyGameServer,Lt-Red/EasyGameServer | # -*- coding: utf-8 -*-
__author__ = 'sm9'
import asyncore, socket
import string, random
import struct, time
HOST = '192.168.0.11'
PORT = 9001
PKT_CS_LOGIN = 1
PKT_SC_LOGIN = 2
PKT_CS_CHAT = 3
PKT_SC_CHAT = 4
def str_generator(size=128, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(size))
class PacketDecoder(object):
def __init__(self):
self._stream = ''
def feed(self, buff):
self._stream += buff
def decode(self):
while len(self._stream) > 4:
pkt_size = struct.unpack('h', self._stream[:2])[0]
if pkt_size > len(self._stream):
break
packet = self._stream[:pkt_size]
yield packet
self._stream = self._stream[pkt_size:]
class Client(asyncore.dispatcher):
def __init__(self, pid):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect( (HOST, PORT) )
self.decoder = PacketDecoder()
self.islogin = False
self.pid = pid
self.buffer = struct.pack('hhi', 8, PKT_CS_LOGIN, pid)
def handle_connect(self):
pass
def handle_close(self):
self.close()
def handle_read(self):
data = self.recv(8192)
if not data:
#print 'NOT DATA'
self.close()
else:
self.decoder.feed(data)
for packet in self.decoder.decode():
self.process(packet)
if self.islogin:
self.buffer = struct.pack('<hhi1024s', 1032, PKT_CS_CHAT, self.pid, str_generator(1000))
def writable(self):
return (len(self.buffer) > 0)
def handle_write(self):
sent = self.send(self.buffer)
self.buffer = self.buffer[sent:]
def process(self, packet):
pkt_len, pkt_type = struct.unpack('hh', packet[:4])
if pkt_type == PKT_SC_LOGIN:
self.islogin = True
playerId, posX, posY, posZ, playerName = struct.unpack('<iddd30s', packet[4:])
print playerId, posX, posY, posZ, playerName
elif pkt_type == PKT_SC_CHAT:
playerId, playerName, chat = struct.unpack('<i30s1024s', packet[4:])
#print playerId, playerName, chat
else:
print 'PKT_TYPE ERROR'
for pid in range(1000, 1100):
client = Client(pid)
time.sleep(0.02)
asyncore.loop()
| *added: Test script for the stress test
|
|
2c7adc6fd0a53db44951eccb8f5db3b45e4a4653 | setup.py | setup.py | from setuptools import setup
setup(
name='jinjer',
version='0.1',
packages=['jinjer'],
package_dir={'': 'src'},
url='https://github.com/andrematheus/jinjer',
license='BSD',
author='André Roque Matheus',
author_email='[email protected]',
description='Tool to render Jinja templates from command line',
requires=['docopt', 'jinja2', 'PyYaml'],
entry_points={
'console_scripts': [
'jinjer = jinjer.jinjer:main'
]
}
)
| from setuptools import setup
setup(
name='jinjer',
version='0.2',
packages=['jinjer'],
package_dir={'': 'src'},
url='https://github.com/andrematheus/jinjer',
license='BSD',
author='André Roque Matheus',
author_email='[email protected]',
description='Tool to render Jinja templates from command line',
install_requires=['docopt', 'jinja2', 'PyYaml'],
entry_points={
'console_scripts': [
'jinjer = jinjer.jinjer:main'
]
}
)
| Correct requires to force installation. | Correct requires to force installation.
| Python | mit | andrematheus/jinjer | from setuptools import setup
setup(
name='jinjer',
version='0.2',
packages=['jinjer'],
package_dir={'': 'src'},
url='https://github.com/andrematheus/jinjer',
license='BSD',
author='André Roque Matheus',
author_email='[email protected]',
description='Tool to render Jinja templates from command line',
install_requires=['docopt', 'jinja2', 'PyYaml'],
entry_points={
'console_scripts': [
'jinjer = jinjer.jinjer:main'
]
}
)
| Correct requires to force installation.
from setuptools import setup
setup(
name='jinjer',
version='0.1',
packages=['jinjer'],
package_dir={'': 'src'},
url='https://github.com/andrematheus/jinjer',
license='BSD',
author='André Roque Matheus',
author_email='[email protected]',
description='Tool to render Jinja templates from command line',
requires=['docopt', 'jinja2', 'PyYaml'],
entry_points={
'console_scripts': [
'jinjer = jinjer.jinjer:main'
]
}
)
|
e6f19cc58f32b855fc1f71086dac0ad56b697ed3 | opps/articles/urls.py | opps/articles/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
| Add cache on article page (via url) | Add cache on article page (via url)
| Python | mit | opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,williamroot/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
from django.views.decorators.cache import cache_page
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', cache_page(60 * 2)(OppsList.as_view()), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
cache_page(60 * 15)(OppsDetail.as_view()), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$',
cache_page(60 * 2)(OppsList.as_view()), name='channel'),
)
| Add cache on article page (via url)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls import patterns, url
from .views import OppsDetail, OppsList, Search
urlpatterns = patterns(
'',
url(r'^$', OppsList.as_view(), name='home'),
url(r'^search/', Search(), name='search'),
url(r'^(?P<channel__long_slug>[\w//-]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='open'),
url(r'^(?P<channel__long_slug>[\w\b//-]+)/$', OppsList.as_view(),
name='channel'),
)
|
030e558b3b52900b8fa2cea9a92c055de3ec5b44 | corehq/apps/domain/management/commands/migrate_domain_countries.py | corehq/apps/domain/management/commands/migrate_domain_countries.py | from django.core.management.base import LabelCommand
from django_countries.countries import COUNTRIES
from corehq.apps.domain.models import Domain
class Command(LabelCommand):
help = "Migrates old django domain countries from string to list. Sept 2014."
args = ""
label = ""
def handle(self, *args, **options):
print "Migrating Domain countries"
country_lookup = {x[1].lower(): x[0] for x in COUNTRIES}
for domain in Domain.get_all():
try:
if isinstance(domain.deployment.country, basestring):
if domain.deployment.country in country_lookup.keys():
abbr = [country_lookup[domain.deployment.country.lower()]]
else:
abbr = []
domain.deployment.country = abbr
domain.save()
except Exception as e:
print "There was an error migrating the domain named %s." % domain.name
print "Error: %s", e
| Add management command to migrate countries to list | Add management command to migrate countries to list
| Python | bsd-3-clause | puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq | from django.core.management.base import LabelCommand
from django_countries.countries import COUNTRIES
from corehq.apps.domain.models import Domain
class Command(LabelCommand):
help = "Migrates old django domain countries from string to list. Sept 2014."
args = ""
label = ""
def handle(self, *args, **options):
print "Migrating Domain countries"
country_lookup = {x[1].lower(): x[0] for x in COUNTRIES}
for domain in Domain.get_all():
try:
if isinstance(domain.deployment.country, basestring):
if domain.deployment.country in country_lookup.keys():
abbr = [country_lookup[domain.deployment.country.lower()]]
else:
abbr = []
domain.deployment.country = abbr
domain.save()
except Exception as e:
print "There was an error migrating the domain named %s." % domain.name
print "Error: %s", e
| Add management command to migrate countries to list
|
|
8d53a7478a139770d9ffb241ec2985123c403845 | bookmarks/bookmarks/models.py | bookmarks/bookmarks/models.py | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "{}".format(
"Bookmark added:",
),
'icon_emoji': ":blue_book:",
'attachments': [
{
"fallback": instance.title,
"color": "good",
"title": instance.title,
"title_link": instance.url,
"text": instance.description,
}
]
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "<{}|{}>\n{}".format(
instance.url,
instance.title,
instance.description,
),
'icon_emoji': ":blue_book:",
'unfurl_links': True
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| Remove attachment and use slack link unfurling | Remove attachment and use slack link unfurling
| Python | mit | tom-henderson/bookmarks,tom-henderson/bookmarks,tom-henderson/bookmarks | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "<{}|{}>\n{}".format(
instance.url,
instance.title,
instance.description,
),
'icon_emoji': ":blue_book:",
'unfurl_links': True
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
| Remove attachment and use slack link unfurling
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from django.dispatch import receiver
from django.conf import settings
from taggit.managers import TaggableManager
import requests
class Bookmark(models.Model):
title = models.CharField(max_length=200, blank=True, null=True)
description = models.TextField(blank=True, null=True)
date_added = models.DateTimeField(default=timezone.now, blank=True)
tags = TaggableManager(blank=True)
private = models.BooleanField(default=False)
url = models.URLField(max_length=500)
def __unicode__(self):
return "{}: {} [{}]".format(
self.pk,
self.title[:40],
self.date_added
)
@receiver(models.signals.post_save, sender=Bookmark)
def bookmark_pre_save_handler(sender, instance, created, *args, **kwargs):
# Only run for new items, not updates
if created:
if not hasattr(settings, 'SLACK_WEBHOOK_URL'):
return
payload = {
'channel': "#bookmarks-dev",
'username': "Bookmarks",
'text': "{}".format(
"Bookmark added:",
),
'icon_emoji': ":blue_book:",
'attachments': [
{
"fallback": instance.title,
"color": "good",
"title": instance.title,
"title_link": instance.url,
"text": instance.description,
}
]
}
requests.post(settings.SLACK_WEBHOOK_URL, json=payload)
|
aca1b138350434c9afb08f31164269cd58de1d2d | YouKnowShit/CheckFile.py | YouKnowShit/CheckFile.py | import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file) | import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file) | Add a level of uper directory | Add a level of uper directory
| Python | mit | jiangtianyu2009/PiSoftCake | import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file)
print()
print()
print()
print('*****************************************************')
os.chdir(upupupdir)
upupupupdir = os.path.abspath('..')
print(upupupupdir)
filenames = os.listdir(upupupupdir)
for file in filenames:
print(file) | Add a level of uper directory
import os
import sys
(dir, filename) = os.path.split(os.path.abspath(sys.argv[0]))
print(dir)
filenames = os.listdir(dir)
for file in filenames:
print(file)
print('*****************************************************')
updir = os.path.abspath('..')
print(updir)
filenames = os.listdir(updir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(updir)
upupdir = os.path.abspath('..')
print(upupdir)
filenames = os.listdir(upupdir)
for file in filenames:
print(file)
print('*****************************************************')
os.chdir(upupdir)
upupupdir = os.path.abspath('..')
print(upupupdir)
filenames = os.listdir(upupupdir)
for file in filenames:
print(file) |
6ca79d2ce16f8745c8d58c3dea20174931820ef3 | api/models.py | api/models.py | from flask import current_app
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import (TimedJSONWebSignatureSerializer
as Serializer, BadSignature, SignatureExpired)
from datetime import datetime
from api import db
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.String(30))
last_name = db.Column(db.String(30))
email = db.Column(db.String(64), unique=True)
username = db.Column(db.String(80), unique=True)
password_hash = db.Column(db.String(128))
bucket_lists = db.relationship('BucketList', backref='user', lazy='dynamic')
def __init__(self, first_name, last_name, email, username):
self.first_name = first_name
self.last_name = last_name
self.email = email
self.username = username
def __repr__(self):
return '<User %r>' % self.username
def full_name(self):
return '%s %s' % (self.first_name, self.last_name)
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
def generate_auth_token(self, expiration=7200):
s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': self.id})
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
user = User.query.get(data['id'])
return user
| Add User object to model | Add User object to model
| Python | mit | EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list | from flask import current_app
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import (TimedJSONWebSignatureSerializer
as Serializer, BadSignature, SignatureExpired)
from datetime import datetime
from api import db
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.String(30))
last_name = db.Column(db.String(30))
email = db.Column(db.String(64), unique=True)
username = db.Column(db.String(80), unique=True)
password_hash = db.Column(db.String(128))
bucket_lists = db.relationship('BucketList', backref='user', lazy='dynamic')
def __init__(self, first_name, last_name, email, username):
self.first_name = first_name
self.last_name = last_name
self.email = email
self.username = username
def __repr__(self):
return '<User %r>' % self.username
def full_name(self):
return '%s %s' % (self.first_name, self.last_name)
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
def verify_password(self, password):
return pwd_context.verify(password, self.password_hash)
def generate_auth_token(self, expiration=7200):
s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': self.id})
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
return None # valid token, but expired
except BadSignature:
return None # invalid token
user = User.query.get(data['id'])
return user
| Add User object to model
|
|
7383343f7fb77c74455a50490ad2886fcf36bbd5 | dlstats/fetchers/test_ecb.py | dlstats/fetchers/test_ecb.py | import unittest
import mongomock
import ulstats
from dlstats.fetchers._skeleton import (Skeleton, Category, Series, BulkSeries,
Dataset, Provider)
import datetime
from bson import ObjectId
#class CategoriesTestCase(unittest.TestCase):
#if __name__ == '__main__':
# unittest.main()
| Comment test for the moment | Comment test for the moment
| Python | agpl-3.0 | Widukind/dlstats,mmalter/dlstats,mmalter/dlstats,MichelJuillard/dlstats,MichelJuillard/dlstats,MichelJuillard/dlstats,mmalter/dlstats,Widukind/dlstats | import unittest
import mongomock
import ulstats
from dlstats.fetchers._skeleton import (Skeleton, Category, Series, BulkSeries,
Dataset, Provider)
import datetime
from bson import ObjectId
#class CategoriesTestCase(unittest.TestCase):
#if __name__ == '__main__':
# unittest.main()
| Comment test for the moment
|
|
df68b821807d25d204f43d7b1805da6c25f42b43 | src/lib/pagination.py | src/lib/pagination.py | from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
| from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
max_page_size = 100
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
| Set a maximum to the number of elements that may be requested | Set a maximum to the number of elements that may be requested
| Python | agpl-3.0 | lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django,lafranceinsoumise/api-django | from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
max_page_size = 100
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
| Set a maximum to the number of elements that may be requested
from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
class LegacyPaginator(PageNumberPagination):
"""
A legacy paginator that mocks the one from Eve Python
"""
page_size = 25
page_size_query_param = "max_results"
def get_paginated_response(self, data):
links = OrderedDict()
if self.page.has_next():
links['next'] = OrderedDict([
('href', self.get_next_link()),
('title', _('page suivante')),
])
if self.page.has_previous():
links['prev'] = OrderedDict([
('href', self.get_previous_link()),
('title', _('page précédente')),
])
meta = OrderedDict([
('max_results', self.page.paginator.per_page),
('total', self.page.paginator.count),
('page', self.page.number),
])
return Response(OrderedDict([
('_items', data),
('_links', links),
('_meta', meta),
]))
|
74f82029223cc541beab98d7026abb1ec992be40 | createTodoFile.py | createTodoFile.py | """createTodoFile.py: Creates an todo file with title name as current date"""
import time
import os.path
def createfile():
# My-File--2009-12-31--23-59-59.txt
date = time.strftime("%d-%m-%Y")
filename = "GOALS--" + date + ".todo"
if not os.path.exists(filename):
with open(filename, "a") as myfile:
myfile.write("[RESULTS - {}]".format(date))
print("INFO: " + filename + " created!")
else:
print("ERROR: " + filename + " already exist! Exiting..")
# TODO: To move files into archive if more than a week
def archiveFiles():
pass
def main():
createfile()
if __name__ == '__main__':
main()
| """createTodoFile.py: Creates an todo file with title name as current date"""
import os.path
import time
def createfile():
# My-File--2009-12-31--23-59-59.txt
date = time.strftime("%d-%m-%Y")
filename = "GOALS--" + date + ".todo"
if not os.path.exists(filename):
with open(filename, "a") as myfile:
myfile.write("[RESULTS - {}]".format(date))
print("INFO: " + filename + " created!")
addfileToSublime(filename)
else:
print("ERROR: " + filename + " already exist! Exiting..")
def addfileToSublime(file):
os.system("subl --add " + file)
def main():
createfile()
if __name__ == '__main__':
main()
| Add created file to sublime | feat: Add created file to sublime
| Python | mit | prajesh-ananthan/Tools | """createTodoFile.py: Creates an todo file with title name as current date"""
import os.path
import time
def createfile():
# My-File--2009-12-31--23-59-59.txt
date = time.strftime("%d-%m-%Y")
filename = "GOALS--" + date + ".todo"
if not os.path.exists(filename):
with open(filename, "a") as myfile:
myfile.write("[RESULTS - {}]".format(date))
print("INFO: " + filename + " created!")
addfileToSublime(filename)
else:
print("ERROR: " + filename + " already exist! Exiting..")
def addfileToSublime(file):
os.system("subl --add " + file)
def main():
createfile()
if __name__ == '__main__':
main()
| feat: Add created file to sublime
"""createTodoFile.py: Creates an todo file with title name as current date"""
import time
import os.path
def createfile():
# My-File--2009-12-31--23-59-59.txt
date = time.strftime("%d-%m-%Y")
filename = "GOALS--" + date + ".todo"
if not os.path.exists(filename):
with open(filename, "a") as myfile:
myfile.write("[RESULTS - {}]".format(date))
print("INFO: " + filename + " created!")
else:
print("ERROR: " + filename + " already exist! Exiting..")
# TODO: To move files into archive if more than a week
def archiveFiles():
pass
def main():
createfile()
if __name__ == '__main__':
main()
|
b6389de5f531fa49e911b344cbaea29599260c82 | src/tests/test_cleanup_marathon_orphaned_containers.py | src/tests/test_cleanup_marathon_orphaned_containers.py | #!/usr/bin/env python
import cleanup_marathon_orphaned_images
# These should be left running
mesos_deployed_old = {
'Names': ['/mesos-deployed-old', ],
}
mesos_undeployed_young = {
'Names': ['/mesos-undeployed-young', ],
}
nonmesos_undeployed_old = {
'Names': ['/nonmesos-undeployed-old', ],
}
# These should be cleaned up
mesos_undeployed_old = {
'Names': ['/mesos-undeployed-old', ],
}
running_images = [
mesos_deployed_old,
nonmesos_undeployed_old,
mesos_undeployed_young,
mesos_undeployed_old,
]
def test_get_mesos_images():
actual = cleanup_marathon_orphaned_images.get_mesos_images(running_images)
assert nonmesos_undeployed_old not in actual
def test_get_old_images():
pass
| #!/usr/bin/env python
import cleanup_marathon_orphaned_images
# These should be left running
mesos_deployed_old = {
'Names': ['/mesos-deployed-old', ],
}
mesos_undeployed_young = {
'Names': ['/mesos-undeployed-young', ],
}
nonmesos_undeployed_old = {
'Names': ['/nonmesos-undeployed-old', ],
}
# These should be cleaned up
mesos_undeployed_old = {
'Names': ['/mesos-undeployed-old', ],
}
running_images = [
mesos_deployed_old,
nonmesos_undeployed_old,
mesos_undeployed_young,
mesos_undeployed_old,
]
def test_get_mesos_images():
assert nonmesos_undeployed_old in running_images
actual = cleanup_marathon_orphaned_images.get_mesos_images(running_images)
assert nonmesos_undeployed_old not in actual
def test_get_old_images():
pass
| Clarify intent and fail fast | Clarify intent and fail fast
| Python | apache-2.0 | Yelp/paasta,somic/paasta,gstarnberger/paasta,somic/paasta,gstarnberger/paasta,Yelp/paasta | #!/usr/bin/env python
import cleanup_marathon_orphaned_images
# These should be left running
mesos_deployed_old = {
'Names': ['/mesos-deployed-old', ],
}
mesos_undeployed_young = {
'Names': ['/mesos-undeployed-young', ],
}
nonmesos_undeployed_old = {
'Names': ['/nonmesos-undeployed-old', ],
}
# These should be cleaned up
mesos_undeployed_old = {
'Names': ['/mesos-undeployed-old', ],
}
running_images = [
mesos_deployed_old,
nonmesos_undeployed_old,
mesos_undeployed_young,
mesos_undeployed_old,
]
def test_get_mesos_images():
assert nonmesos_undeployed_old in running_images
actual = cleanup_marathon_orphaned_images.get_mesos_images(running_images)
assert nonmesos_undeployed_old not in actual
def test_get_old_images():
pass
| Clarify intent and fail fast
#!/usr/bin/env python
import cleanup_marathon_orphaned_images
# These should be left running
mesos_deployed_old = {
'Names': ['/mesos-deployed-old', ],
}
mesos_undeployed_young = {
'Names': ['/mesos-undeployed-young', ],
}
nonmesos_undeployed_old = {
'Names': ['/nonmesos-undeployed-old', ],
}
# These should be cleaned up
mesos_undeployed_old = {
'Names': ['/mesos-undeployed-old', ],
}
running_images = [
mesos_deployed_old,
nonmesos_undeployed_old,
mesos_undeployed_young,
mesos_undeployed_old,
]
def test_get_mesos_images():
actual = cleanup_marathon_orphaned_images.get_mesos_images(running_images)
assert nonmesos_undeployed_old not in actual
def test_get_old_images():
pass
|
3460a627b535a55eedefb7ec5a37fe068f3d7abd | tests/fixtures/postgres.py | tests/fixtures/postgres.py | import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close() | Add fixtures for connecting to Postgres test database | Add fixtures for connecting to Postgres test database
| Python | mit | igboyes/virtool,igboyes/virtool,virtool/virtool,virtool/virtool | import pytest
from sqlalchemy import text
from sqlalchemy.exc import ProgrammingError
from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.models import Base
@pytest.fixture(scope="function")
async def engine():
engine = create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/virtool", isolation_level="AUTOCOMMIT")
async with engine.connect() as conn:
try:
await conn.execute(text("CREATE DATABASE test"))
except ProgrammingError:
pass
return create_async_engine("postgresql+asyncpg://virtool:virtool@localhost/test")
@pytest.fixture(scope="function")
async def dbsession(engine, loop):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
session = AsyncSession(bind=engine)
yield session
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await session.close() | Add fixtures for connecting to Postgres test database
|
|
55b7b07986590c4ab519fcda3c973c87ad23596b | flask_admin/model/typefmt.py | flask_admin/model/typefmt.py | from jinja2 import Markup
def null_formatter(value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
return Markup('<i class="icon-ok"></i>' if value else '')
DEFAULT_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter
}
| from jinja2 import Markup
def null_formatter(value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
return Markup('<i class="icon-ok"></i>' if value else '')
def list_formatter(values):
"""
Return string with comma separated values
:param values:
Value to check
"""
return u', '.join(values)
DEFAULT_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter,
list: list_formatter,
}
| Add extra type formatter for `list` type | Add extra type formatter for `list` type
| Python | bsd-3-clause | mrjoes/flask-admin,janusnic/flask-admin,Kha/flask-admin,wuxiangfeng/flask-admin,litnimax/flask-admin,HermasT/flask-admin,quokkaproject/flask-admin,Kha/flask-admin,flabe81/flask-admin,porduna/flask-admin,Junnplus/flask-admin,ibushong/test-repo,janusnic/flask-admin,jschneier/flask-admin,closeio/flask-admin,chase-seibert/flask-admin,litnimax/flask-admin,ArtemSerga/flask-admin,flask-admin/flask-admin,NickWoodhams/flask-admin,LennartP/flask-admin,late-warrior/flask-admin,likaiguo/flask-admin,iurisilvio/flask-admin,mikelambert/flask-admin,jamesbeebop/flask-admin,quokkaproject/flask-admin,mrjoes/flask-admin,pawl/flask-admin,jschneier/flask-admin,toddetzel/flask-admin,rochacbruno/flask-admin,ArtemSerga/flask-admin,Junnplus/flask-admin,torotil/flask-admin,ondoheer/flask-admin,plaes/flask-admin,AlmogCohen/flask-admin,plaes/flask-admin,wangjun/flask-admin,dxmo/flask-admin,jmagnusson/flask-admin,marrybird/flask-admin,torotil/flask-admin,wuxiangfeng/flask-admin,CoolCloud/flask-admin,toddetzel/flask-admin,lifei/flask-admin,ondoheer/flask-admin,phantomxc/flask-admin,mikelambert/flask-admin,mrjoes/flask-admin,petrus-jvrensburg/flask-admin,CoolCloud/flask-admin,wangjun/flask-admin,iurisilvio/flask-admin,petrus-jvrensburg/flask-admin,lifei/flask-admin,mikelambert/flask-admin,sfermigier/flask-admin,radioprotector/flask-admin,wuxiangfeng/flask-admin,petrus-jvrensburg/flask-admin,iurisilvio/flask-admin,likaiguo/flask-admin,jschneier/flask-admin,litnimax/flask-admin,flask-admin/flask-admin,petrus-jvrensburg/flask-admin,plaes/flask-admin,ibushong/test-repo,flask-admin/flask-admin,torotil/flask-admin,radioprotector/flask-admin,rochacbruno/flask-admin,wuxiangfeng/flask-admin,HermasT/flask-admin,LennartP/flask-admin,marrybird/flask-admin,dxmo/flask-admin,flask-admin/flask-admin,phantomxc/flask-admin,LennartP/flask-admin,chase-seibert/flask-admin,plaes/flask-admin,marrybird/flask-admin,mikelambert/flask-admin,wangjun/flask-admin,ArtemSerga/flask-admin,AlmogCohen/flask-admin,AlmogCohen/flask-admin,ondoheer/flask-admin,closeio/flask-admin,rochacbruno/flask-admin,flabe81/flask-admin,AlmogCohen/flask-admin,lifei/flask-admin,jmagnusson/flask-admin,mrjoes/flask-admin,pawl/flask-admin,torotil/flask-admin,likaiguo/flask-admin,HermasT/flask-admin,flabe81/flask-admin,porduna/flask-admin,iurisilvio/flask-admin,NickWoodhams/flask-admin,late-warrior/flask-admin,porduna/flask-admin,radioprotector/flask-admin,chase-seibert/flask-admin,CoolCloud/flask-admin,toddetzel/flask-admin,betterlife/flask-admin,betterlife/flask-admin,lifei/flask-admin,porduna/flask-admin,quokkaproject/flask-admin,rochacbruno/flask-admin,jschneier/flask-admin,late-warrior/flask-admin,pawl/flask-admin,toddetzel/flask-admin,phantomxc/flask-admin,late-warrior/flask-admin,wangjun/flask-admin,ondoheer/flask-admin,ibushong/test-repo,jmagnusson/flask-admin,CoolCloud/flask-admin,closeio/flask-admin,ArtemSerga/flask-admin,jamesbeebop/flask-admin,janusnic/flask-admin,marrybird/flask-admin,jamesbeebop/flask-admin,LennartP/flask-admin,phantomxc/flask-admin,Kha/flask-admin,radioprotector/flask-admin,flabe81/flask-admin,betterlife/flask-admin,sfermigier/flask-admin,jamesbeebop/flask-admin,closeio/flask-admin,Kha/flask-admin,Junnplus/flask-admin,Junnplus/flask-admin,ibushong/test-repo,dxmo/flask-admin,NickWoodhams/flask-admin,NickWoodhams/flask-admin,quokkaproject/flask-admin,sfermigier/flask-admin,likaiguo/flask-admin,HermasT/flask-admin,litnimax/flask-admin,jmagnusson/flask-admin,dxmo/flask-admin,betterlife/flask-admin,chase-seibert/flask-admin,janusnic/flask-admin | from jinja2 import Markup
def null_formatter(value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
return Markup('<i class="icon-ok"></i>' if value else '')
def list_formatter(values):
"""
Return string with comma separated values
:param values:
Value to check
"""
return u', '.join(values)
DEFAULT_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter,
list: list_formatter,
}
| Add extra type formatter for `list` type
from jinja2 import Markup
def null_formatter(value):
"""
Return `NULL` as the string for `None` value
:param value:
Value to check
"""
return Markup('<i>NULL</i>')
def empty_formatter(value):
"""
Return empty string for `None` value
:param value:
Value to check
"""
return ''
def bool_formatter(value):
"""
Return check icon if value is `True` or empty string otherwise.
:param value:
Value to check
"""
return Markup('<i class="icon-ok"></i>' if value else '')
DEFAULT_FORMATTERS = {
type(None): empty_formatter,
bool: bool_formatter
}
|
707a6016a3023fe423ede53db707c55273b0f6d0 | oauth2_provider/backends.py | oauth2_provider/backends.py | from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
| from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
| Use the OAuthLibCore object defined at the module level. | Use the OAuthLibCore object defined at the module level.
| Python | bsd-2-clause | bleib1dj/django-oauth-toolkit,StepicOrg/django-oauth-toolkit,JensTimmerman/django-oauth-toolkit,JensTimmerman/django-oauth-toolkit,StepicOrg/django-oauth-toolkit,DeskConnect/django-oauth-toolkit,bleib1dj/django-oauth-toolkit,DeskConnect/django-oauth-toolkit | from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
| Use the OAuthLibCore object defined at the module level.
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
f0c374eba55cdeb56bf3526ea0da041556f6ffe2 | tests/test_yamlmod.py | tests/test_yamlmod.py | import os
import sys
from nose.tools import *
def setup_yamlmod():
import yamlmod
reload(yamlmod)
def teardown_yamlmod():
import yamlmod
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
sys.meta_path.remove(hook)
break
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_installs_hook():
import yamlmod
hooks = []
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
hooks.append(hook)
eq_(len(hooks), 1, 'did not find exactly one hook')
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_fixture():
import fixture
eq_(fixture.debug, True)
eq_(fixture.domain, 'example.com')
eq_(fixture.users, ['alice', 'bob', 'cathy'])
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_hidden_attributes():
import fixture
eq_(fixture.__name__, 'fixture')
eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
| import os
import sys
from nose.tools import *
try:
from importlib import reload
except ImportError:
pass
def setup_yamlmod():
import yamlmod
reload(yamlmod)
def teardown_yamlmod():
import yamlmod
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
sys.meta_path.remove(hook)
break
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_installs_hook():
import yamlmod
hooks = []
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
hooks.append(hook)
eq_(len(hooks), 1, 'did not find exactly one hook')
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_fixture():
import fixture
eq_(fixture.debug, True)
eq_(fixture.domain, 'example.com')
eq_(fixture.users, ['alice', 'bob', 'cathy'])
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_hidden_attributes():
import fixture
eq_(fixture.__name__, 'fixture')
eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
| Fix tests on python 3 | Fix tests on python 3
| Python | mit | sciyoshi/yamlmod | import os
import sys
from nose.tools import *
try:
from importlib import reload
except ImportError:
pass
def setup_yamlmod():
import yamlmod
reload(yamlmod)
def teardown_yamlmod():
import yamlmod
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
sys.meta_path.remove(hook)
break
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_installs_hook():
import yamlmod
hooks = []
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
hooks.append(hook)
eq_(len(hooks), 1, 'did not find exactly one hook')
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_fixture():
import fixture
eq_(fixture.debug, True)
eq_(fixture.domain, 'example.com')
eq_(fixture.users, ['alice', 'bob', 'cathy'])
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_hidden_attributes():
import fixture
eq_(fixture.__name__, 'fixture')
eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
| Fix tests on python 3
import os
import sys
from nose.tools import *
def setup_yamlmod():
import yamlmod
reload(yamlmod)
def teardown_yamlmod():
import yamlmod
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
sys.meta_path.remove(hook)
break
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_installs_hook():
import yamlmod
hooks = []
for hook in sys.meta_path:
if isinstance(hook, yamlmod.YamlImportHook):
hooks.append(hook)
eq_(len(hooks), 1, 'did not find exactly one hook')
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_import_fixture():
import fixture
eq_(fixture.debug, True)
eq_(fixture.domain, 'example.com')
eq_(fixture.users, ['alice', 'bob', 'cathy'])
@with_setup(setup_yamlmod, teardown_yamlmod)
def test_hidden_attributes():
import fixture
eq_(fixture.__name__, 'fixture')
eq_(fixture.__file__, os.path.join(os.path.dirname(__file__), 'fixture.yml'))
|
356dd5294280db3334f86354202f0d68881254b9 | joerd/check.py | joerd/check.py | import zipfile
import tarfile
import shutil
import tempfile
from osgeo import gdal
def is_zip(tmp):
"""
Returns True if the NamedTemporaryFile given as the argument appears to be
a well-formed Zip file.
"""
try:
zip_file = zipfile.ZipFile(tmp.name, 'r')
test_result = zip_file.testzip()
return test_result is None
except:
pass
return False
def tar_gz_has_gdal(member_name):
"""
Returns a function which, when called with a NamedTemporaryFile, returns
True if that file is a GZip-encoded TAR file containing a `member_name`
member which can be opened with GDAL.
"""
def func(tmp):
try:
tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2)
with tempfile.NamedTemporaryFile() as tmp_member:
shutil.copyfileobj(tar.extractfile(member_name), tmp_member)
return is_gdal(tmp_member)
except (tarfile.TarError, IOError, OSError) as e:
return False
def is_gdal(tmp):
"""
Returns true if the NamedTemporaryFile given as the argument appears to be
a well-formed GDAL raster file.
"""
try:
ds = gdal.Open(tmp.name)
band = ds.GetRasterBand(1)
band.ComputeBandStats()
return True
except:
pass
return False
| import zipfile
import tarfile
import shutil
import tempfile
from osgeo import gdal
def is_zip(tmp):
"""
Returns True if the NamedTemporaryFile given as the argument appears to be
a well-formed Zip file.
"""
try:
zip_file = zipfile.ZipFile(tmp.name, 'r')
test_result = zip_file.testzip()
return test_result is None
except:
pass
return False
def tar_gz_has_gdal(member_name):
"""
Returns a function which, when called with a NamedTemporaryFile, returns
True if that file is a GZip-encoded TAR file containing a `member_name`
member which can be opened with GDAL.
"""
def func(tmp):
try:
tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2)
with tempfile.NamedTemporaryFile() as tmp_member:
shutil.copyfileobj(tar.extractfile(member_name), tmp_member)
tmp_member.seek(0)
return is_gdal(tmp_member)
except (tarfile.TarError, IOError, OSError) as e:
return False
return func
def is_gdal(tmp):
"""
Returns true if the NamedTemporaryFile given as the argument appears to be
a well-formed GDAL raster file.
"""
try:
ds = gdal.Open(tmp.name)
band = ds.GetRasterBand(1)
band.ComputeBandStats()
return True
except:
pass
return False
| Return verifier function, not None. Also reset the temporary file to the beginning before verifying it. | Return verifier function, not None. Also reset the temporary file to the beginning before verifying it.
| Python | mit | mapzen/joerd,tilezen/joerd | import zipfile
import tarfile
import shutil
import tempfile
from osgeo import gdal
def is_zip(tmp):
"""
Returns True if the NamedTemporaryFile given as the argument appears to be
a well-formed Zip file.
"""
try:
zip_file = zipfile.ZipFile(tmp.name, 'r')
test_result = zip_file.testzip()
return test_result is None
except:
pass
return False
def tar_gz_has_gdal(member_name):
"""
Returns a function which, when called with a NamedTemporaryFile, returns
True if that file is a GZip-encoded TAR file containing a `member_name`
member which can be opened with GDAL.
"""
def func(tmp):
try:
tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2)
with tempfile.NamedTemporaryFile() as tmp_member:
shutil.copyfileobj(tar.extractfile(member_name), tmp_member)
tmp_member.seek(0)
return is_gdal(tmp_member)
except (tarfile.TarError, IOError, OSError) as e:
return False
return func
def is_gdal(tmp):
"""
Returns true if the NamedTemporaryFile given as the argument appears to be
a well-formed GDAL raster file.
"""
try:
ds = gdal.Open(tmp.name)
band = ds.GetRasterBand(1)
band.ComputeBandStats()
return True
except:
pass
return False
| Return verifier function, not None. Also reset the temporary file to the beginning before verifying it.
import zipfile
import tarfile
import shutil
import tempfile
from osgeo import gdal
def is_zip(tmp):
"""
Returns True if the NamedTemporaryFile given as the argument appears to be
a well-formed Zip file.
"""
try:
zip_file = zipfile.ZipFile(tmp.name, 'r')
test_result = zip_file.testzip()
return test_result is None
except:
pass
return False
def tar_gz_has_gdal(member_name):
"""
Returns a function which, when called with a NamedTemporaryFile, returns
True if that file is a GZip-encoded TAR file containing a `member_name`
member which can be opened with GDAL.
"""
def func(tmp):
try:
tar = tarfile.open(tmp.name, mode='r:gz', errorlevel=2)
with tempfile.NamedTemporaryFile() as tmp_member:
shutil.copyfileobj(tar.extractfile(member_name), tmp_member)
return is_gdal(tmp_member)
except (tarfile.TarError, IOError, OSError) as e:
return False
def is_gdal(tmp):
"""
Returns true if the NamedTemporaryFile given as the argument appears to be
a well-formed GDAL raster file.
"""
try:
ds = gdal.Open(tmp.name)
band = ds.GetRasterBand(1)
band.ComputeBandStats()
return True
except:
pass
return False
|
6cd640eb09d674afaff1c96e69322705a843dde9 | src/commands/user/user.py | src/commands/user/user.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
User command...
"""
import grp
class User(object):
"""Something, something, something darkside...."""
def __init__(self, settingsInstance, commandInstance, cmdName, *args):
super(User, self).__init__()
self.settingsInstance = settingsInstance
self.commandInstance = commandInstance
if cmdName is not None:
if args[0] is not None:
getattr(self, cmdName)(*args)
else:
getattr(self, cmdName)()
def test(self):
self.commandInstance.replyWithMessage(
self.commandInstance.user
)
def users(self):
"""Get a list of users in the users group."""
self.commandInstance.replyWithMessage(self._users())
def _users(self, output='string'):
for group in grp.getgrall():
if group.gr_name == 'users':
members = group.gr_mem
if output == 'list':
return members
else:
return ', '.join(members)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
User command...
"""
import grp
class User(object):
"""Something, something, something darkside...."""
def __init__(self, settingsInstance, commandInstance, cmdName, *args):
super(User, self).__init__()
self.settingsInstance = settingsInstance
self.commandInstance = commandInstance
if cmdName is not None:
if args[0] is not None:
getattr(self, cmdName)(*args)
else:
getattr(self, cmdName)()
def testuser(self):
self.commandInstance.replyWithMessage(
self.commandInstance.user
)
def testchannel(self):
self.commandInstance.replyWithMessage(
self.commandInstance.channel
)
def users(self):
"""Get a list of users in the users group."""
self.commandInstance.replyWithMessage(self._users())
def _users(self, output='string'):
for group in grp.getgrall():
if group.gr_name == 'users':
members = group.gr_mem
if output == 'list':
return members
else:
return ', '.join(members)
| Test if you can return the channel. | Test if you can return the channel.
| Python | bsd-3-clause | Tehnix/PyIRCb | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
User command...
"""
import grp
class User(object):
"""Something, something, something darkside...."""
def __init__(self, settingsInstance, commandInstance, cmdName, *args):
super(User, self).__init__()
self.settingsInstance = settingsInstance
self.commandInstance = commandInstance
if cmdName is not None:
if args[0] is not None:
getattr(self, cmdName)(*args)
else:
getattr(self, cmdName)()
def testuser(self):
self.commandInstance.replyWithMessage(
self.commandInstance.user
)
def testchannel(self):
self.commandInstance.replyWithMessage(
self.commandInstance.channel
)
def users(self):
"""Get a list of users in the users group."""
self.commandInstance.replyWithMessage(self._users())
def _users(self, output='string'):
for group in grp.getgrall():
if group.gr_name == 'users':
members = group.gr_mem
if output == 'list':
return members
else:
return ', '.join(members)
| Test if you can return the channel.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
User command...
"""
import grp
class User(object):
"""Something, something, something darkside...."""
def __init__(self, settingsInstance, commandInstance, cmdName, *args):
super(User, self).__init__()
self.settingsInstance = settingsInstance
self.commandInstance = commandInstance
if cmdName is not None:
if args[0] is not None:
getattr(self, cmdName)(*args)
else:
getattr(self, cmdName)()
def test(self):
self.commandInstance.replyWithMessage(
self.commandInstance.user
)
def users(self):
"""Get a list of users in the users group."""
self.commandInstance.replyWithMessage(self._users())
def _users(self, output='string'):
for group in grp.getgrall():
if group.gr_name == 'users':
members = group.gr_mem
if output == 'list':
return members
else:
return ', '.join(members)
|
daf6468079e7ff3e00550db0f3a16bc109184027 | osgtest/tests/test_49_jobs.py | osgtest/tests/test_49_jobs.py | #pylint: disable=C0301
#pylint: disable=C0111
#pylint: disable=R0201
#pylint: disable=R0904
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
| #pylint: disable=C0301
#pylint: disable=C0111
#pylint: disable=R0201
#pylint: disable=R0904
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
| Drop job env backup cleanup dependence on osg-configure | Drop job env backup cleanup dependence on osg-configure
We already dropped the creation of the job env files in 840ea8
| Python | apache-2.0 | efajardo/osg-test,efajardo/osg-test | #pylint: disable=C0301
#pylint: disable=C0111
#pylint: disable=R0201
#pylint: disable=R0904
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
| Drop job env backup cleanup dependence on osg-configure
We already dropped the creation of the job env files in 840ea8
#pylint: disable=C0301
#pylint: disable=C0111
#pylint: disable=R0201
#pylint: disable=R0904
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
|
ba063bd052284571ab6e51b0fcebe238c415071f | setup.py | setup.py | import os
import sys
import setuptools
from keystoneclient.openstack.common import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
tests_require = setup.parse_requirements(['tools/test-requires'])
setuptools.setup(
name="python-keystoneclient",
version=setup.get_post_version('keystoneclient'),
description="Client library for OpenStack Keystone API",
long_description=read('README.rst'),
url='https://github.com/openstack/python-keystoneclient',
license='Apache',
author='Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss',
author_email='[email protected]',
packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=requires,
dependency_links=depend_links,
cmdclass=setup.get_cmdclass(),
tests_require=tests_require,
test_suite="nose.collector",
entry_points={
'console_scripts': ['keystone = keystoneclient.shell:main']
}
)
| import os
import sys
import setuptools
from keystoneclient.openstack.common import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
tests_require = setup.parse_requirements(['tools/test-requires'])
setuptools.setup(
name="python-keystoneclient",
version=setup.get_post_version('keystoneclient'),
description="Client library for OpenStack Keystone API",
long_description=read('README.rst'),
url='https://github.com/openstack/python-keystoneclient',
license='Apache',
author='Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss',
author_email='[email protected]',
packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: OpenStack',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=requires,
dependency_links=depend_links,
cmdclass=setup.get_cmdclass(),
tests_require=tests_require,
test_suite="nose.collector",
entry_points={
'console_scripts': ['keystone = keystoneclient.shell:main']
}
)
| Add OpenStack trove classifier for PyPI | Add OpenStack trove classifier for PyPI
Add trove classifier to have the client listed among the
other OpenStack-related projets on PyPI.
Change-Id: I1ddae8d1272a2b1c5e4c666c9aa4e4a274431415
Signed-off-by: Doug Hellmann <[email protected]>
| Python | apache-2.0 | jamielennox/keystoneauth,sileht/keystoneauth,citrix-openstack-build/keystoneauth | import os
import sys
import setuptools
from keystoneclient.openstack.common import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
tests_require = setup.parse_requirements(['tools/test-requires'])
setuptools.setup(
name="python-keystoneclient",
version=setup.get_post_version('keystoneclient'),
description="Client library for OpenStack Keystone API",
long_description=read('README.rst'),
url='https://github.com/openstack/python-keystoneclient',
license='Apache',
author='Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss',
author_email='[email protected]',
packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: OpenStack',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=requires,
dependency_links=depend_links,
cmdclass=setup.get_cmdclass(),
tests_require=tests_require,
test_suite="nose.collector",
entry_points={
'console_scripts': ['keystone = keystoneclient.shell:main']
}
)
| Add OpenStack trove classifier for PyPI
Add trove classifier to have the client listed among the
other OpenStack-related projets on PyPI.
Change-Id: I1ddae8d1272a2b1c5e4c666c9aa4e4a274431415
Signed-off-by: Doug Hellmann <[email protected]>
import os
import sys
import setuptools
from keystoneclient.openstack.common import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
tests_require = setup.parse_requirements(['tools/test-requires'])
setuptools.setup(
name="python-keystoneclient",
version=setup.get_post_version('keystoneclient'),
description="Client library for OpenStack Keystone API",
long_description=read('README.rst'),
url='https://github.com/openstack/python-keystoneclient',
license='Apache',
author='Nebula Inc, based on work by Rackspace and Jacob Kaplan-Moss',
author_email='[email protected]',
packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
install_requires=requires,
dependency_links=depend_links,
cmdclass=setup.get_cmdclass(),
tests_require=tests_require,
test_suite="nose.collector",
entry_points={
'console_scripts': ['keystone = keystoneclient.shell:main']
}
)
|
c0633bc60dda6b81e623795f2c65a1eb0ba5933d | blinkytape/blinkyplayer.py | blinkytape/blinkyplayer.py | import time
class BlinkyPlayer(object):
FOREVER = -1
def __init__(self, blinkytape):
self._blinkytape = blinkytape
def play(self, animation, num_cycles = FOREVER):
finished = self._make_finished_predicate(animation, num_cycles)
animation.begin()
while not finished():
pixels = animation.next_frame()
self._blinkytape.update(pixels)
time.sleep(animation.frame_period_sec)
animation.end()
def _make_finished_predicate(self, animation, num_cycles):
if num_cycles < 0 and num_cycles != self.FOREVER: raise ValueError
if num_cycles == self.FOREVER:
predicate = lambda: False
else:
self._num_frames = animation.frame_count * num_cycles
def predicate():
finished = self._num_frames <= 0
self._num_frames = self._num_frames - 1
return finished
return predicate
| import time
class BlinkyPlayer(object):
FOREVER = -1
def __init__(self, blinkytape):
self._blinkytape = blinkytape
def play(self, animation, num_cycles = FOREVER):
finished = self._finished_predicate(animation, num_cycles)
animation.begin()
while not finished():
pixels = animation.next_frame()
self._blinkytape.update(pixels)
time.sleep(animation.frame_period_sec)
animation.end()
def _finished_predicate(self, animation, num_cycles):
if num_cycles < 0 and num_cycles != self.FOREVER: raise ValueError
if num_cycles == self.FOREVER:
predicate = self._forever_predicate()
else:
self._num_frames = animation.frame_count * num_cycles
predicate = self._frame_count_predicate()
return predicate
def _forever_predicate(self):
return lambda: False
def _frame_count_predicate(self):
def predicate():
finished = self._num_frames <= 0
self._num_frames = self._num_frames - 1
return finished
return predicate
| Clean up BlinkyPlayer a little | Clean up BlinkyPlayer a little
| Python | mit | jonspeicher/blinkyfun | import time
class BlinkyPlayer(object):
FOREVER = -1
def __init__(self, blinkytape):
self._blinkytape = blinkytape
def play(self, animation, num_cycles = FOREVER):
finished = self._finished_predicate(animation, num_cycles)
animation.begin()
while not finished():
pixels = animation.next_frame()
self._blinkytape.update(pixels)
time.sleep(animation.frame_period_sec)
animation.end()
def _finished_predicate(self, animation, num_cycles):
if num_cycles < 0 and num_cycles != self.FOREVER: raise ValueError
if num_cycles == self.FOREVER:
predicate = self._forever_predicate()
else:
self._num_frames = animation.frame_count * num_cycles
predicate = self._frame_count_predicate()
return predicate
def _forever_predicate(self):
return lambda: False
def _frame_count_predicate(self):
def predicate():
finished = self._num_frames <= 0
self._num_frames = self._num_frames - 1
return finished
return predicate
| Clean up BlinkyPlayer a little
import time
class BlinkyPlayer(object):
FOREVER = -1
def __init__(self, blinkytape):
self._blinkytape = blinkytape
def play(self, animation, num_cycles = FOREVER):
finished = self._make_finished_predicate(animation, num_cycles)
animation.begin()
while not finished():
pixels = animation.next_frame()
self._blinkytape.update(pixels)
time.sleep(animation.frame_period_sec)
animation.end()
def _make_finished_predicate(self, animation, num_cycles):
if num_cycles < 0 and num_cycles != self.FOREVER: raise ValueError
if num_cycles == self.FOREVER:
predicate = lambda: False
else:
self._num_frames = animation.frame_count * num_cycles
def predicate():
finished = self._num_frames <= 0
self._num_frames = self._num_frames - 1
return finished
return predicate
|
b32f4955665b8618a9623f6898a15d4da40dc58e | dxtbx/command_line/print_header.py | dxtbx/command_line/print_header.py | def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
| def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
print 'Using header reader: %s' % format.__name__
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
| Print the Format class used | Print the Format class used | Python | bsd-3-clause | dials/dials,dials/dials,dials/dials,dials/dials,dials/dials | def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
print 'Using header reader: %s' % format.__name__
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
| Print the Format class used
def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
|
cb9b1a2163f960e34721f74bad30622fda71e43b | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | packages/Python/lldbsuite/test/lang/objc/modules-cache/TestClangModulesCache.py | """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Darwin")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@skipUnlessDarwin
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Foo")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function. | Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@326640 91177308-0d34-0410-b5e6-96231b3b80d8
| Python | apache-2.0 | apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb | """Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@skipUnlessDarwin
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Foo")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
| Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function.
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@326640 91177308-0d34-0410-b5e6-96231b3b80d8
"""Test that the clang modules cache directory can be controlled."""
from __future__ import print_function
import unittest2
import os
import time
import platform
import shutil
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ObjCModulesTestCase(TestBase):
NO_DEBUG_INFO_TESTCASE = True
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
def test_expr(self):
self.build()
self.main_source_file = lldb.SBFileSpec("main.m")
self.runCmd("settings set target.auto-import-clang-modules true")
mod_cache = self.getBuildArtifact("my-clang-modules-cache")
if os.path.isdir(mod_cache):
shutil.rmtree(mod_cache)
self.assertFalse(os.path.isdir(mod_cache),
"module cache should not exist")
self.runCmd('settings set clang.modules-cache-path "%s"' % mod_cache)
self.runCmd('settings set target.clang-module-search-paths "%s"'
% self.getSourceDir())
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(
self, "Set breakpoint here", self.main_source_file)
self.runCmd("expr @import Darwin")
self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
|
60a10e8fbfd40197db8226f0791c7064c80fe370 | run.py | run.py | import os
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--run', action="store_true")
parser.add_argument('--deploy', action="store_true")
args = parser.parse_args()
if not any(vars(args).values()):
parser.print_help()
elif args.run:
os.system("ENVIRONMENT=development python server.py")
elif args.deploy:
os.system("git push heroku master")
| import sys
import os
import argparse
import shutil
from efselab import build
parser = argparse.ArgumentParser()
parser.add_argument('--run', action="store_true")
parser.add_argument('--deploy', action="store_true")
parser.add_argument('--update', action="store_true")
args = parser.parse_args()
if not any(vars(args).values()):
parser.print_help()
elif args.run:
os.system("ENVIRONMENT=development python server.py")
elif args.deploy:
os.system("git push heroku master")
elif args.update:
if not os.path.exists("../efselab/"):
sys.exit("Couldn't find a local efselab checkout...")
shutil.copy("../efselab/fasthash.c", "./efselab")
shutil.copy("../efselab/lemmatize.c", "./efselab")
shutil.copy("../efselab/pysuc.c", "./efselab/suc.c")
if not os.path.exists("../efselab/swe-pipeline"):
sys.exit("Couldn't find a local swe-pipeline directory for models...")
shutil.copy("../efselab/swe-pipeline/suc.bin", "./efselab")
shutil.copy("../efselab/swe-pipeline/suc-saldo.lemmas", "./efselab")
print("Building new files...")
os.chdir("efselab")
build.main()
| Add new update command that updates efselab dependencies. | Add new update command that updates efselab dependencies.
Former-commit-id: 6cfed1b9af9c0bbf34b7e58e3aa8ac3bada85aa7 | Python | mit | EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger,EmilStenstrom/json-tagger | import sys
import os
import argparse
import shutil
from efselab import build
parser = argparse.ArgumentParser()
parser.add_argument('--run', action="store_true")
parser.add_argument('--deploy', action="store_true")
parser.add_argument('--update', action="store_true")
args = parser.parse_args()
if not any(vars(args).values()):
parser.print_help()
elif args.run:
os.system("ENVIRONMENT=development python server.py")
elif args.deploy:
os.system("git push heroku master")
elif args.update:
if not os.path.exists("../efselab/"):
sys.exit("Couldn't find a local efselab checkout...")
shutil.copy("../efselab/fasthash.c", "./efselab")
shutil.copy("../efselab/lemmatize.c", "./efselab")
shutil.copy("../efselab/pysuc.c", "./efselab/suc.c")
if not os.path.exists("../efselab/swe-pipeline"):
sys.exit("Couldn't find a local swe-pipeline directory for models...")
shutil.copy("../efselab/swe-pipeline/suc.bin", "./efselab")
shutil.copy("../efselab/swe-pipeline/suc-saldo.lemmas", "./efselab")
print("Building new files...")
os.chdir("efselab")
build.main()
| Add new update command that updates efselab dependencies.
Former-commit-id: 6cfed1b9af9c0bbf34b7e58e3aa8ac3bada85aa7
import os
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--run', action="store_true")
parser.add_argument('--deploy', action="store_true")
args = parser.parse_args()
if not any(vars(args).values()):
parser.print_help()
elif args.run:
os.system("ENVIRONMENT=development python server.py")
elif args.deploy:
os.system("git push heroku master")
|
0ba671698bf4e268ae3f17e11078a5eb669a174c | indico/modules/events/roles/__init__.py | indico/modules/events/roles/__init__.py | # This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import session
from indico.core import signals
from indico.core.logger import Logger
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('events.roles')
@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
if event.can_manage(session.user):
return SideMenuItem('roles', _('Roles'), url_for('event_roles.manage', event), 65, icon='medal')
| # This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import session
from indico.core import signals
from indico.core.logger import Logger
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('events.roles')
@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
if event.can_manage(session.user):
roles_section = 'organization' if event.type == 'conference' else 'advanced'
return SideMenuItem('roles', _('Roles'), url_for('event_roles.manage', event), section=roles_section)
| Move roles menu item into a submenu | Move roles menu item into a submenu
- 'organization' for conferences
- 'advanced' for other event types
| Python | mit | mic4ael/indico,indico/indico,mic4ael/indico,pferreir/indico,DirkHoffmann/indico,OmeGak/indico,pferreir/indico,OmeGak/indico,DirkHoffmann/indico,indico/indico,mvidalgarcia/indico,ThiefMaster/indico,indico/indico,OmeGak/indico,ThiefMaster/indico,DirkHoffmann/indico,DirkHoffmann/indico,mvidalgarcia/indico,ThiefMaster/indico,pferreir/indico,mvidalgarcia/indico,mvidalgarcia/indico,ThiefMaster/indico,pferreir/indico,mic4ael/indico,indico/indico,OmeGak/indico,mic4ael/indico | # This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import session
from indico.core import signals
from indico.core.logger import Logger
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('events.roles')
@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
if event.can_manage(session.user):
roles_section = 'organization' if event.type == 'conference' else 'advanced'
return SideMenuItem('roles', _('Roles'), url_for('event_roles.manage', event), section=roles_section)
| Move roles menu item into a submenu
- 'organization' for conferences
- 'advanced' for other event types
# This file is part of Indico.
# Copyright (C) 2002 - 2018 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import session
from indico.core import signals
from indico.core.logger import Logger
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem
logger = Logger.get('events.roles')
@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
if event.can_manage(session.user):
return SideMenuItem('roles', _('Roles'), url_for('event_roles.manage', event), 65, icon='medal')
|
ea4746f6b809c0c3b2a6931bc863121c07ee2c9a | lib/plugins/method/__init__.py | lib/plugins/method/__init__.py | from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
| from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
| Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently | Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently
| Python | mit | factorial-io/fabalicious,factorial-io/fabalicious | from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
@staticmethod
def supports(methodName):
raise NotImplementedError
| Make supports method throw NotImplementedError so that methods failing to implement it does not fail silently
from yapsy.IPlugin import IPlugin
from lib.methods import BaseMethod
class IMethodPlugin(BaseMethod, IPlugin):
def __init__(self):
pass
def setNameAndFactory(self, name, factory):
self.methodName = name
self.factory = factory
|
f4026b34f97c4e42a2229d47e778fbe09b351eb1 | tools/tabulate_events.py | tools/tabulate_events.py | #!/usr/bin/env python
# note: must be invoked from the top-level sts directory
import time
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from sts.replay_event import *
from sts.dataplane_traces.trace import Trace
from sts.input_traces.log_parser import parse
from tools.pretty_print_input_trace import default_fields, field_formatters
class EventGrouping(object):
def __init__(self, name):
self.name = name
self.events = []
def append(self, event):
self.events.append(event)
def printToConsole(self):
# TODO(cs): bad feng-shui to have side-effects rather than returning a
# string. Should refactor field_formatters to not have side-effects
# either.
title_str = "====================== %s ======================" % self.name
print title_str
for event in self.events:
for field in default_fields:
field_formatters[field](event)
print "=" * len(title_str)
def main(args):
# N.B. it would be nice to include link discovery or host location discovery
# events here, but that's specific to the Controller's log output.
network_failure_events = EventGrouping("Topology Change Events")
controlplane_failure_events = EventGrouping("Control Plane Blockages")
controller_failure_events = EventGrouping("Controller Change Events")
host_events = EventGrouping("Host Migrations")
event2grouping = {
SwitchFailure : network_failure_events,
SwitchRecovery : network_failure_events,
LinkFailure : network_failure_events,
LinkRecovery : network_failure_events,
ControlChannelBlock : controlplane_failure_events,
ControlChannelUnblock : controlplane_failure_events,
ControllerFailure : controller_failure_events,
ControllerRecovery : controller_failure_events,
BlockControllerPair : controller_failure_events,
UnblockControllerPair : controller_failure_events,
HostMigration : host_events,
# TODO(cs): support TrafficInjection, DataplaneDrop? Might get too noisy.
}
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) in event2grouping:
event2grouping[type(event)].append(event)
for grouping in [network_failure_events, controlplane_failure_events,
controller_failure_events, host_events]:
grouping.printToConsole()
print
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
help='The input json file to be printed')
args = parser.parse_args()
main(args)
| Add simple tool for tabulating classes of event types for readability | Add simple tool for tabulating classes of event types for readability
| Python | apache-2.0 | jmiserez/sts,jmiserez/sts,ucb-sts/sts,ucb-sts/sts | #!/usr/bin/env python
# note: must be invoked from the top-level sts directory
import time
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
from sts.replay_event import *
from sts.dataplane_traces.trace import Trace
from sts.input_traces.log_parser import parse
from tools.pretty_print_input_trace import default_fields, field_formatters
class EventGrouping(object):
def __init__(self, name):
self.name = name
self.events = []
def append(self, event):
self.events.append(event)
def printToConsole(self):
# TODO(cs): bad feng-shui to have side-effects rather than returning a
# string. Should refactor field_formatters to not have side-effects
# either.
title_str = "====================== %s ======================" % self.name
print title_str
for event in self.events:
for field in default_fields:
field_formatters[field](event)
print "=" * len(title_str)
def main(args):
# N.B. it would be nice to include link discovery or host location discovery
# events here, but that's specific to the Controller's log output.
network_failure_events = EventGrouping("Topology Change Events")
controlplane_failure_events = EventGrouping("Control Plane Blockages")
controller_failure_events = EventGrouping("Controller Change Events")
host_events = EventGrouping("Host Migrations")
event2grouping = {
SwitchFailure : network_failure_events,
SwitchRecovery : network_failure_events,
LinkFailure : network_failure_events,
LinkRecovery : network_failure_events,
ControlChannelBlock : controlplane_failure_events,
ControlChannelUnblock : controlplane_failure_events,
ControllerFailure : controller_failure_events,
ControllerRecovery : controller_failure_events,
BlockControllerPair : controller_failure_events,
UnblockControllerPair : controller_failure_events,
HostMigration : host_events,
# TODO(cs): support TrafficInjection, DataplaneDrop? Might get too noisy.
}
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) in event2grouping:
event2grouping[type(event)].append(event)
for grouping in [network_failure_events, controlplane_failure_events,
controller_failure_events, host_events]:
grouping.printToConsole()
print
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
help='The input json file to be printed')
args = parser.parse_args()
main(args)
| Add simple tool for tabulating classes of event types for readability
|
|
143e76eaf220e5200150653627642dc2bc3a645e | examples/network_correlations.py | examples/network_correlations.py | """
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
| Add correlation matrix heatmap example | Add correlation matrix heatmap example
| Python | bsd-3-clause | oesteban/seaborn,gef756/seaborn,arokem/seaborn,anntzer/seaborn,drewokane/seaborn,ischwabacher/seaborn,phobson/seaborn,ebothmann/seaborn,cwu2011/seaborn,sinhrks/seaborn,q1ang/seaborn,aashish24/seaborn,ashhher3/seaborn,dimarkov/seaborn,lypzln/seaborn,mwaskom/seaborn,JWarmenhoven/seaborn,olgabot/seaborn,dotsdl/seaborn,lukauskas/seaborn,phobson/seaborn,clarkfitzg/seaborn,dhimmel/seaborn,mclevey/seaborn,petebachant/seaborn,nileracecrew/seaborn,aashish24/seaborn,lukauskas/seaborn,uhjish/seaborn,kyleam/seaborn,bsipocz/seaborn,wrobstory/seaborn,mwaskom/seaborn,muku42/seaborn,sauliusl/seaborn,Lx37/seaborn,jakevdp/seaborn,Guokr1991/seaborn,parantapa/seaborn,tim777z/seaborn,anntzer/seaborn,jat255/seaborn,arokem/seaborn,huongttlan/seaborn,mia1rab/seaborn | """
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
| Add correlation matrix heatmap example
|
|
c5b73be1bf0f0edd05c4743c2449bee568d01c76 | setup.py | setup.py | from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| Add long description from README | Add long description from README
| Python | mit | Turbasen/turbasen.py | from distutils.core import setup
from os import path
from turbasen import VERSION
name = 'turbasen'
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
long_description=long_description,
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
| Add long description from README
from distutils.core import setup
from turbasen import VERSION
name = 'turbasen'
setup(
name=name,
packages=[name],
version=VERSION,
description='Client for Nasjonal Turbase REST API',
author='Ali Kaafarani',
author_email='[email protected]',
url='https://github.com/Turbasen/turbasen.py',
download_url='https://github.com/Turbasen/turbasen.py/tarball/v%s' % (VERSION),
keywords=['turbasen', 'nasjonalturbase', 'turistforening', 'rest-api'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Norwegian',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
install_requires=['requests'],
)
|
fc2a3635a37cd1dcfc2ea8705e2cae37b083b6a2 | lintcode/Easy/181_Flip_Bits.py | lintcode/Easy/181_Flip_Bits.py | class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
| Add solution to lintcode problem 181 | Add solution to lintcode problem 181
| Python | mit | Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode | class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
| Add solution to lintcode problem 181
|
|
699a2d8d97d8c526f9fb269245d5fb593d47d3ca | rasa/nlu/tokenizers/__init__.py | rasa/nlu/tokenizers/__init__.py | class Tokenizer:
pass
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
| import functools
class Tokenizer:
pass
@functools.total_ordering
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
def __eq__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text == other.text
def __lt__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text < other.text
| Fix to make sanitize_examples() be able to sort tokens | Fix to make sanitize_examples() be able to sort tokens
| Python | apache-2.0 | RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu | import functools
class Tokenizer:
pass
@functools.total_ordering
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
def __eq__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text == other.text
def __lt__(self, other):
if not isinstance(other, Token):
return NotImplemented
return self.text < other.text
| Fix to make sanitize_examples() be able to sort tokens
class Tokenizer:
pass
class Token:
def __init__(self, text, offset, data=None):
self.offset = offset
self.text = text
self.end = offset + len(text)
self.data = data if data else {}
def set(self, prop, info):
self.data[prop] = info
def get(self, prop, default=None):
return self.data.get(prop, default)
|
58cd27f4daa921a63d0a80c31f5ff1bf73cb1992 | lintcode/Medium/040_Implement_Queue_by_Two_Stacks.py | lintcode/Medium/040_Implement_Queue_by_Two_Stacks.py | class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
| Add solution to lintcode question 40 | Add solution to lintcode question 40
| Python | mit | Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode | class MyQueue:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, element):
# write your code here
self.stack1.append(element)
def top(self):
# write your code here
# return the top element
return self.stack1[0]
def pop(self):
# write your code here
# pop and return the top element
res = None
while (self.stack1):
ele = self.stack1.pop(-1)
if (self.stack1):
self.stack2.append(ele)
else:
res = ele
while (self.stack2):
ele = self.stack2.pop(-1)
self.stack1.append(ele)
return res
| Add solution to lintcode question 40
|
|
7adab964e523ec6af96acbea0fa7f30efef78dc8 | examples/tracing/strlen_hist.py | examples/tracing/strlen_hist.py | #!/usr/bin/python
#
# strlen_hist.py Histogram of system-wide strlen return values
#
# A basic example of using uprobes along with a histogram to show
# distributions.
#
# Runs until ctrl-c is pressed.
#
# Copyright (c) PLUMgrid, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# Example output:
# $ sudo ./strlen_hist.py
# 22:12:52
# strlen return: : count distribution
# 0 -> 1 : 2106 |**************** |
# 2 -> 3 : 1172 |********* |
# 4 -> 7 : 3892 |****************************** |
# 8 -> 15 : 5096 |****************************************|
# 16 -> 31 : 2201 |***************** |
# 32 -> 63 : 547 |**** |
# 64 -> 127 : 106 | |
# 128 -> 255 : 13 | |
# 256 -> 511 : 27 | |
# 512 -> 1023 : 6 | |
# 1024 -> 2047 : 10 | |
# ^C$
#
from __future__ import print_function
import bcc
import time
text = """
#include <uapi/linux/ptrace.h>
BPF_HISTOGRAM(dist);
int count(struct pt_regs *ctx) {
dist.increment(bpf_log2l(ctx->ax));
return 0;
}
"""
b = bcc.BPF(text=text)
sym="strlen"
b.attach_uretprobe(name="c", sym=sym, fn_name="count")
dist = b["dist"]
try:
while True:
time.sleep(1)
print("%-8s\n" % time.strftime("%H:%M:%S"), end="")
dist.print_log2_hist(sym + " return:")
dist.clear()
except KeyboardInterrupt:
pass
| Add uprobe strlen histogram example | Add uprobe strlen histogram example
This example traces all calls to libc's strlen(). The program is attached as a
retprobe, therefore giving access to the resulting string length. The value is
kept in a log2 histogram that is printed to console once per second.
Example:
```
$ sudo ./strlen_hist.py
22:12:51
strlen return: : count distribution
0 -> 1 : 2041 |**************** |
2 -> 3 : 1120 |******** |
4 -> 7 : 3300 |************************** |
8 -> 15 : 4995 |****************************************|
16 -> 31 : 2130 |***************** |
32 -> 63 : 562 |**** |
^C
```
Signed-off-by: Brenden Blanco <[email protected]>
| Python | apache-2.0 | romain-intel/bcc,tuxology/bcc,brendangregg/bcc,mcaleavya/bcc,zaafar/bcc,romain-intel/bcc,tuxology/bcc,shodoco/bcc,shodoco/bcc,iovisor/bcc,iovisor/bcc,mkacik/bcc,mcaleavya/bcc,mkacik/bcc,iovisor/bcc,mcaleavya/bcc,mcaleavya/bcc,mkacik/bcc,mcaleavya/bcc,shodoco/bcc,brendangregg/bcc,brendangregg/bcc,zaafar/bcc,tuxology/bcc,zaafar/bcc,iovisor/bcc,tuxology/bcc,mkacik/bcc,tuxology/bcc,brendangregg/bcc,romain-intel/bcc,iovisor/bcc,shodoco/bcc,shodoco/bcc,zaafar/bcc,mkacik/bcc,brendangregg/bcc,romain-intel/bcc,zaafar/bcc,romain-intel/bcc | #!/usr/bin/python
#
# strlen_hist.py Histogram of system-wide strlen return values
#
# A basic example of using uprobes along with a histogram to show
# distributions.
#
# Runs until ctrl-c is pressed.
#
# Copyright (c) PLUMgrid, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# Example output:
# $ sudo ./strlen_hist.py
# 22:12:52
# strlen return: : count distribution
# 0 -> 1 : 2106 |**************** |
# 2 -> 3 : 1172 |********* |
# 4 -> 7 : 3892 |****************************** |
# 8 -> 15 : 5096 |****************************************|
# 16 -> 31 : 2201 |***************** |
# 32 -> 63 : 547 |**** |
# 64 -> 127 : 106 | |
# 128 -> 255 : 13 | |
# 256 -> 511 : 27 | |
# 512 -> 1023 : 6 | |
# 1024 -> 2047 : 10 | |
# ^C$
#
from __future__ import print_function
import bcc
import time
text = """
#include <uapi/linux/ptrace.h>
BPF_HISTOGRAM(dist);
int count(struct pt_regs *ctx) {
dist.increment(bpf_log2l(ctx->ax));
return 0;
}
"""
b = bcc.BPF(text=text)
sym="strlen"
b.attach_uretprobe(name="c", sym=sym, fn_name="count")
dist = b["dist"]
try:
while True:
time.sleep(1)
print("%-8s\n" % time.strftime("%H:%M:%S"), end="")
dist.print_log2_hist(sym + " return:")
dist.clear()
except KeyboardInterrupt:
pass
| Add uprobe strlen histogram example
This example traces all calls to libc's strlen(). The program is attached as a
retprobe, therefore giving access to the resulting string length. The value is
kept in a log2 histogram that is printed to console once per second.
Example:
```
$ sudo ./strlen_hist.py
22:12:51
strlen return: : count distribution
0 -> 1 : 2041 |**************** |
2 -> 3 : 1120 |******** |
4 -> 7 : 3300 |************************** |
8 -> 15 : 4995 |****************************************|
16 -> 31 : 2130 |***************** |
32 -> 63 : 562 |**** |
^C
```
Signed-off-by: Brenden Blanco <[email protected]>
|
|
3c0f8899521465fcb2d4685b6e6e6e3e61c0eabc | kitchen/dashboard/graphs.py | kitchen/dashboard/graphs.py | """Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
| """Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
| Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated | Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated
| Python | apache-2.0 | edelight/kitchen,edelight/kitchen,edelight/kitchen,edelight/kitchen | """Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
try:
client_roles = node[attr]['client_roles']
except (TypeError, KeyError):
continue
for client_node in nodes:
if set.intersection(
set(client_roles), set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
| Change to "ask for forgiveness", as the 'client_roles' condition could get too complicated
"""Facility to render node graphs using pydot"""
import os
import pydot
from kitchen.settings import STATIC_ROOT, REPO
def generate_node_map(nodes):
"""Generates a graphviz nodemap"""
graph = pydot.Dot(graph_type='digraph')
graph_nodes = {}
# Create nodes
for node in nodes:
label = node['name'] + "\n" + "\n".join(
[role for role in node['role'] \
if not role.startswith(REPO['EXCLUDE_ROLE_PREFIX'])])
node_el = pydot.Node(label,
shape="box",
style="filled",
fillcolor="lightyellow",
fontsize="8")
graph_nodes[node['name']] = node_el
graph.add_node(node_el)
# Create links
for node in nodes:
for attr in node.keys():
if isinstance(node[attr], dict) and 'client_roles' in node[attr]:
for client_node in nodes:
if set.intersection(set(node[attr]['client_roles']),
set(client_node['roles'])):
edge = pydot.Edge(graph_nodes[client_node['name']],
graph_nodes[node['name']],
fontsize="7")
edge.set_label(attr)
graph.add_edge(edge)
# Generate graph
graph.write_png(os.path.join(STATIC_ROOT, 'img', 'node_map.png'))
|
30d70f30b24454affaf56299a014e577089dc885 | tools/telemetry/catapult_base/__init__.py | tools/telemetry/catapult_base/__init__.py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
| Add catapult_base folder to tools/telemetry to make the refactor easier. | Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed reafctoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}
| Python | bsd-3-clause | axinging/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,hgl888/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Just-D/chromium-1,Chilledheart/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
| Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed reafctoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}
|
|
62494cd7125d498d8de058ab3ebe556cd9686f6e | calvin/runtime/north/plugins/coders/messages/msgpack_coder.py | calvin/runtime/north/plugins/coders/messages/msgpack_coder.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
| # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
| Use umsgpack package for msgpack coder | coder/msgpack: Use umsgpack package for msgpack coder
| Python | apache-2.0 | EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import umsgpack
from message_coder import MessageCoderBase
umsgpack.compatibility = True
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return umsgpack.packb(data)
def decode(self, data):
data = umsgpack.unpackb(data)
return data
| coder/msgpack: Use umsgpack package for msgpack coder
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import msgpack
from message_coder import MessageCoderBase
# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):
def encode(self, data):
return msgpack.packb(data)
def decode(self, data):
data = msgpack.unpackb(data)
return data
|
4514c5c5644796413c01f6132b3b6afece73ce01 | txircd/modules/cmode_s.py | txircd/modules/cmode_s.py | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | Make +s actually definitely clear the cdata dictionary | Make +s actually definitely clear the cdata dictionary
| Python | bsd-3-clause | ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd | from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"].clear()
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) | Make +s actually definitely clear the cdata dictionary
from txircd.modbase import Mode
class SecretMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "s" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
data["cdata"] = {}
# other +s stuff is hiding in other modules.
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_s = None
def spawn(self):
self.mode_s = SecretMode()
return {
"modes": {
"cns": self.mode_s
},
"actions": {
"commandextra": [self.mode_s.listOutput]
}
}
def cleanup(self):
self.ircd.removeMode("cns")
self.ircd.actions["commandextra"].remove(self.mode_s.listOutput) |
9406ed1d55151bb47760947c54c2bd29fcc1d3a3 | knowledge_repo/converters/md.py | knowledge_repo/converters/md.py | from ..constants import MD
from ..converter import KnowledgePostConverter
from knowledge_repo.utils.files import read_text
class MdConverter(KnowledgePostConverter):
_registry_keys = [MD]
def from_file(self, filename):
self.kp_write(read_text(filename))
| from ..constants import MD
from ..converter import KnowledgePostConverter
from knowledge_repo.utils.files import read_text
class MdConverter(KnowledgePostConverter):
_registry_keys = [MD]
def from_file(self, filename):
self.kp_write(read_text(filename))
| Fix a lint required empty lines issue | Fix a lint required empty lines issue
| Python | apache-2.0 | airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo | from ..constants import MD
from ..converter import KnowledgePostConverter
from knowledge_repo.utils.files import read_text
class MdConverter(KnowledgePostConverter):
_registry_keys = [MD]
def from_file(self, filename):
self.kp_write(read_text(filename))
| Fix a lint required empty lines issue
from ..constants import MD
from ..converter import KnowledgePostConverter
from knowledge_repo.utils.files import read_text
class MdConverter(KnowledgePostConverter):
_registry_keys = [MD]
def from_file(self, filename):
self.kp_write(read_text(filename))
|
27ee2752a71ee415154c40e1978edb9d5221a331 | IPython/lib/tests/test_deepreload.py | IPython/lib/tests/test_deepreload.py | """Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| # -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| Reformat test to a standard style. | Reformat test to a standard style.
| Python | bsd-3-clause | ipython/ipython,ipython/ipython | # -*- coding: utf-8 -*-
"""Test suite for the deepreload module."""
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
#-----------------------------------------------------------------------------
# Test functions begin
#-----------------------------------------------------------------------------
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
| Reformat test to a standard style.
"""Test suite for the deepreload module."""
from IPython.testing import decorators as dec
from IPython.lib.deepreload import reload as dreload
@dec.skipif_not_numpy
def test_deepreload_numpy():
import numpy
exclude = [
# Standard exclusions:
'sys', 'os.path', '__builtin__', '__main__',
# Test-related exclusions:
'unittest',
]
dreload(numpy, exclude=exclude)
|
cfaaf421bb9627f1741a9ef4074517fd5daaec86 | wsgi/setup.py | wsgi/setup.py |
import subprocess
import sys
import setup_util
import os
def start(args):
subprocess.Popen("gunicorn hello:app -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="wsgi")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0 |
import subprocess
import sys
import setup_util
import os
def start(args):
subprocess.Popen('gunicorn hello:app --worker-class="egg:meinheld#gunicorn_worker" -b 0.0.0.0:8080 -w '
+ str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="wsgi")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
| Use meinheld worker (same as other Python Frameworks) | wsgi: Use meinheld worker (same as other Python Frameworks)
| Python | bsd-3-clause | torhve/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,denkab/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,grob/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,actframework/FrameworkBenchmarks,denkab/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,testn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,methane/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jamming/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,dmacd/FB-try1,thousandsofthem/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,herloct/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,methane/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,denkab/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zapov/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Verber/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sgml/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,herloct/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,testn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,grob/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,doom369/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zloster/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,torhve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sgml/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,herloct/FrameworkBenchmarks,methane/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,leafo/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,dmacd/FB-try1,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,testn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,leafo/FrameworkBenchmarks,dmacd/FB-try1,xitrum-framework/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sgml/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sgml/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,denkab/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zloster/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,testn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,doom369/FrameworkBenchmarks,dmacd/FB-try1,martin-g/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,khellang/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,grob/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zapov/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,herloct/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jamming/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,methane/FrameworkBenchmarks,zloster/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zapov/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sxend/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,leafo/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,actframework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,torhve/FrameworkBenchmarks,herloct/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,valyala/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,dmacd/FB-try1,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,leafo/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,methane/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,khellang/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zapov/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,leafo/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,grob/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,torhve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sgml/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,herloct/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sxend/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,testn/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sxend/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,torhve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sxend/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,torhve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zapov/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,valyala/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Verber/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,grob/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,leafo/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zloster/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,actframework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,dmacd/FB-try1,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,leafo/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,methane/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,dmacd/FB-try1,stefanocasazza/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,grob/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sxend/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,valyala/FrameworkBenchmarks,torhve/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,methane/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sgml/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,methane/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zapov/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zloster/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jamming/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,doom369/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,dmacd/FB-try1,denkab/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,herloct/FrameworkBenchmarks,testn/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Verber/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,grob/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,torhve/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,dmacd/FB-try1,MTDdk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,grob/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,denkab/FrameworkBenchmarks,doom369/FrameworkBenchmarks,joshk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,leafo/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Verber/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,methane/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jamming/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jamming/FrameworkBenchmarks,grob/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jamming/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sxend/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,joshk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,dmacd/FB-try1,diablonhn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,khellang/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,methane/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,testn/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,testn/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Verber/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,dmacd/FB-try1,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zloster/FrameworkBenchmarks,methane/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Verber/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sxend/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,herloct/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,denkab/FrameworkBenchmarks,actframework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,leafo/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,testn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,actframework/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,valyala/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,actframework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,joshk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,valyala/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,doom369/FrameworkBenchmarks,dmacd/FB-try1,valyala/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,grob/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,testn/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,testn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,methane/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,actframework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,khellang/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,joshk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,torhve/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Verber/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,actframework/FrameworkBenchmarks |
import subprocess
import sys
import setup_util
import os
def start(args):
subprocess.Popen('gunicorn hello:app --worker-class="egg:meinheld#gunicorn_worker" -b 0.0.0.0:8080 -w '
+ str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="wsgi")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0
| wsgi: Use meinheld worker (same as other Python Frameworks)
import subprocess
import sys
import setup_util
import os
def start(args):
subprocess.Popen("gunicorn hello:app -b 0.0.0.0:8080 -w " + str((args.max_threads * 2)) + " --log-level=critical", shell=True, cwd="wsgi")
return 0
def stop():
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'gunicorn' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 9)
except OSError:
pass
return 0 |
6f75300037254f51f1512a271bf7850a4bc0a8f8 | djangospam/cookie/urls.py | djangospam/cookie/urls.py | # -*- coding: utf-8 -*-
"""URL for setting SPAM value to the `djangospam.cookie` cookie.
You must also add `(r"^somewhere/", include("djangospam.cookie.urls")`
to your url patterns (usually in your root urls.conf; `somewhere`
may be any path, except the one used for true posts).
"""
from django.conf.urls.defaults import patterns
urlpatterns = patterns('',
(r'^post$', 'djangospam.cookie.views.spammer_view'),) | # -*- coding: utf-8 -*-
"""URL for setting SPAM value to the `djangospam.cookie` cookie.
You must also add `(r"^somewhere/", include("djangospam.cookie.urls")`
to your url patterns (usually in your root urls.conf; `somewhere`
may be any path, except the one used for true posts).
"""
try:
from django.conf.urls import patterns
except ImportError:
from django.conf.urls.defaults import patterns
urlpatterns = patterns('',
(r'^post$', 'djangospam.cookie.views.spammer_view'),) | Add support for Django 1.4 and up | Add support for Django 1.4 and up
* Module django.conf.urls.defaults has been moved to django.conf.urls in
version 1.4.
Commit references issue #3.
| Python | bsd-2-clause | leandroarndt/djangospam,leandroarndt/djangospam | # -*- coding: utf-8 -*-
"""URL for setting SPAM value to the `djangospam.cookie` cookie.
You must also add `(r"^somewhere/", include("djangospam.cookie.urls")`
to your url patterns (usually in your root urls.conf; `somewhere`
may be any path, except the one used for true posts).
"""
try:
from django.conf.urls import patterns
except ImportError:
from django.conf.urls.defaults import patterns
urlpatterns = patterns('',
(r'^post$', 'djangospam.cookie.views.spammer_view'),) | Add support for Django 1.4 and up
* Module django.conf.urls.defaults has been moved to django.conf.urls in
version 1.4.
Commit references issue #3.
# -*- coding: utf-8 -*-
"""URL for setting SPAM value to the `djangospam.cookie` cookie.
You must also add `(r"^somewhere/", include("djangospam.cookie.urls")`
to your url patterns (usually in your root urls.conf; `somewhere`
may be any path, except the one used for true posts).
"""
from django.conf.urls.defaults import patterns
urlpatterns = patterns('',
(r'^post$', 'djangospam.cookie.views.spammer_view'),) |
f2fd224b5e3c8cb4a919e082c47c603d4469a564 | jacquard/buckets/tests/test_bucket.py | jacquard/buckets/tests/test_bucket.py | import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
| import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
# Force bucket to a string in order to reify the fields. This validates
# that the fields are accessible.
str(bucket)
| Use an explicit test here | Use an explicit test here
| Python | mit | prophile/jacquard,prophile/jacquard | import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
# Force bucket to a string in order to reify the fields. This validates
# that the fields are accessible.
str(bucket)
| Use an explicit test here
import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
|
0f0e0e91db679f18ad9dc7568047b76e447ac589 | stock_inventory_chatter/__openerp__.py | stock_inventory_chatter/__openerp__.py | # -*- coding: utf-8 -*-
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# (http://www.eficent.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
'name': 'Stock Inventory Chatter',
'version': '9.0.1.0.0',
'author': "Eficent, "
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/stock-logistics-warehouse",
'category': 'Warehouse',
'summary': "Log changes being done in Inventory Adjustments",
'depends': ['stock'],
"data": [
'data/stock_data.xml',
'views/stock_inventory_view.xml',
],
'license': 'AGPL-3',
'installable': True,
'application': False,
}
| # -*- coding: utf-8 -*-
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# Copyright 2018 initOS GmbH
# (http://www.eficent.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
'name': 'Stock Inventory Chatter',
'version': '8.0.1.0.0',
'author': "Eficent, "
"initOS GmbH, "
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/stock-logistics-warehouse",
'category': 'Warehouse',
'summary': "Log changes being done in Inventory Adjustments",
'depends': ['stock'],
"data": [
'data/stock_data.xml',
'views/stock_inventory_view.xml',
],
'license': 'AGPL-3',
'installable': True,
'application': False,
}
| Change of the module version | Change of the module version
| Python | agpl-3.0 | kmee/stock-logistics-warehouse,acsone/stock-logistics-warehouse,open-synergy/stock-logistics-warehouse | # -*- coding: utf-8 -*-
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# Copyright 2018 initOS GmbH
# (http://www.eficent.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
'name': 'Stock Inventory Chatter',
'version': '8.0.1.0.0',
'author': "Eficent, "
"initOS GmbH, "
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/stock-logistics-warehouse",
'category': 'Warehouse',
'summary': "Log changes being done in Inventory Adjustments",
'depends': ['stock'],
"data": [
'data/stock_data.xml',
'views/stock_inventory_view.xml',
],
'license': 'AGPL-3',
'installable': True,
'application': False,
}
| Change of the module version
# -*- coding: utf-8 -*-
# Copyright 2017 Eficent Business and IT Consulting Services S.L.
# (http://www.eficent.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
'name': 'Stock Inventory Chatter',
'version': '9.0.1.0.0',
'author': "Eficent, "
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/stock-logistics-warehouse",
'category': 'Warehouse',
'summary': "Log changes being done in Inventory Adjustments",
'depends': ['stock'],
"data": [
'data/stock_data.xml',
'views/stock_inventory_view.xml',
],
'license': 'AGPL-3',
'installable': True,
'application': False,
}
|
9dee48fb0964b12780f57cef26c5b84072448232 | ds/api/serializer/app.py | ds/api/serializer/app.py | from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
| from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
| Add provider information to App | Add provider information to App
| Python | apache-2.0 | jkimbo/freight,rshk/freight,jkimbo/freight,getsentry/freight,jkimbo/freight,rshk/freight,klynton/freight,rshk/freight,getsentry/freight,klynton/freight,getsentry/freight,rshk/freight,klynton/freight,getsentry/freight,getsentry/freight,jkimbo/freight,klynton/freight | from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
'provider': item.provider,
'provider_config': item.provider_config,
}
| Add provider information to App
from __future__ import absolute_import
from ds.models import App
from .base import Serializer
from .manager import add
@add(App)
class AppSerializer(Serializer):
def serialize(self, item, attrs):
return {
'id': str(item.id),
'name': item.name,
}
|
56cd2b9804718caeb8728c3b01fb6f0bc0f2d0d4 | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.8',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 7.0.9. | Update the PyPI version to 7.0.9.
| Python | mit | Doist/todoist-python | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 7.0.9.
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.8',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='[email protected]',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
531d11ea10064fdbbad85b482bcdf075529c977d | tests/test_utils.py | tests/test_utils.py | import unittest
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..\\"))
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
assert created1
assert not created2
assert user1 == user2 | Add test case for get_or_create util | Add test case for get_or_create util
| Python | mit | Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary | import unittest
import os, sys
sys.path.append(os.path.join(os.path.dirname(__file__), "..\\"))
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
assert created1
assert not created2
assert user1 == user2 | Add test case for get_or_create util
|
|
d8295756e73cb096acd5e3ef7e2b076b8b871c31 | apps/domain/src/main/routes/general/routes.py | apps/domain/src/main/routes/general/routes.py | from .blueprint import root_blueprint as root_route
from ...core.node import node
# syft absolute
from syft.core.common.message import SignedImmediateSyftMessageWithReply
from syft.core.common.message import SignedImmediateSyftMessageWithoutReply
from syft.core.common.serde.deserialize import _deserialize
from flask import request, Response
import json
@root_route.route("/pysyft", methods=["POST"])
def root_route():
json_msg = request.get_json()
obj_msg = _deserialize(blob=json_msg, from_json=True)
if isinstance(obj_msg, SignedImmediateSyftMessageWithReply):
reply = node.recv_immediate_msg_with_reply(msg=obj_msg)
return reply.json()
elif isinstance(obj_msg, SignedImmediateSyftMessageWithoutReply):
node.recv_immediate_msg_without_reply(msg=obj_msg)
else:
node.recv_eventual_msg_without_reply(msg=obj_msg)
return ""
| from .blueprint import root_blueprint as root_route
from ...core.node import node
# syft absolute
from syft.core.common.message import SignedImmediateSyftMessageWithReply
from syft.core.common.message import SignedImmediateSyftMessageWithoutReply
from syft.core.common.serde.deserialize import _deserialize
from flask import request, Response
import json
@root_route.route("/pysyft", methods=["POST"])
def root_route():
data = request.get_data()
obj_msg = _deserialize(blob=data, from_bytes=True)
if isinstance(obj_msg, SignedImmediateSyftMessageWithReply):
reply = node.recv_immediate_msg_with_reply(msg=obj_msg)
r = Response(response=reply.serialize(to_bytes=True), status=200)
r.headers["Content-Type"] = "application/octet-stream"
return r
elif isinstance(obj_msg, SignedImmediateSyftMessageWithoutReply):
node.recv_immediate_msg_without_reply(msg=obj_msg)
else:
node.recv_eventual_msg_without_reply(msg=obj_msg)
return ""
| Update /users/login endpoint to return serialized metadata | Update /users/login endpoint to return serialized metadata
| Python | apache-2.0 | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft | from .blueprint import root_blueprint as root_route
from ...core.node import node
# syft absolute
from syft.core.common.message import SignedImmediateSyftMessageWithReply
from syft.core.common.message import SignedImmediateSyftMessageWithoutReply
from syft.core.common.serde.deserialize import _deserialize
from flask import request, Response
import json
@root_route.route("/pysyft", methods=["POST"])
def root_route():
data = request.get_data()
obj_msg = _deserialize(blob=data, from_bytes=True)
if isinstance(obj_msg, SignedImmediateSyftMessageWithReply):
reply = node.recv_immediate_msg_with_reply(msg=obj_msg)
r = Response(response=reply.serialize(to_bytes=True), status=200)
r.headers["Content-Type"] = "application/octet-stream"
return r
elif isinstance(obj_msg, SignedImmediateSyftMessageWithoutReply):
node.recv_immediate_msg_without_reply(msg=obj_msg)
else:
node.recv_eventual_msg_without_reply(msg=obj_msg)
return ""
| Update /users/login endpoint to return serialized metadata
from .blueprint import root_blueprint as root_route
from ...core.node import node
# syft absolute
from syft.core.common.message import SignedImmediateSyftMessageWithReply
from syft.core.common.message import SignedImmediateSyftMessageWithoutReply
from syft.core.common.serde.deserialize import _deserialize
from flask import request, Response
import json
@root_route.route("/pysyft", methods=["POST"])
def root_route():
json_msg = request.get_json()
obj_msg = _deserialize(blob=json_msg, from_json=True)
if isinstance(obj_msg, SignedImmediateSyftMessageWithReply):
reply = node.recv_immediate_msg_with_reply(msg=obj_msg)
return reply.json()
elif isinstance(obj_msg, SignedImmediateSyftMessageWithoutReply):
node.recv_immediate_msg_without_reply(msg=obj_msg)
else:
node.recv_eventual_msg_without_reply(msg=obj_msg)
return ""
|
f76a766f7be4936d34dc14e65a0f1fd974055b20 | fireplace/cards/tgt/paladin.py | fireplace/cards/tgt/paladin.py | from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
| from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
| Fix Eadric the Pure's target selection | Fix Eadric the Pure's target selection
| Python | agpl-3.0 | liujimj/fireplace,beheh/fireplace,NightKev/fireplace,smallnamespace/fireplace,jleclanche/fireplace,amw2104/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,Ragowit/fireplace,amw2104/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,liujimj/fireplace,Meerkov/fireplace,smallnamespace/fireplace | from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
| Fix Eadric the Pure's target selection
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
74ceceb6ccdb3b205a72aa6ca75b833c66eb659c | HearthStone2/copy_data.py | HearthStone2/copy_data.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
| Add a script to copy data files conveniently. | Add a script to copy data files conveniently.
| Python | mit | fyabc/MiniGames | #! /usr/bin/python
# -*- coding: utf-8 -*-
"""Copy data from the given zip file to the project."""
import argparse
import fnmatch
import os
import time
import zipfile
__author__ = 'fyabc'
DataDir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'MyHearthStone')
DataFilePattern = '*/resources/*'
def main(args=None):
parser = argparse.ArgumentParser(description='Copy data from data zip file into this project.')
parser.add_argument('file', help='path to data zip file')
args = parser.parse_args(args)
time_start = time.time()
n_files = 0
with zipfile.ZipFile(args.file) as zip_file:
rc_names = fnmatch.filter((name for name in zip_file.namelist() if not name.endswith('/')), DataFilePattern)
for name in rc_names:
print('Copying file {} to {} ... '.format(name, os.path.join(DataDir, name)), end='')
zip_file.extract(name, path=DataDir)
n_files += 1
print('done')
print('Copy {} files in {:.6f}s.'.format(n_files, time.time() - time_start))
if __name__ == '__main__':
main()
| Add a script to copy data files conveniently.
|
|
29e6e77b03569d39e484b47efd3b8230f30ee195 | eduid_signup/db.py | eduid_signup/db.py | import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| Allow Mongo connections to Mongo Replicaset Cluster | Allow Mongo connections to Mongo Replicaset Cluster
| Python | bsd-3-clause | SUNET/eduid-signup,SUNET/eduid-signup,SUNET/eduid-signup | import pymongo
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.MongoClient):
self.db_uri = db_uri
self.connection = connection_factory(
host=self.db_uri,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
| Allow Mongo connections to Mongo Replicaset Cluster
import pymongo
from eduid_signup.compat import urlparse
DEFAULT_MONGODB_HOST = 'localhost'
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_NAME = 'eduid'
DEFAULT_MONGODB_URI = 'mongodb://%s:%d/%s' % (DEFAULT_MONGODB_HOST,
DEFAULT_MONGODB_PORT,
DEFAULT_MONGODB_NAME)
class MongoDB(object):
"""Simple wrapper to get pymongo real objects from the settings uri"""
def __init__(self, db_uri=DEFAULT_MONGODB_URI,
connection_factory=pymongo.Connection):
self.db_uri = urlparse.urlparse(db_uri)
self.connection = connection_factory(
host=self.db_uri.hostname or DEFAULT_MONGODB_HOST,
port=self.db_uri.port or DEFAULT_MONGODB_PORT,
tz_aware=True)
if self.db_uri.path:
self.database_name = self.db_uri.path[1:]
else:
self.database_name = DEFAULT_MONGODB_NAME
def get_connection(self):
return self.connection
def get_database(self):
database = self.connection[self.database_name]
if self.db_uri.username and self.db_uri.password:
database.authenticate(self.db_uri.username, self.db_uri.password)
return database
def get_db(request):
return request.registry.settings['mongodb'].get_database()
|
ae463e9f27bd1266125d0d3d94dd88171df997d2 | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='[email protected]',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
| #!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='[email protected]',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
| Mark version 1.1.1 with license. | Mark version 1.1.1 with license.
| Python | mit | ironfroggy/straight.plugin,pombredanne/straight.plugin | #!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.1.1',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='[email protected]',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
| Mark version 1.1.1 with license.
#!/usr/bin/env python
from distutils.core import setup
setup(name='straight.plugin',
version='1.0',
description='A simple namespaced plugin facility',
author='Calvin Spealman',
author_email='[email protected]',
url='https://github.com/ironfroggy/straight.plugin',
packages=['straight', 'straight.plugin'],
)
|
2e9472e4989985ebdb770c193856a02616a3d8e4 | plugoo/assets.py | plugoo/assets.py | class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return line.replace('\n','')
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
| class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
| Add a method for line by line asset parsing | Add a method for line by line asset parsing
| Python | bsd-2-clause | 0xPoly/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,hackerberry/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe | class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
| Add a method for line by line asset parsing
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return line.replace('\n','')
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
29519614965e6629debcd2d08fd1fe2e0debe08f | test/test_paramval.py | test/test_paramval.py | import logging
import luigi
import sciluigi as sl
import os
import time
import unittest
log = logging.getLogger('sciluigi-interface')
log.setLevel(logging.WARNING)
class IntParamTask(sl.Task):
an_int_param = luigi.IntParameter()
def out_int_val(self):
return sl.TargetInfo(self, '/tmp/intparamtask_intval_%d.txt' % self.an_int_param)
def run(self):
with self.out_int_val().open('w') as outfile:
outfile.write('%d' % self.an_int_param)
class NonStringParamWF(sl.WorkflowTask):
def workflow(self):
intparam_task = self.new_task('intparam_task', IntParamTask,
an_int_param = 123)
return intparam_task
class TestNonStringParameterValues(unittest.TestCase):
def setUp(self):
self.w = luigi.worker.Worker()
self.nsp_wf = NonStringParamWF(instance_name='nonstring_param_wf')
self.w.add(self.nsp_wf)
def test_intparam_gets_set(self):
self.assertEquals(self.nsp_wf._tasks['intparam_task'].an_int_param, 123)
def test_intparam_gets_set(self):
self.w.run()
with self.nsp_wf.workflow().out_int_val().open() as infile:
val = infile.read()
self.assertEquals(val, '123')
def tearDown(self):
pass
| Add test for non-string (integer) parameter value | Add test for non-string (integer) parameter value
| Python | mit | pharmbio/sciluigi,pharmbio/sciluigi,samuell/sciluigi | import logging
import luigi
import sciluigi as sl
import os
import time
import unittest
log = logging.getLogger('sciluigi-interface')
log.setLevel(logging.WARNING)
class IntParamTask(sl.Task):
an_int_param = luigi.IntParameter()
def out_int_val(self):
return sl.TargetInfo(self, '/tmp/intparamtask_intval_%d.txt' % self.an_int_param)
def run(self):
with self.out_int_val().open('w') as outfile:
outfile.write('%d' % self.an_int_param)
class NonStringParamWF(sl.WorkflowTask):
def workflow(self):
intparam_task = self.new_task('intparam_task', IntParamTask,
an_int_param = 123)
return intparam_task
class TestNonStringParameterValues(unittest.TestCase):
def setUp(self):
self.w = luigi.worker.Worker()
self.nsp_wf = NonStringParamWF(instance_name='nonstring_param_wf')
self.w.add(self.nsp_wf)
def test_intparam_gets_set(self):
self.assertEquals(self.nsp_wf._tasks['intparam_task'].an_int_param, 123)
def test_intparam_gets_set(self):
self.w.run()
with self.nsp_wf.workflow().out_int_val().open() as infile:
val = infile.read()
self.assertEquals(val, '123')
def tearDown(self):
pass
| Add test for non-string (integer) parameter value
|
|
e40c295967e8d0b1a190c173dedebefe9eb89462 | Python/66_PlusOne.py | Python/66_PlusOne.py | class Solution(object):
def plusOne(self, digits):
"""
:type digits: List[int]
:rtype: List[int]
"""
digits[len(digits)-1] += 1
if digits[len(digits)-1] < 10:
return digits
for i in xrange(len(digits)-1,0,-1):
if digits[i] == 10:
digits[i] = 0
digits[i-1] += 1
else:
break
if digits[0] == 10:
digits[0] = 0
digits = [1] + digits
return digits
digits = [0,9,7,9]
print Solution().plusOne(digits)
| Add solution for 66 Plus One. | Add solution for 66 Plus One.
| Python | mit | comicxmz001/LeetCode,comicxmz001/LeetCode | class Solution(object):
def plusOne(self, digits):
"""
:type digits: List[int]
:rtype: List[int]
"""
digits[len(digits)-1] += 1
if digits[len(digits)-1] < 10:
return digits
for i in xrange(len(digits)-1,0,-1):
if digits[i] == 10:
digits[i] = 0
digits[i-1] += 1
else:
break
if digits[0] == 10:
digits[0] = 0
digits = [1] + digits
return digits
digits = [0,9,7,9]
print Solution().plusOne(digits)
| Add solution for 66 Plus One.
|
|
81215120afffe54b17be3f38bbc2ac292452c0c4 | addons/mail/models/ir_attachment.py | addons/mail/models/ir_attachment.py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
@api.multi
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
@api.multi
def unlink(self):
self.remove_as_main_attachment()
super(IrAttachment, self).unlink()
@api.multi
def remove_as_main_attachment(self):
for attachment in self:
related_record = self.env[attachment.res_model].browse(attachment.res_id)
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if related_record.message_main_attachment_id == attachment:
related_record.message_main_attachment_id = False
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
related_record = self.env[self.res_model].browse(self.res_id)
# message_main_attachment_id field can be empty, that's why we compare to False;
# we are just checking that it exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
related_record.message_main_attachment_id = self
| # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
@api.multi
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
related_record = self.env[self.res_model].browse(self.res_id)
# message_main_attachment_id field can be empty, that's why we compare to False;
# we are just checking that it exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
related_record.message_main_attachment_id = self
| Revert "[FIX] mail: remove attachment as main at unlink" | Revert "[FIX] mail: remove attachment as main at unlink"
This reverts commit abc45b1
Since by default the ondelete attribute of a many2one is `set null`,
this was completely unnecessary to begin with.
Bug caused by this commit:
Unlink a record that has some attachments.
The unlink first removes the record, then its related attachments.
It calls remove_as_main_attachment, which reads the attachment res_model and
res_id. This triggers a check that the related record can be read.
However the related record has already been removed, an exception is raised.
It is thus impossible to unlink a record.
Closes #32563
closes odoo/odoo#32572
Signed-off-by: Raphael Collet (rco) <[email protected]> | Python | agpl-3.0 | ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
@api.multi
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
related_record = self.env[self.res_model].browse(self.res_id)
# message_main_attachment_id field can be empty, that's why we compare to False;
# we are just checking that it exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
related_record.message_main_attachment_id = self
| Revert "[FIX] mail: remove attachment as main at unlink"
This reverts commit abc45b1
Since by default the ondelete attribute of a many2one is `set null`,
this was completely unnecessary to begin with.
Bug caused by this commit:
Unlink a record that has some attachments.
The unlink first removes the record, then its related attachments.
It calls remove_as_main_attachment, which reads the attachment res_model and
res_id. This triggers a check that the related record can be read.
However the related record has already been removed, an exception is raised.
It is thus impossible to unlink a record.
Closes #32563
closes odoo/odoo#32572
Signed-off-by: Raphael Collet (rco) <[email protected]>
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class IrAttachment(models.Model):
_inherit = 'ir.attachment'
@api.multi
def _post_add_create(self):
""" Overrides behaviour when the attachment is created through the controller
"""
super(IrAttachment, self)._post_add_create()
for record in self:
record.register_as_main_attachment(force=False)
@api.multi
def unlink(self):
self.remove_as_main_attachment()
super(IrAttachment, self).unlink()
@api.multi
def remove_as_main_attachment(self):
for attachment in self:
related_record = self.env[attachment.res_model].browse(attachment.res_id)
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if related_record.message_main_attachment_id == attachment:
related_record.message_main_attachment_id = False
def register_as_main_attachment(self, force=True):
""" Registers this attachment as the main one of the model it is
attached to.
"""
self.ensure_one()
related_record = self.env[self.res_model].browse(self.res_id)
# message_main_attachment_id field can be empty, that's why we compare to False;
# we are just checking that it exists on the model before writing it
if related_record and hasattr(related_record, 'message_main_attachment_id'):
if force or not related_record.message_main_attachment_id:
related_record.message_main_attachment_id = self
|
a581253c6daee875855ac1677717eb1cf464e704 | froide/publicbody/migrations/0021_proposedpublicbody.py | froide/publicbody/migrations/0021_proposedpublicbody.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
| Add proposed publicbody proxy model migration | Add proposed publicbody proxy model migration | Python | mit | fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-07-19 10:35
from __future__ import unicode_literals
from django.db import migrations
import froide.publicbody.models
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0020_foilaw_requires_signature'),
]
operations = [
migrations.CreateModel(
name='ProposedPublicBody',
fields=[
],
options={
'verbose_name': 'Proposed Public Body',
'verbose_name_plural': 'Proposed Public Bodies',
'proxy': True,
'indexes': [],
},
bases=('publicbody.publicbody',),
managers=[
('objects', froide.publicbody.models.ProposedPublicBodyManager()),
],
),
]
| Add proposed publicbody proxy model migration
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.