commit
stringlengths 40
40
| old_file
stringlengths 5
117
| new_file
stringlengths 5
117
| old_contents
stringlengths 0
1.93k
| new_contents
stringlengths 19
3.3k
| subject
stringlengths 17
320
| message
stringlengths 18
3.28k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
42.4k
| completion
stringlengths 19
3.3k
| prompt
stringlengths 21
3.65k
|
---|---|---|---|---|---|---|---|---|---|---|---|
faf35a814d045ce3d71921ed0d4ac268d5a9811c | app/notify_client/provider_client.py | app/notify_client/provider_client.py |
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
| Add provider client method to get provider version history | Add provider client method to get provider version history
| Python | mit | gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin |
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def get_provider_versions(self, provider_id):
return self.get(
url='/provider-details/{}/versions'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
| Add provider client method to get provider version history
from app.notify_client import _attach_current_user, NotifyAdminAPIClient
class ProviderClient(NotifyAdminAPIClient):
def __init__(self):
super().__init__("a", "b", "c")
def init_app(self, app):
self.base_url = app.config['API_HOST_NAME']
self.service_id = app.config['ADMIN_CLIENT_USER_NAME']
self.api_key = app.config['ADMIN_CLIENT_SECRET']
def get_all_providers(self):
return self.get(
url='/provider-details'
)
def get_provider_by_id(self, provider_id):
return self.get(
url='/provider-details/{}'.format(provider_id)
)
def update_provider(self, provider_id, priority):
data = {
"priority": priority
}
data = _attach_current_user(data)
return self.post(url='/provider-details/{}'.format(provider_id), data=data)
|
b6e9215457eba813f91c9eb4a8b96f8652bcd5fc | functional_tests/pages/settings.py | functional_tests/pages/settings.py | # -*- coding: utf-8 -*-
from selenium.webdriver.support.ui import Select
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
return_link = PageElement(css='.mui--text-title a.appbar-correct')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm')
content = PageElement(id_='content')
sidebar = PageElement(id_='sidebar')
sidebar_return_link = PageElement(css='#sidebar a#return')
_settings_list = MultiPageElement(tag_name='label')
@property
def settings_list(self):
return [setting.text for setting in self._settings_list]
_language_elem = PageElement(name='language')
@property
def language(self):
return Select(self._language_elem)
| # -*- coding: utf-8 -*-
from selenium.webdriver.support.ui import Select
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
return_link = PageElement(css='#sidebar-brand a')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm')
content = PageElement(id_='content')
sidebar = PageElement(id_='sidebar')
sidebar_return_link = PageElement(css='#sidebar a#return')
_settings_list = MultiPageElement(tag_name='label')
@property
def settings_list(self):
return [setting.text for setting in self._settings_list]
_language_elem = PageElement(name='language')
@property
def language(self):
return Select(self._language_elem)
| Make the return link work again | Make the return link work again
| Python | mit | XeryusTC/projman,XeryusTC/projman,XeryusTC/projman | # -*- coding: utf-8 -*-
from selenium.webdriver.support.ui import Select
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
return_link = PageElement(css='#sidebar-brand a')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm')
content = PageElement(id_='content')
sidebar = PageElement(id_='sidebar')
sidebar_return_link = PageElement(css='#sidebar a#return')
_settings_list = MultiPageElement(tag_name='label')
@property
def settings_list(self):
return [setting.text for setting in self._settings_list]
_language_elem = PageElement(name='language')
@property
def language(self):
return Select(self._language_elem)
| Make the return link work again
# -*- coding: utf-8 -*-
from selenium.webdriver.support.ui import Select
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
return_link = PageElement(css='.mui--text-title a.appbar-correct')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm')
content = PageElement(id_='content')
sidebar = PageElement(id_='sidebar')
sidebar_return_link = PageElement(css='#sidebar a#return')
_settings_list = MultiPageElement(tag_name='label')
@property
def settings_list(self):
return [setting.text for setting in self._settings_list]
_language_elem = PageElement(name='language')
@property
def language(self):
return Select(self._language_elem)
|
111d0bd356c18d0c028c73cd8c84c9d3e3ae591c | astropy/io/misc/asdf/tags/tests/helpers.py | astropy/io/misc/asdf/tags/tests/helpers.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import os
import urllib.parse
import yaml
import numpy as np
def run_schema_example_test(organization, standard, name, version, check_func=None):
import asdf
from asdf.tests import helpers
from asdf.types import format_tag
from asdf.resolver import default_resolver
tag = format_tag(organization, standard, version, name)
schema_path = urllib.parse.urlparse(default_resolver(tag)).path
with open(schema_path, 'rb') as ff:
schema = yaml.load(ff)
examples = []
for node in asdf.treeutil.iter_tree(schema):
if (isinstance(node, dict) and
'examples' in node and
isinstance(node['examples'], list)):
for desc, example in node['examples']:
examples.append(example)
for example in examples:
buff = helpers.yaml_to_asdf('example: ' + example.strip())
ff = asdf.AsdfFile(uri=schema_path)
# Add some dummy blocks so that the ndarray examples work
for i in range(3):
b = asdf.block.Block(np.zeros((1024*1024*8), dtype=np.uint8))
b._used = True
ff.blocks.add(b)
ff._open_impl(ff, buff, mode='r')
if check_func:
check_func(ff)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import os
import urllib.parse
import urllib.request
import yaml
import numpy as np
def run_schema_example_test(organization, standard, name, version, check_func=None):
import asdf
from asdf.tests import helpers
from asdf.types import format_tag
from asdf.resolver import default_tag_to_url_mapping
from asdf.schema import load_schema
tag = format_tag(organization, standard, version, name)
uri = asdf.resolver.default_tag_to_url_mapping(tag)
r = asdf.AsdfFile().resolver
examples = []
schema = load_schema(uri, resolver=r)
for node in asdf.treeutil.iter_tree(schema):
if (isinstance(node, dict) and
'examples' in node and
isinstance(node['examples'], list)):
for desc, example in node['examples']:
examples.append(example)
for example in examples:
buff = helpers.yaml_to_asdf('example: ' + example.strip())
ff = asdf.AsdfFile(uri=uri)
# Add some dummy blocks so that the ndarray examples work
for i in range(3):
b = asdf.block.Block(np.zeros((1024*1024*8), dtype=np.uint8))
b._used = True
ff.blocks.add(b)
ff._open_impl(ff, buff, mode='r')
if check_func:
check_func(ff)
| Fix ASDF tag test helper to load schemas correctly | Fix ASDF tag test helper to load schemas correctly
| Python | bsd-3-clause | pllim/astropy,astropy/astropy,lpsinger/astropy,larrybradley/astropy,StuartLittlefair/astropy,mhvk/astropy,pllim/astropy,MSeifert04/astropy,saimn/astropy,dhomeier/astropy,lpsinger/astropy,pllim/astropy,stargaser/astropy,larrybradley/astropy,larrybradley/astropy,bsipocz/astropy,StuartLittlefair/astropy,astropy/astropy,dhomeier/astropy,bsipocz/astropy,pllim/astropy,astropy/astropy,mhvk/astropy,MSeifert04/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,lpsinger/astropy,mhvk/astropy,astropy/astropy,StuartLittlefair/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,saimn/astropy,stargaser/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,dhomeier/astropy,astropy/astropy,dhomeier/astropy,pllim/astropy,StuartLittlefair/astropy,larrybradley/astropy,bsipocz/astropy,stargaser/astropy,mhvk/astropy,stargaser/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,saimn/astropy,dhomeier/astropy,saimn/astropy,mhvk/astropy,saimn/astropy,MSeifert04/astropy | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import os
import urllib.parse
import urllib.request
import yaml
import numpy as np
def run_schema_example_test(organization, standard, name, version, check_func=None):
import asdf
from asdf.tests import helpers
from asdf.types import format_tag
from asdf.resolver import default_tag_to_url_mapping
from asdf.schema import load_schema
tag = format_tag(organization, standard, version, name)
uri = asdf.resolver.default_tag_to_url_mapping(tag)
r = asdf.AsdfFile().resolver
examples = []
schema = load_schema(uri, resolver=r)
for node in asdf.treeutil.iter_tree(schema):
if (isinstance(node, dict) and
'examples' in node and
isinstance(node['examples'], list)):
for desc, example in node['examples']:
examples.append(example)
for example in examples:
buff = helpers.yaml_to_asdf('example: ' + example.strip())
ff = asdf.AsdfFile(uri=uri)
# Add some dummy blocks so that the ndarray examples work
for i in range(3):
b = asdf.block.Block(np.zeros((1024*1024*8), dtype=np.uint8))
b._used = True
ff.blocks.add(b)
ff._open_impl(ff, buff, mode='r')
if check_func:
check_func(ff)
| Fix ASDF tag test helper to load schemas correctly
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import os
import urllib.parse
import yaml
import numpy as np
def run_schema_example_test(organization, standard, name, version, check_func=None):
import asdf
from asdf.tests import helpers
from asdf.types import format_tag
from asdf.resolver import default_resolver
tag = format_tag(organization, standard, version, name)
schema_path = urllib.parse.urlparse(default_resolver(tag)).path
with open(schema_path, 'rb') as ff:
schema = yaml.load(ff)
examples = []
for node in asdf.treeutil.iter_tree(schema):
if (isinstance(node, dict) and
'examples' in node and
isinstance(node['examples'], list)):
for desc, example in node['examples']:
examples.append(example)
for example in examples:
buff = helpers.yaml_to_asdf('example: ' + example.strip())
ff = asdf.AsdfFile(uri=schema_path)
# Add some dummy blocks so that the ndarray examples work
for i in range(3):
b = asdf.block.Block(np.zeros((1024*1024*8), dtype=np.uint8))
b._used = True
ff.blocks.add(b)
ff._open_impl(ff, buff, mode='r')
if check_func:
check_func(ff)
|
2e6080f2d8c258700444129a9b989ca5db056a9d | elfi/examples/ma2.py | elfi/examples/ma2.py | import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Normalized autocovariance (i.e. autocorrelation) assuming a (weak) stationary process.
Assuming univariate stochastic process with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
var = np.var(x, axis=1, keepdims=True, ddof=1)
# Autocovariance
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
# Normalize
tau = C / var
return tau
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
| import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
| Change autocorrelation to autocov. Variance infromation improves ABC results. | Change autocorrelation to autocov. Variance infromation improves ABC results.
| Python | bsd-3-clause | lintusj1/elfi,HIIT/elfi,lintusj1/elfi,elfi-dev/elfi,elfi-dev/elfi | import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
| Change autocorrelation to autocov. Variance infromation improves ABC results.
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Normalized autocovariance (i.e. autocorrelation) assuming a (weak) stationary process.
Assuming univariate stochastic process with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
var = np.var(x, axis=1, keepdims=True, ddof=1)
# Autocovariance
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
# Normalize
tau = C / var
return tau
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
|
697833caade1323ddb9a0b4e51031f1d494262cd | 201705/migonzalvar/biggest_set.py | 201705/migonzalvar/biggest_set.py | #!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
def __init__(self, elapsed=None):
self.elapsed = elapsed
@contextmanager
def less_than(secs):
duration = Duration()
tic = time.time()
yield duration
elapsed = time.time() - tic
print(f'Duration: {elapsed} seconds')
if elapsed >= secs:
print('Limit reached. Stopping.')
raise SystemExit(0)
def do():
for n in range(1, 100, 10):
source = range(1, n)
print(f'Length: {n} items')
with less_than(300):
result = has_subset_sum_zero(source)
print(f'Result: {result}')
print('Continue...')
print()
if __name__ == '__main__':
do()
| #!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
def __init__(self, elapsed=None):
self.elapsed = elapsed
@contextmanager
def less_than(secs):
duration = Duration()
tic = time.time()
yield duration
elapsed = time.time() - tic
duration.elapsed = elapsed
def nosolution_case(N):
return range(1, N + 1)
def negative_worst_case(N):
case = list(range(-N + 1, 0))
case += [abs(sum(case))]
return case
def positive_worst_case(N):
case = list(range(1, N))
case.insert(0, - sum(case))
return case
def do():
strategies = [nosolution_case, negative_worst_case, positive_worst_case]
for strategy in strategies:
print(f'## Using {strategy.__name__}')
print()
for n in range(1, 100, 10):
source = range(1, n)
print(f'Length: {n} items')
with less_than(300) as duration:
result = has_subset_sum_zero(source)
print(f'Result: {result}')
print(f'Duration: {duration.elapsed} seconds')
if duration.elapsed >= secs:
print('Limit reached. Stopping.')
break
print('Continue searching...')
print()
if __name__ == '__main__':
do()
| Use several strategies for performance | Use several strategies for performance
| Python | bsd-3-clause | VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto | #!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
def __init__(self, elapsed=None):
self.elapsed = elapsed
@contextmanager
def less_than(secs):
duration = Duration()
tic = time.time()
yield duration
elapsed = time.time() - tic
duration.elapsed = elapsed
def nosolution_case(N):
return range(1, N + 1)
def negative_worst_case(N):
case = list(range(-N + 1, 0))
case += [abs(sum(case))]
return case
def positive_worst_case(N):
case = list(range(1, N))
case.insert(0, - sum(case))
return case
def do():
strategies = [nosolution_case, negative_worst_case, positive_worst_case]
for strategy in strategies:
print(f'## Using {strategy.__name__}')
print()
for n in range(1, 100, 10):
source = range(1, n)
print(f'Length: {n} items')
with less_than(300) as duration:
result = has_subset_sum_zero(source)
print(f'Result: {result}')
print(f'Duration: {duration.elapsed} seconds')
if duration.elapsed >= secs:
print('Limit reached. Stopping.')
break
print('Continue searching...')
print()
if __name__ == '__main__':
do()
| Use several strategies for performance
#!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
def __init__(self, elapsed=None):
self.elapsed = elapsed
@contextmanager
def less_than(secs):
duration = Duration()
tic = time.time()
yield duration
elapsed = time.time() - tic
print(f'Duration: {elapsed} seconds')
if elapsed >= secs:
print('Limit reached. Stopping.')
raise SystemExit(0)
def do():
for n in range(1, 100, 10):
source = range(1, n)
print(f'Length: {n} items')
with less_than(300):
result = has_subset_sum_zero(source)
print(f'Result: {result}')
print('Continue...')
print()
if __name__ == '__main__':
do()
|
10df3cd5e4c8517652efdb8381155253aa6a8157 | osfclient/tests/test_cli.py | osfclient/tests/test_cli.py | from unittest.mock import call
from unittest.mock import Mock
from unittest.mock import patch
from osfclient import cli
@patch('osfclient.cli.os.path.exists', return_value=True)
@patch('osfclient.cli.configparser.ConfigParser')
def test_config_file(MockConfigParser, os_path_exists):
MockConfigParser().__getitem__ = Mock(return_value={'project': '1234'})
config = cli.config_from_file()
assert config == {'project': '1234'}
assert call.read('.osfcli.config') in MockConfigParser().mock_calls
assert call('osf') in MockConfigParser().__getitem__.mock_calls
def test_config_from_env_replace_username():
def simple_getenv(key):
if key == 'OSF_USERNAME':
return 'theusername'
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'username': 'notusername'})
assert config == {'username': 'theusername'}
def test_config_from_env_username():
def simple_getenv(key):
if key == 'OSF_USERNAME':
return None
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'username': 'theusername'})
assert config == {'username': 'theusername'}
def test_config_from_env_replace_project():
def simple_getenv(key):
if key == 'OSF_PROJECT':
return 'theproject'
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'project': 'notproject'})
assert config == {'project': 'theproject'}
def test_config_from_env_project():
def simple_getenv(key):
if key == 'OSF_PROJECT':
return None
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'project': 'theproject'})
assert config == {'project': 'theproject'}
| Add test for file based configuration | Add test for file based configuration
| Python | bsd-3-clause | betatim/osf-cli,betatim/osf-cli | from unittest.mock import call
from unittest.mock import Mock
from unittest.mock import patch
from osfclient import cli
@patch('osfclient.cli.os.path.exists', return_value=True)
@patch('osfclient.cli.configparser.ConfigParser')
def test_config_file(MockConfigParser, os_path_exists):
MockConfigParser().__getitem__ = Mock(return_value={'project': '1234'})
config = cli.config_from_file()
assert config == {'project': '1234'}
assert call.read('.osfcli.config') in MockConfigParser().mock_calls
assert call('osf') in MockConfigParser().__getitem__.mock_calls
def test_config_from_env_replace_username():
def simple_getenv(key):
if key == 'OSF_USERNAME':
return 'theusername'
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'username': 'notusername'})
assert config == {'username': 'theusername'}
def test_config_from_env_username():
def simple_getenv(key):
if key == 'OSF_USERNAME':
return None
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'username': 'theusername'})
assert config == {'username': 'theusername'}
def test_config_from_env_replace_project():
def simple_getenv(key):
if key == 'OSF_PROJECT':
return 'theproject'
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'project': 'notproject'})
assert config == {'project': 'theproject'}
def test_config_from_env_project():
def simple_getenv(key):
if key == 'OSF_PROJECT':
return None
with patch('osfclient.cli.os.getenv', side_effect=simple_getenv):
config = cli.config_from_env({'project': 'theproject'})
assert config == {'project': 'theproject'}
| Add test for file based configuration
|
|
693b904a9053fbddc6c93cfab1d6448c4b644d1c | scripts/travis_build_dependent_projects.py | scripts/travis_build_dependent_projects.py | # -*- coding: utf-8 -*-
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if "config.json" in build.config.get("config", [""])[0]:
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
| # -*- coding: utf-8 -*-
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if 'drift' in build.config.get('drift_build_trigger', []):
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
| Fix Travis dependant build trigger | Fix Travis dependant build trigger
| Python | mit | dgnorth/drift,dgnorth/drift,dgnorth/drift | # -*- coding: utf-8 -*-
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if 'drift' in build.config.get('drift_build_trigger', []):
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
| Fix Travis dependant build trigger
# -*- coding: utf-8 -*-
import os
from click import echo
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
echo("Enumerate repos on {!r}".format(domain))
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
echo(u"Checking repo: {}\n{!r}".format(repo.slug, repo.description))
try:
build = conn.build(repo.last_build_id)
if "config.json" in build.config.get("config", [""])[0]:
echo("Found drift project: {!r}".format(repo.slug))
if not build.running:
echo("Restarting...")
build.restart()
restarted.append(repo.slug)
else:
echo("Build is already running!")
building.append(repo.slug)
else:
echo("Not a drift based project.")
except Exception as e:
echo("Can't build repo: {!r}".format(e))
echo()
if restarted:
echo("Repos restarted:")
for reponame in restarted:
echo("\t{}".format(reponame))
else:
echo("No builds restarted.")
if building:
echo("Repos already building:")
for reponame in building:
echo("\t{}".format(reponame))
if __name__ == "__main__":
main()
|
d8444cec60f38baa75b89892dda6163bf63917af | todo/__init__.py | todo/__init__.py | """django todo"""
__version__ = '1.5.dev'
__author__ = 'Scot Hacker'
__email__ = '[email protected]'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
| """django todo"""
__version__ = '1.5'
__author__ = 'Scot Hacker'
__email__ = '[email protected]'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
| Bump version number for release 1.5 | Bump version number for release 1.5
| Python | bsd-3-clause | jwiltshire/django-todo,shacker/django-todo,jwiltshire/django-todo,shacker/django-todo,jwiltshire/django-todo,shacker/django-todo | """django todo"""
__version__ = '1.5'
__author__ = 'Scot Hacker'
__email__ = '[email protected]'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
| Bump version number for release 1.5
"""django todo"""
__version__ = '1.5.dev'
__author__ = 'Scot Hacker'
__email__ = '[email protected]'
__url__ = 'https://github.com/shacker/django-todo'
__license__ = 'BSD License'
|
666b011ef95ef6e82e59cc134b52fb29443ff9d8 | iroha_cli/crypto.py | iroha_cli/crypto.py | import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
| import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
def raw_public_key(self):
return base64.b64decode(self.public_key)
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
| Add get raw key from KeyPair | Add get raw key from KeyPair
| Python | apache-2.0 | MizukiSonoko/iroha-cli,MizukiSonoko/iroha-cli | import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
def raw_public_key(self):
return base64.b64decode(self.public_key)
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
| Add get raw key from KeyPair
import base64
import sha3
import os
from collections import namedtuple
class KeyPair:
def __init__(self, pub, pri):
self.private_key = pri
self.public_key = pub
from iroha_cli.crypto_ed25519 import generate_keypair_ed25519, sign_ed25519, verify_ed25519, ed25519_sha3_512, \
ed25519_sha3_256
def generate_keypair():
return generate_keypair_ed25519()
def sign(key_pair, message):
return sign_ed25519(key_pair, message)
def verify(pub_key, sig, message):
return verify_ed25519(pub_key, sig, message)
def sha3_256(message):
return ed25519_sha3_256(message)
def sha3_512(message):
return ed25519_sha3_512(message)
|
a4808284731ebcc7ae9c29bfeee4db7e943e1b2a | pyinfra/__init__.py | pyinfra/__init__.py | # pyinfra
# File: pyinfra/__init__.py
# Desc: some global state for pyinfra
'''
Welcome to pyinfra.
'''
import logging
# Global pyinfra logger
logger = logging.getLogger('pyinfra')
# Setup package level version
from .version import __version__ # noqa
# Trigger pseudo_* creation
from . import pseudo_modules # noqa
# Trigger fact index creation
from . import facts # noqa
# Trigger module imports
from . import modules # noqa
| # pyinfra
# File: pyinfra/__init__.py
# Desc: some global state for pyinfra
'''
Welcome to pyinfra.
'''
import logging
# Global flag set True by `pyinfra_cli.__main__`
is_cli = False
# Global pyinfra logger
logger = logging.getLogger('pyinfra')
# Setup package level version
from .version import __version__ # noqa
# Trigger pseudo_* creation
from . import pseudo_modules # noqa
# Trigger fact index creation
from . import facts # noqa
# Trigger module imports
from . import modules # noqa
| Add default for `is_cli` to pyinfra. | Add default for `is_cli` to pyinfra.
| Python | mit | Fizzadar/pyinfra,Fizzadar/pyinfra | # pyinfra
# File: pyinfra/__init__.py
# Desc: some global state for pyinfra
'''
Welcome to pyinfra.
'''
import logging
# Global flag set True by `pyinfra_cli.__main__`
is_cli = False
# Global pyinfra logger
logger = logging.getLogger('pyinfra')
# Setup package level version
from .version import __version__ # noqa
# Trigger pseudo_* creation
from . import pseudo_modules # noqa
# Trigger fact index creation
from . import facts # noqa
# Trigger module imports
from . import modules # noqa
| Add default for `is_cli` to pyinfra.
# pyinfra
# File: pyinfra/__init__.py
# Desc: some global state for pyinfra
'''
Welcome to pyinfra.
'''
import logging
# Global pyinfra logger
logger = logging.getLogger('pyinfra')
# Setup package level version
from .version import __version__ # noqa
# Trigger pseudo_* creation
from . import pseudo_modules # noqa
# Trigger fact index creation
from . import facts # noqa
# Trigger module imports
from . import modules # noqa
|
f5198851aebb000a6107b3f9ce34825da200abff | src/foremast/utils/get_template.py | src/foremast/utils/get_template.py | """Render Jinja2 template."""
import logging
import os
import jinja2
LOG = logging.getLogger(__name__)
def get_template(template_file='', **kwargs):
"""Get the Jinja2 template and renders with dict _kwargs_.
Args:
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template.
"""
here = os.path.dirname(os.path.realpath(__file__))
templatedir = '{0}/../templates/'.format(here)
LOG.debug('Template directory: %s', templatedir)
LOG.debug('Template file: %s', template_file)
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader(templatedir))
template = jinjaenv.get_template(template_file)
for k,v in kwargs.items():
LOG.debug('%s => %s', k,v)
rendered_json = template.render(**kwargs)
LOG.debug('Rendered JSON:\n%s', rendered_json)
return rendered_json
| """Render Jinja2 template."""
import logging
import os
import jinja2
LOG = logging.getLogger(__name__)
def get_template(template_file='', **kwargs):
"""Get the Jinja2 template and renders with dict _kwargs_.
Args:
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template.
"""
here = os.path.dirname(os.path.realpath(__file__))
templatedir = '{0}/../templates/'.format(here)
LOG.debug('Template directory: %s', templatedir)
LOG.debug('Template file: %s', template_file)
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader(templatedir))
template = jinjaenv.get_template(template_file)
for key, value in kwargs.items():
LOG.debug('%s => %s', key, value)
rendered_json = template.render(**kwargs)
LOG.debug('Rendered JSON:\n%s', rendered_json)
return rendered_json
| Use more descriptive variable names | style: Use more descriptive variable names
See also: PSOBAT-1197
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Render Jinja2 template."""
import logging
import os
import jinja2
LOG = logging.getLogger(__name__)
def get_template(template_file='', **kwargs):
"""Get the Jinja2 template and renders with dict _kwargs_.
Args:
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template.
"""
here = os.path.dirname(os.path.realpath(__file__))
templatedir = '{0}/../templates/'.format(here)
LOG.debug('Template directory: %s', templatedir)
LOG.debug('Template file: %s', template_file)
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader(templatedir))
template = jinjaenv.get_template(template_file)
for key, value in kwargs.items():
LOG.debug('%s => %s', key, value)
rendered_json = template.render(**kwargs)
LOG.debug('Rendered JSON:\n%s', rendered_json)
return rendered_json
| style: Use more descriptive variable names
See also: PSOBAT-1197
"""Render Jinja2 template."""
import logging
import os
import jinja2
LOG = logging.getLogger(__name__)
def get_template(template_file='', **kwargs):
"""Get the Jinja2 template and renders with dict _kwargs_.
Args:
kwargs: Keywords to use for rendering the Jinja2 template.
Returns:
String of rendered JSON template.
"""
here = os.path.dirname(os.path.realpath(__file__))
templatedir = '{0}/../templates/'.format(here)
LOG.debug('Template directory: %s', templatedir)
LOG.debug('Template file: %s', template_file)
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader(templatedir))
template = jinjaenv.get_template(template_file)
for k,v in kwargs.items():
LOG.debug('%s => %s', k,v)
rendered_json = template.render(**kwargs)
LOG.debug('Rendered JSON:\n%s', rendered_json)
return rendered_json
|
e7cba721d78860d0151cc65793e567b0da719d39 | regserver/regulations/tests/partial_view_tests.py | regserver/regulations/tests/partial_view_tests.py | from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
| from unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
| Change test, so that view has a request object | Change test, so that view has a request object
| Python | cc0-1.0 | ascott1/regulations-site,18F/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,adderall/regulations-site,adderall/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,EricSchles/regulations-site,EricSchles/regulations-site,willbarton/regulations-site,adderall/regulations-site,willbarton/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,tadhg-ohiggins/regulations-site,jeremiak/regulations-site,tadhg-ohiggins/regulations-site,EricSchles/regulations-site,jeremiak/regulations-site,18F/regulations-site,EricSchles/regulations-site,adderall/regulations-site,jeremiak/regulations-site,18F/regulations-site,eregs/regulations-site,ascott1/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,eregs/regulations-site,tadhg-ohiggins/regulations-site,jeremiak/regulations-site,eregs/regulations-site,eregs/regulations-site | from unittest import TestCase
from mock import Mock, patch
from django.test import RequestFactory
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
paragraph_id = '103-3-a'
reg_version = '2013-10607'
request = RequestFactory().get('/fake-path')
view = PartialParagraphView.as_view(template_name='tree.html')
response = view(request, paragraph_id=paragraph_id, reg_version=reg_version)
self.assertEqual(response.context_data['node'],
generator.get_tree_paragraph.return_value)
| Change test, so that view has a request object
from unittest import TestCase
from mock import Mock, patch
from regulations.generator.layers.layers_applier import *
from regulations.views.partial import *
class PartialParagraphViewTests(TestCase):
@patch('regulations.views.partial.generator')
def test_get_context_data(self, generator):
generator.get_all_section_layers.return_value = (InlineLayersApplier(),
ParagraphLayersApplier(), SearchReplaceLayersApplier())
generator.get_tree_paragraph.return_value = {
'text': 'Some Text',
'children': [],
'label': {'text': '867-53-q', 'parts': ['867', '53', 'q']}
}
rpv = PartialParagraphView()
context = rpv.get_context_data(paragraph_id = '867-53-q',
reg_version = 'verver')
self.assertEqual(context['node'],
generator.get_tree_paragraph.return_value)
|
35c2c26ba379c4fc33465c11bb77a5cc8b4a7d2d | data/process_bigrams.py | data/process_bigrams.py | # Intended to be used with count_2w.txt which has the following format:
# A B\tFREQENCY
# Sometimes "A" is "<S>" for start and "</S>" for end.
# Output is similar with all output lower-cased (including "<S>" and "</S>").
import collections
from src.data import data
all_results = collections.defaultdict(int)
for line in data.open_project_path('data/count_2w.txt', errors='ignore'):
a, b, count = line.split()
key = ('%s %s' % (a, b)).lower()
all_results[key] += int(count)
for item in sorted(all_results.items(), key=lambda x: x[1], reverse=True):
print('%s\t%s' % item)
| Reformat words_2w.txt to sort and remove caps. | Reformat words_2w.txt to sort and remove caps.
| Python | mit | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge | # Intended to be used with count_2w.txt which has the following format:
# A B\tFREQENCY
# Sometimes "A" is "<S>" for start and "</S>" for end.
# Output is similar with all output lower-cased (including "<S>" and "</S>").
import collections
from src.data import data
all_results = collections.defaultdict(int)
for line in data.open_project_path('data/count_2w.txt', errors='ignore'):
a, b, count = line.split()
key = ('%s %s' % (a, b)).lower()
all_results[key] += int(count)
for item in sorted(all_results.items(), key=lambda x: x[1], reverse=True):
print('%s\t%s' % item)
| Reformat words_2w.txt to sort and remove caps.
|
|
e19b11c8598fe7a7e68640638a3489c05002f968 | tests/test_supercron.py | tests/test_supercron.py | #!/usr/bin/env python
import sys
import os
import unittest
from subprocess import Popen, PIPE
ROOT_DIR = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(ROOT_DIR)
from supercron import SuperCron
class RunTests(unittest.TestCase):
"""class that tests supercron for behavior correctness"""
def setUp(self):
pass
def get_crontab(self):
p = Popen(["crontab", "-l"], stdout=PIPE)
crontab_out, crontab_err = p.communicate()
return crontab_out
def test_midnight(self):
entry1 = b"@daily ls # ls"
entry2 = b"0 0 * * * ls # ls"
SuperCron.add_job("ls", "ls", "midnight")
user_crontab = self.get_crontab()
self.assertTrue(entry1 in user_crontab or entry2 in user_crontab)
def test_every_x_minutes(self):
hour, minute = SuperCron.get_time_now()
entry = b"*/5 {} * * * ls # ls".format(hour)
SuperCron.add_job("ls", "ls", "once every 5 minutes")
user_crontab = self.get_crontab()
self.assertTrue(entry in user_crontab)
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
import sys
import os
import unittest
from subprocess import Popen, PIPE
ROOT_DIR = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(ROOT_DIR)
from supercron import SuperCron
class RunTests(unittest.TestCase):
"""class that tests supercron for behavior correctness"""
def setUp(self):
pass
def tearDown(self):
SuperCron.delete_job("ls")
def get_crontab(self):
p = Popen(["crontab", "-l"], stdout=PIPE, stderr=PIPE)
crontab_out, crontab_err = p.communicate()
return crontab_out
def test_midnight(self):
entry1 = b"@daily ls # ls"
entry2 = b"0 0 * * * ls # ls"
SuperCron.add_job("ls", "ls", "midnight")
user_crontab = self.get_crontab()
self.assertTrue(entry1 in user_crontab or entry2 in user_crontab)
def test_every_x_minutes(self):
hour, minute = SuperCron.get_time_now()
entry = b"*/5 {} * * * ls # ls".format(hour)
SuperCron.add_job("ls", "ls", "once every 5 minutes")
user_crontab = self.get_crontab()
self.assertTrue(entry in user_crontab)
if __name__ == "__main__":
unittest.main()
| Delete the jobs in tearDown() in tests | Delete the jobs in tearDown() in tests
| Python | bsd-3-clause | linostar/SuperCron | #!/usr/bin/env python
import sys
import os
import unittest
from subprocess import Popen, PIPE
ROOT_DIR = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(ROOT_DIR)
from supercron import SuperCron
class RunTests(unittest.TestCase):
"""class that tests supercron for behavior correctness"""
def setUp(self):
pass
def tearDown(self):
SuperCron.delete_job("ls")
def get_crontab(self):
p = Popen(["crontab", "-l"], stdout=PIPE, stderr=PIPE)
crontab_out, crontab_err = p.communicate()
return crontab_out
def test_midnight(self):
entry1 = b"@daily ls # ls"
entry2 = b"0 0 * * * ls # ls"
SuperCron.add_job("ls", "ls", "midnight")
user_crontab = self.get_crontab()
self.assertTrue(entry1 in user_crontab or entry2 in user_crontab)
def test_every_x_minutes(self):
hour, minute = SuperCron.get_time_now()
entry = b"*/5 {} * * * ls # ls".format(hour)
SuperCron.add_job("ls", "ls", "once every 5 minutes")
user_crontab = self.get_crontab()
self.assertTrue(entry in user_crontab)
if __name__ == "__main__":
unittest.main()
| Delete the jobs in tearDown() in tests
#!/usr/bin/env python
import sys
import os
import unittest
from subprocess import Popen, PIPE
ROOT_DIR = os.path.join(os.path.dirname(__file__), "..")
sys.path.append(ROOT_DIR)
from supercron import SuperCron
class RunTests(unittest.TestCase):
"""class that tests supercron for behavior correctness"""
def setUp(self):
pass
def get_crontab(self):
p = Popen(["crontab", "-l"], stdout=PIPE)
crontab_out, crontab_err = p.communicate()
return crontab_out
def test_midnight(self):
entry1 = b"@daily ls # ls"
entry2 = b"0 0 * * * ls # ls"
SuperCron.add_job("ls", "ls", "midnight")
user_crontab = self.get_crontab()
self.assertTrue(entry1 in user_crontab or entry2 in user_crontab)
def test_every_x_minutes(self):
hour, minute = SuperCron.get_time_now()
entry = b"*/5 {} * * * ls # ls".format(hour)
SuperCron.add_job("ls", "ls", "once every 5 minutes")
user_crontab = self.get_crontab()
self.assertTrue(entry in user_crontab)
if __name__ == "__main__":
unittest.main()
|
b264313d48a66b847a1cfa6459745f2d35e10cee | tests/conftest.py | tests/conftest.py | import os
import subprocess
import sys
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CUPY_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
| Print installed packages in pytest | Print installed packages in pytest
| Python | mit | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | import os
import subprocess
import sys
def _is_pip_installed():
try:
import pip # NOQA
return True
except ImportError:
return False
def _is_in_ci():
ci_name = os.environ.get('CUPY_CI', '')
return ci_name != ''
def pytest_configure(config):
# Print installed packages
if _is_in_ci() and _is_pip_installed():
print("***** Installed packages *****", flush=True)
subprocess.check_call([sys.executable, '-m', 'pip', 'freeze', '--all'])
| Print installed packages in pytest
|
|
88d938fc4050ef99180ff364f0a6d27d31ecc16c | lambdautils/metadata.py | lambdautils/metadata.py | # -*- coding: utf-8 -*-
"""Project metadata."""
package = "lambdautils"
project = "lambdautils"
version = '0.2.8'
description = "Simple utilities for AWS Lambda functions"
authors = ["Innovative Travel Ltd"]
authors_string = ', '.join(authors)
emails = ['[email protected]']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://github.com/InnovativeTravel/humilis-lambdautils'
| # -*- coding: utf-8 -*-
"""Project metadata."""
package = "lambdautils"
project = "lambdautils"
version = '0.2.9'
description = "Simple utilities for AWS Lambda functions"
authors = ["Innovative Travel Ltd"]
authors_string = ', '.join(authors)
emails = ['[email protected]']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://github.com/InnovativeTravel/humilis-lambdautils'
| Support for externally produced Kinesis partition keys | Support for externally produced Kinesis partition keys
| Python | mit | humilis/humilis-lambdautils | # -*- coding: utf-8 -*-
"""Project metadata."""
package = "lambdautils"
project = "lambdautils"
version = '0.2.9'
description = "Simple utilities for AWS Lambda functions"
authors = ["Innovative Travel Ltd"]
authors_string = ', '.join(authors)
emails = ['[email protected]']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://github.com/InnovativeTravel/humilis-lambdautils'
| Support for externally produced Kinesis partition keys
# -*- coding: utf-8 -*-
"""Project metadata."""
package = "lambdautils"
project = "lambdautils"
version = '0.2.8'
description = "Simple utilities for AWS Lambda functions"
authors = ["Innovative Travel Ltd"]
authors_string = ', '.join(authors)
emails = ['[email protected]']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://github.com/InnovativeTravel/humilis-lambdautils'
|
2231c0384e56af56285999bc0bf7a096d3dd1cb9 | pyuploadcare/dj/models.py | pyuploadcare/dj/models.py | from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, basestring):
return UploadCare().file(value)
if isinstance(value, File):
return value
raise ValidationError('Invalid value for a field')
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
| from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, File):
return value
if not isinstance(value, basestring):
raise ValidationError(
u'Invalid value for a field: string was expected'
)
try:
return UploadCare().file(value)
except InvalidRequestError as exc:
raise ValidationError(
u'Invalid value for a field: {exc}'.format(exc=exc)
)
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
| Add handling of InvalidRequestError in ``to_python` | Add handling of InvalidRequestError in ``to_python`
| Python | mit | uploadcare/pyuploadcare | from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, File):
return value
if not isinstance(value, basestring):
raise ValidationError(
u'Invalid value for a field: string was expected'
)
try:
return UploadCare().file(value)
except InvalidRequestError as exc:
raise ValidationError(
u'Invalid value for a field: {exc}'.format(exc=exc)
)
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
| Add handling of InvalidRequestError in ``to_python`
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, basestring):
return UploadCare().file(value)
if isinstance(value, File):
return value
raise ValidationError('Invalid value for a field')
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
|
2cd19b395f4320330b66dff1ef98d149f3a40a31 | ckanext/syndicate/tests/test_plugin.py | ckanext/syndicate/tests/test_plugin.py | from mock import patch
import unittest
import ckan.model as model
from ckan.model.domain_object import DomainObjectOperation
from ckanext.syndicate.plugin import SyndicatePlugin
class TestPlugin(unittest.TestCase):
def test_notify_syndicates_task(self):
entity = model.Package()
entity.extras = {'syndicate': 'true'}
with patch('ckanext.syndicate.plugin.syndicate_task') as mock_syndicate:
plugin = SyndicatePlugin()
plugin.notify(entity, DomainObjectOperation.new)
mock_syndicate.assert_called_with(entity.id, 'dataset/create')
| from mock import patch
import unittest
import ckan.model as model
from ckan.model.domain_object import DomainObjectOperation
from ckanext.syndicate.plugin import SyndicatePlugin
class TestNotify(unittest.TestCase):
def setUp(self):
super(TestNotify, self).setUp()
self.entity = model.Package()
self.entity.extras = {'syndicate': 'true'}
self.syndicate_patch = patch('ckanext.syndicate.plugin.syndicate_task')
self.plugin = SyndicatePlugin()
def test_syndicates_task_for_dataset_create(self):
with self.syndicate_patch as mock_syndicate:
self.plugin.notify(self.entity, DomainObjectOperation.new)
mock_syndicate.assert_called_with(self.entity.id,
'dataset/create')
def test_syndicates_task_for_dataset_update(self):
with self.syndicate_patch as mock_syndicate:
self.plugin.notify(self.entity, DomainObjectOperation.changed)
mock_syndicate.assert_called_with(self.entity.id,
'dataset/update')
| Add test for notify dataset/update | Add test for notify dataset/update
| Python | agpl-3.0 | aptivate/ckanext-syndicate,sorki/ckanext-redmine-autoissues,aptivate/ckanext-syndicate,sorki/ckanext-redmine-autoissues | from mock import patch
import unittest
import ckan.model as model
from ckan.model.domain_object import DomainObjectOperation
from ckanext.syndicate.plugin import SyndicatePlugin
class TestNotify(unittest.TestCase):
def setUp(self):
super(TestNotify, self).setUp()
self.entity = model.Package()
self.entity.extras = {'syndicate': 'true'}
self.syndicate_patch = patch('ckanext.syndicate.plugin.syndicate_task')
self.plugin = SyndicatePlugin()
def test_syndicates_task_for_dataset_create(self):
with self.syndicate_patch as mock_syndicate:
self.plugin.notify(self.entity, DomainObjectOperation.new)
mock_syndicate.assert_called_with(self.entity.id,
'dataset/create')
def test_syndicates_task_for_dataset_update(self):
with self.syndicate_patch as mock_syndicate:
self.plugin.notify(self.entity, DomainObjectOperation.changed)
mock_syndicate.assert_called_with(self.entity.id,
'dataset/update')
| Add test for notify dataset/update
from mock import patch
import unittest
import ckan.model as model
from ckan.model.domain_object import DomainObjectOperation
from ckanext.syndicate.plugin import SyndicatePlugin
class TestPlugin(unittest.TestCase):
def test_notify_syndicates_task(self):
entity = model.Package()
entity.extras = {'syndicate': 'true'}
with patch('ckanext.syndicate.plugin.syndicate_task') as mock_syndicate:
plugin = SyndicatePlugin()
plugin.notify(entity, DomainObjectOperation.new)
mock_syndicate.assert_called_with(entity.id, 'dataset/create')
|
b5744150da20f9b3b0f37704eb91878de21072cf | deploy/scripts/upgrade-web.py | deploy/scripts/upgrade-web.py | #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
| #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
| Install uwsgi in venv on web upgrade | Install uwsgi in venv on web upgrade
| Python | mit | clicheio/cliche,clicheio/cliche,item4/cliche,clicheio/cliche,item4/cliche | #!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
with (workdir / 'etc' / 'revision.txt').open('r') as revision_file:
revision = (revision_file.readline().strip())
venv_dir = pathlib.Path('/home/cliche/venv_{}'.format(revision))
subprocess.check_call(
[
'sudo',
'-ucliche',
str(venv_dir / 'bin' / 'pip'),
'install',
'uwsgi',
]
)
if __name__ == '__main__':
main()
| Install uwsgi in venv on web upgrade
#!/usr/bin/python3
import errno
import pathlib
import platform
import sys
import subprocess
def main():
dist = platform.dist()
if dist[0] != 'debian' and dist[0] != 'Ubuntu':
print('This script can only be run on Debian GNU/Linux or Ubuntu.')
sys.exit(errno.EPERM)
workdir = pathlib.Path(__file__).resolve().parent.parent
subprocess.check_call(
[
'cp'
] +
[str(path) for path in ((workdir / 'scripts').glob('*'))] +
[
str(workdir / 'deploy' / 'tmp' / 'scripts')
]
)
if __name__ == '__main__':
main()
|
a11d33f5e1df23f044cac709ebbbb5d369d0e6ca | tests/test_add_language/test_update_language_list.py | tests/test_add_language/test_update_language_list.py | # test_update_language_list
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
from tests.test_add_language.decorators import redirect_stdout
@nose.with_setup(set_up, tear_down)
@redirect_stdout
def test_update_languge_list_add(out):
"""should add new languages to language list"""
add_lang.update_language_list('kln', 'Klingon')
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
klingon_lang = None
for lang in langs:
if lang['id'] == 'kln':
klingon_lang = lang
nose.assert_is_not_none(klingon_lang)
nose.assert_equal(klingon_lang['name'], 'Klingon')
| Add first test for update_language_list function | Add first test for update_language_list function
| Python | mit | caleb531/youversion-suggest,caleb531/youversion-suggest | # test_update_language_list
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
from tests.test_add_language.decorators import redirect_stdout
@nose.with_setup(set_up, tear_down)
@redirect_stdout
def test_update_languge_list_add(out):
"""should add new languages to language list"""
add_lang.update_language_list('kln', 'Klingon')
langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
with open(langs_path, 'r') as langs_file:
langs = json.load(langs_file)
klingon_lang = None
for lang in langs:
if lang['id'] == 'kln':
klingon_lang = lang
nose.assert_is_not_none(klingon_lang)
nose.assert_equal(klingon_lang['name'], 'Klingon')
| Add first test for update_language_list function
|
|
4b52f2c237ff3c73af15846e7ae23436af8ab6c7 | airesources/Python/BasicBot.py | airesources/Python/BasicBot.py | from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
turtleFactor = random.randint(1, 20)
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < turtleFactor*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
| from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
| Revert basic bot random turtle factor | Revert basic bot random turtle factor
Former-commit-id: 53ffe42cf718cfedaa3ec329b0688c093513683c
Former-commit-id: 6a282c036f4e11a0aa9e954f72050053059ac557
Former-commit-id: c52f52d401c4a3768c7d590fb02f3d08abd38002 | Python | mit | HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite-II,yangle/HaliteIO,yangle/HaliteIO,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,lanyudhy/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,HaliteChallenge/Halite-II,HaliteChallenge/Halite-II,HaliteChallenge/Halite,yangle/HaliteIO,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite-II,yangle/HaliteIO,HaliteChallenge/Halite,HaliteChallenge/Halite-II,lanyudhy/Halite-II,HaliteChallenge/Halite,HaliteChallenge/Halite-II | from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < 5*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
| Revert basic bot random turtle factor
Former-commit-id: 53ffe42cf718cfedaa3ec329b0688c093513683c
Former-commit-id: 6a282c036f4e11a0aa9e954f72050053059ac557
Former-commit-id: c52f52d401c4a3768c7d590fb02f3d08abd38002
from hlt import *
from networking import *
playerTag, gameMap = getInit()
sendInit("BasicBot"+str(playerTag))
turtleFactor = random.randint(1, 20)
while True:
moves = []
gameMap = getFrame()
for y in range(0, len(gameMap.contents)):
for x in range(0, len(gameMap.contents[y])):
site = gameMap.contents[y][x]
if site.owner == playerTag:
direction = random.randint(0, 5)
if site.strength < turtleFactor*site.production:
direction = STILL
else:
for d in CARDINALS:
if gameMap.getSite(Location(x, y), d).owner != playerTag:
direction = d
break
moves.append(Move(Location(x, y), direction))
sendFrame(moves)
|
8fb60650f8ff1da16d537402e7227f78667b434e | tests/test_schema_loader.py | tests/test_schema_loader.py | import contextlib
import json
import os
import tempfile
import unittest
from faker_schema.schema_loader import load_json_from_file, load_json_from_string
class TestFakerSchema(unittest.TestCase):
def test_load_json_from_string(self):
schema_json_string = '{"Full Name": "name", "Address": "address", "Email": "email"}'
schema = load_json_from_string(schema_json_string)
self.assertEqual(schema, {'Full Name': 'name', 'Address': 'address', 'Email': 'email'})
def test_load_json_from_string_incorrect_json(self):
schema_json_string = '{"Full Name": "name", }'
with self.assertRaises(ValueError):
load_json_from_string(schema_json_string)
@contextlib.contextmanager
def _write_to_temp_file(self, data, write_to_json=False):
with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file:
if write_to_json:
json.dump(data, temp_file)
else:
temp_file.write(data)
try:
yield temp_file.name
finally:
os.remove(temp_file.name)
def test_load_json_from_file(self):
schema = {'Full Name': 'name', 'Address': 'address', 'Email': 'email'}
with self._write_to_temp_file(schema, write_to_json=True) as temp_file:
schema = load_json_from_file(temp_file)
self.assertEqual(schema, {'Full Name': 'name', 'Address': 'address', 'Email': 'email'})
def test_load_json_from_file_incorrect_json(self):
schema = '{"Full Name": ["name", "place", ]}'
with self._write_to_temp_file(schema) as temp_file:
with self.assertRaises(ValueError):
load_json_from_file(temp_file) | Add unit tests for schema loader module | Add unit tests for schema loader module
| Python | mit | ueg1990/faker-schema | import contextlib
import json
import os
import tempfile
import unittest
from faker_schema.schema_loader import load_json_from_file, load_json_from_string
class TestFakerSchema(unittest.TestCase):
def test_load_json_from_string(self):
schema_json_string = '{"Full Name": "name", "Address": "address", "Email": "email"}'
schema = load_json_from_string(schema_json_string)
self.assertEqual(schema, {'Full Name': 'name', 'Address': 'address', 'Email': 'email'})
def test_load_json_from_string_incorrect_json(self):
schema_json_string = '{"Full Name": "name", }'
with self.assertRaises(ValueError):
load_json_from_string(schema_json_string)
@contextlib.contextmanager
def _write_to_temp_file(self, data, write_to_json=False):
with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file:
if write_to_json:
json.dump(data, temp_file)
else:
temp_file.write(data)
try:
yield temp_file.name
finally:
os.remove(temp_file.name)
def test_load_json_from_file(self):
schema = {'Full Name': 'name', 'Address': 'address', 'Email': 'email'}
with self._write_to_temp_file(schema, write_to_json=True) as temp_file:
schema = load_json_from_file(temp_file)
self.assertEqual(schema, {'Full Name': 'name', 'Address': 'address', 'Email': 'email'})
def test_load_json_from_file_incorrect_json(self):
schema = '{"Full Name": ["name", "place", ]}'
with self._write_to_temp_file(schema) as temp_file:
with self.assertRaises(ValueError):
load_json_from_file(temp_file) | Add unit tests for schema loader module
|
|
fd114dbd5036735a9c3bcbd49fd8d31c2e750a8a | nailgun/nailgun/test/unit/test_requirements.py | nailgun/nailgun/test/unit/test_requirements.py | # -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkg_resources import require
def test_check_requirements_conflicts():
require('nailgun')
| Check if nailgun's requirements do not conflict | Check if nailgun's requirements do not conflict
Added simple test that will try to parse nailgun's
requirements and see if there are any problems with them.
Change-Id: I342eda0a3b019780e0d452455734591aab91f6e9
Closes-Bug: #1462281
| Python | apache-2.0 | huntxu/fuel-web,stackforge/fuel-web,eayunstack/fuel-web,prmtl/fuel-web,stackforge/fuel-web,prmtl/fuel-web,SmartInfrastructures/fuel-web-dev,prmtl/fuel-web,nebril/fuel-web,huntxu/fuel-web,eayunstack/fuel-web,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,prmtl/fuel-web,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,huntxu/fuel-web,nebril/fuel-web,nebril/fuel-web,eayunstack/fuel-web,huntxu/fuel-web,stackforge/fuel-web,nebril/fuel-web,nebril/fuel-web,SmartInfrastructures/fuel-web-dev,huntxu/fuel-web,prmtl/fuel-web,SmartInfrastructures/fuel-web-dev | # -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkg_resources import require
def test_check_requirements_conflicts():
require('nailgun')
| Check if nailgun's requirements do not conflict
Added simple test that will try to parse nailgun's
requirements and see if there are any problems with them.
Change-Id: I342eda0a3b019780e0d452455734591aab91f6e9
Closes-Bug: #1462281
|
|
a466a89cd18252c6d90fd3b590148ca3268ff637 | karabo_data/tests/test_lpd_geometry.py | karabo_data/tests/test_lpd_geometry.py | from matplotlib.figure import Figure
import numpy as np
from karabo_data.geometry2 import LPD_1MGeometry
def test_inspect():
geom = LPD_1MGeometry.from_quad_positions([
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)
])
# Smoketest
fig = geom.inspect()
assert isinstance(fig, Figure)
def test_snap_assemble_data():
geom = LPD_1MGeometry.from_quad_positions([
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)
])
stacked_data = np.zeros((16, 256, 256))
img, centre = geom.position_modules_fast(stacked_data)
assert img.shape == (1202, 1104)
assert tuple(centre) == (604, 547)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
| Add a couple of simple tests for LPD geometry | Add a couple of simple tests for LPD geometry
| Python | bsd-3-clause | European-XFEL/h5tools-py | from matplotlib.figure import Figure
import numpy as np
from karabo_data.geometry2 import LPD_1MGeometry
def test_inspect():
geom = LPD_1MGeometry.from_quad_positions([
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)
])
# Smoketest
fig = geom.inspect()
assert isinstance(fig, Figure)
def test_snap_assemble_data():
geom = LPD_1MGeometry.from_quad_positions([
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)
])
stacked_data = np.zeros((16, 256, 256))
img, centre = geom.position_modules_fast(stacked_data)
assert img.shape == (1202, 1104)
assert tuple(centre) == (604, 547)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
| Add a couple of simple tests for LPD geometry
|
|
ae9392137c66832e2e4fa0a51938aad2e6fdb8a4 | django_q/__init__.py | django_q/__init__.py | import os
import sys
import django
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
| # import os
# import sys
import django
# myPath = os.path.dirname(os.path.abspath(__file__))
# sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
| Change path location of django q | Change path location of django q
| Python | mit | Koed00/django-q | # import os
# import sys
import django
# myPath = os.path.dirname(os.path.abspath(__file__))
# sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
| Change path location of django q
import os
import sys
import django
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath)
VERSION = (0, 9, 2)
default_app_config = 'django_q.apps.DjangoQConfig'
# root imports will slowly be deprecated.
# please import from the relevant sub modules
if django.VERSION[:2] < (1, 9):
from .tasks import async, schedule, result, result_group, fetch, fetch_group, count_group, delete_group, queue_size
from .models import Task, Schedule, Success, Failure
from .cluster import Cluster
from .status import Stat
from .brokers import get_broker
__all__ = ['conf', 'cluster', 'models', 'tasks']
|
ea5bcdb8864fe326fcaa66f43313311d954ed759 | arx/sources/test/s3.py | arx/sources/test/s3.py | import pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_http():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
| Test S3 parsing and validation | Test S3 parsing and validation
| Python | mit | drcloud/arx | import pytest
from ...decorators import InvalidScheme
from ..s3 import S3, S3Jar, S3Tar, Invalid
def test_http():
src = S3('s3://bucket/key')
assert src.authority == 'bucket'
assert src.path == '/key'
assert src.fragment is None
with pytest.raises(Invalid):
src = S3('s3://bucket/key#pieces')
with pytest.raises(InvalidScheme):
src = S3('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3('jar+s3://bucket/key')
def test_tar():
src = S3Tar('tar+s3://bucket/key.tbz')
assert src.scheme == 'tar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.tbz'
with pytest.raises(InvalidScheme):
src = S3Tar('https://aol.com/aol.tgz')
def test_jar():
src = S3Jar('jar+s3://bucket/key.jar')
assert src.scheme == 'jar+s3'
assert src.authority == 'bucket'
assert src.path == '/key.jar'
assert src.fragment is None
with pytest.raises(Invalid):
S3Jar('jar+s3://bucket/key.jar#web.xml')
with pytest.raises(InvalidScheme):
src = S3Jar('tar+s3://bucket/key')
with pytest.raises(InvalidScheme):
src = S3Jar('https://aol.com/web.jar')
| Test S3 parsing and validation
|
|
a670b598f4416b0e99acd7442e5a51295a5daaa3 | tests/test_utils.py | tests/test_utils.py | import os
import time
import unittest
from helpers.utils import sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
pass
def os_waitpid(a, b):
return (0, 0)
def time_sleep(_):
sigchld_handler(None, None)
class TestUtils(unittest.TestCase):
def __init__(self, method_name='runTest'):
self.setUp = self.set_up
self.tearDown = self.tear_down
super(TestUtils, self).__init__(method_name)
def set_up(self):
self.time_sleep = time.sleep
time.sleep = nop
def tear_down(self):
time.sleep = self.time_sleep
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
def test_sigchld_handler(self):
sigchld_handler(None, None)
os.waitpid = os_waitpid
sigchld_handler(None, None)
def test_sleep(self):
time.sleep = time_sleep
sleep(0.01)
| import os
import time
import unittest
from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
pass
def os_waitpid(a, b):
return (0, 0)
def time_sleep(_):
sigchld_handler(None, None)
class TestUtils(unittest.TestCase):
def __init__(self, method_name='runTest'):
self.setUp = self.set_up
self.tearDown = self.tear_down
super(TestUtils, self).__init__(method_name)
def set_up(self):
self.time_sleep = time.sleep
time.sleep = nop
def tear_down(self):
time.sleep = self.time_sleep
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
def test_reap_children(self):
reap_children()
os.waitpid = os_waitpid
sigchld_handler(None, None)
reap_children()
def test_sleep(self):
time.sleep = time_sleep
sleep(0.01)
| Implement unit test for reap_children function | Implement unit test for reap_children function
| Python | mit | jinty/patroni,sean-/patroni,jinty/patroni,pgexperts/patroni,sean-/patroni,zalando/patroni,pgexperts/patroni,zalando/patroni | import os
import time
import unittest
from helpers.utils import reap_children, sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
pass
def os_waitpid(a, b):
return (0, 0)
def time_sleep(_):
sigchld_handler(None, None)
class TestUtils(unittest.TestCase):
def __init__(self, method_name='runTest'):
self.setUp = self.set_up
self.tearDown = self.tear_down
super(TestUtils, self).__init__(method_name)
def set_up(self):
self.time_sleep = time.sleep
time.sleep = nop
def tear_down(self):
time.sleep = self.time_sleep
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
def test_reap_children(self):
reap_children()
os.waitpid = os_waitpid
sigchld_handler(None, None)
reap_children()
def test_sleep(self):
time.sleep = time_sleep
sleep(0.01)
| Implement unit test for reap_children function
import os
import time
import unittest
from helpers.utils import sigchld_handler, sigterm_handler, sleep
def nop(*args, **kwargs):
pass
def os_waitpid(a, b):
return (0, 0)
def time_sleep(_):
sigchld_handler(None, None)
class TestUtils(unittest.TestCase):
def __init__(self, method_name='runTest'):
self.setUp = self.set_up
self.tearDown = self.tear_down
super(TestUtils, self).__init__(method_name)
def set_up(self):
self.time_sleep = time.sleep
time.sleep = nop
def tear_down(self):
time.sleep = self.time_sleep
def test_sigterm_handler(self):
self.assertRaises(SystemExit, sigterm_handler, None, None)
def test_sigchld_handler(self):
sigchld_handler(None, None)
os.waitpid = os_waitpid
sigchld_handler(None, None)
def test_sleep(self):
time.sleep = time_sleep
sleep(0.01)
|
a47f8ce5166b6b95b55136c2fd104e5c7b5dbf7a | swaggery/keywords.py | swaggery/keywords.py | '''A utility module to import all boilerplate Swaggery keywords into a module.
Usage:
from swaggery.keywords import *
'''
from .api import Api, Resource, operations
from .utils import Ptypes
from .logger import log
from .flowcontrol import Respond
from .models import (
Model,
Void,
Integer,
Float,
String,
Boolean,
Date,
DateTime,
List,
Set) | '''A utility module to import all boilerplate Swaggery keywords into a module.
Usage:
from swaggery.keywords import *
'''
from .api import Api, Resource, operations
from .utils import Ptypes
from .logger import log
from .flowcontrol import Respond
from .models import (
Model,
Void,
Integer,
Float,
String,
Boolean,
Date,
DateTime,
List,
Set
)
| Add newline to end of file | Add newline to end of file
| Python | agpl-3.0 | quasipedia/swaggery,quasipedia/swaggery | '''A utility module to import all boilerplate Swaggery keywords into a module.
Usage:
from swaggery.keywords import *
'''
from .api import Api, Resource, operations
from .utils import Ptypes
from .logger import log
from .flowcontrol import Respond
from .models import (
Model,
Void,
Integer,
Float,
String,
Boolean,
Date,
DateTime,
List,
Set
)
| Add newline to end of file
'''A utility module to import all boilerplate Swaggery keywords into a module.
Usage:
from swaggery.keywords import *
'''
from .api import Api, Resource, operations
from .utils import Ptypes
from .logger import log
from .flowcontrol import Respond
from .models import (
Model,
Void,
Integer,
Float,
String,
Boolean,
Date,
DateTime,
List,
Set) |
52cc08dd2df39d8b64ac1a95b6861985ca7ac487 | erpnext/manufacturing/doctype/bom_update_tool/test_bom_update_tool.py | erpnext/manufacturing/doctype/bom_update_tool/test_bom_update_tool.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
test_records = frappe.get_test_records('BOM')
class TestBOMUpdateTool(unittest.TestCase):
def test_replace_bom(self):
current_bom = "BOM-_Test Item Home Desktop Manufactured-001"
bom_doc = frappe.copy_doc(test_records[0])
bom_doc.items[1].item_code = "_Test Item"
bom_doc.insert()
update_tool = frappe.get_doc("BOM Update Tool")
update_tool.current_bom = current_bom
update_tool.new_bom = bom_doc.name
update_tool.replace_bom()
self.assertFalse(frappe.db.sql("select name from `tabBOM Item` where bom_no=%s", current_bom))
self.assertTrue(frappe.db.sql("select name from `tabBOM Item` where bom_no=%s", bom_doc.name)) | Test case added for replacing BOM | Test case added for replacing BOM
| Python | agpl-3.0 | gsnbng/erpnext,geekroot/erpnext,geekroot/erpnext,indictranstech/erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,gsnbng/erpnext,gsnbng/erpnext,geekroot/erpnext,indictranstech/erpnext | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
test_records = frappe.get_test_records('BOM')
class TestBOMUpdateTool(unittest.TestCase):
def test_replace_bom(self):
current_bom = "BOM-_Test Item Home Desktop Manufactured-001"
bom_doc = frappe.copy_doc(test_records[0])
bom_doc.items[1].item_code = "_Test Item"
bom_doc.insert()
update_tool = frappe.get_doc("BOM Update Tool")
update_tool.current_bom = current_bom
update_tool.new_bom = bom_doc.name
update_tool.replace_bom()
self.assertFalse(frappe.db.sql("select name from `tabBOM Item` where bom_no=%s", current_bom))
self.assertTrue(frappe.db.sql("select name from `tabBOM Item` where bom_no=%s", bom_doc.name)) | Test case added for replacing BOM
|
|
f2752572d915563ea5a3361dbb7a3fee08b04660 | tests/test_mmstats.py | tests/test_mmstats.py | import mmstats
def test_uint():
class MyStats(mmstats.MmStats):
apples = mmstats.UIntStat()
oranges = mmstats.UIntStat()
mmst = MyStats()
# Basic format
assert mmst.mmap[0] == '\x01'
assert mmst.mmap.find('applesL') != -1
assert mmst.mmap.find('orangesL') != -1
# Stat manipulation
assert mmst.apples == 0
assert mmst.oranges == 0
mmst.apples = 1
assert mmst.apples == 1
assert mmst.oranges == 0
| import mmstats
def test_uint():
class MyStats(mmstats.MmStats):
zebras = mmstats.UIntStat()
apples = mmstats.UIntStat()
oranges = mmstats.UIntStat()
mmst = MyStats()
# Basic format
assert mmst.mmap[0] == '\x01'
assert mmst.mmap.find('applesL') != -1
assert mmst.mmap.find('orangesL') != -1
assert mmst.mmap.find('zebrasL') != -1
# Stat manipulation
assert mmst.apples == 0
assert mmst.oranges == 0
assert mmst.zebras == 0
mmst.apples = 1
assert mmst.apples == 1
assert mmst.oranges == 0
assert mmst.zebras == 0
mmst.zebras = 9001
assert mmst.apples == 1
assert mmst.oranges == 0
assert mmst.zebras == 9001
| Make basic test a bit more thorough | Make basic test a bit more thorough
| Python | bsd-3-clause | schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats | import mmstats
def test_uint():
class MyStats(mmstats.MmStats):
zebras = mmstats.UIntStat()
apples = mmstats.UIntStat()
oranges = mmstats.UIntStat()
mmst = MyStats()
# Basic format
assert mmst.mmap[0] == '\x01'
assert mmst.mmap.find('applesL') != -1
assert mmst.mmap.find('orangesL') != -1
assert mmst.mmap.find('zebrasL') != -1
# Stat manipulation
assert mmst.apples == 0
assert mmst.oranges == 0
assert mmst.zebras == 0
mmst.apples = 1
assert mmst.apples == 1
assert mmst.oranges == 0
assert mmst.zebras == 0
mmst.zebras = 9001
assert mmst.apples == 1
assert mmst.oranges == 0
assert mmst.zebras == 9001
| Make basic test a bit more thorough
import mmstats
def test_uint():
class MyStats(mmstats.MmStats):
apples = mmstats.UIntStat()
oranges = mmstats.UIntStat()
mmst = MyStats()
# Basic format
assert mmst.mmap[0] == '\x01'
assert mmst.mmap.find('applesL') != -1
assert mmst.mmap.find('orangesL') != -1
# Stat manipulation
assert mmst.apples == 0
assert mmst.oranges == 0
mmst.apples = 1
assert mmst.apples == 1
assert mmst.oranges == 0
|
f331780f48d9f053ba770cade487417537cc2a93 | data_structures/graphs/adjacency_list.py | data_structures/graphs/adjacency_list.py | # -*- coding: utf-8 -*-
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from pprint import pprint as ppr
class AbstractGraphList(object):
def __init__(self):
# We're using a dict since the vertices are labeled, but the lists
# are contained within: "a collection of unordered lists."
self.nodes = {}
class AdjacencyList(AbstractGraphList):
"""
[Wikipedia]
"In graph theory and computer science, an adjacency list representation
of a graph is a collection of unordered lists, one for each vertex
in the graph. Each list describes the set of neighbors of its vertex.
See "Storing a sparse matrix" for an alternative approach." """
def __str__(self):
divider = '-' * 40
print(divider)
for node, adjacent in self.nodes.iteritems():
print('{} is adjacent to {} '.format(node, ', '.join(adjacent)))
print(divider)
return ''
def __setitem__(self, node, neighbors):
self.nodes[node] = neighbors
def __getitem__(self, node):
return self.nodes[node]
def report(self, vertex):
return self.__getitem__(vertex)
if __name__ == '__main__':
with Section('Adjacency list'):
AList = AdjacencyList()
AList['A'] = ['B', 'C', 'D']
AList['B'] = ['A', 'C', 'D']
AList['C'] = ['A', 'B', 'D']
AList['D'] = ['A', 'B', 'C']
print(AList)
ppr(AList.nodes)
print(AList.report('B'))
| Add adjacency list data structure | Add adjacency list data structure
| Python | apache-2.0 | christabor/MoAL,christabor/MoAL,christabor/MoAL,christabor/MoAL,christabor/MoAL | # -*- coding: utf-8 -*-
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from pprint import pprint as ppr
class AbstractGraphList(object):
def __init__(self):
# We're using a dict since the vertices are labeled, but the lists
# are contained within: "a collection of unordered lists."
self.nodes = {}
class AdjacencyList(AbstractGraphList):
"""
[Wikipedia]
"In graph theory and computer science, an adjacency list representation
of a graph is a collection of unordered lists, one for each vertex
in the graph. Each list describes the set of neighbors of its vertex.
See "Storing a sparse matrix" for an alternative approach." """
def __str__(self):
divider = '-' * 40
print(divider)
for node, adjacent in self.nodes.iteritems():
print('{} is adjacent to {} '.format(node, ', '.join(adjacent)))
print(divider)
return ''
def __setitem__(self, node, neighbors):
self.nodes[node] = neighbors
def __getitem__(self, node):
return self.nodes[node]
def report(self, vertex):
return self.__getitem__(vertex)
if __name__ == '__main__':
with Section('Adjacency list'):
AList = AdjacencyList()
AList['A'] = ['B', 'C', 'D']
AList['B'] = ['A', 'C', 'D']
AList['C'] = ['A', 'B', 'D']
AList['D'] = ['A', 'B', 'C']
print(AList)
ppr(AList.nodes)
print(AList.report('B'))
| Add adjacency list data structure
|
|
9fba6f871068b0d40b71b9de4f69ac59bc33f567 | tests/test_CheckButton.py | tests/test_CheckButton.py | #!/usr/bin/env python
import unittest
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
class CheckButtonTest(unittest.TestCase):
def testForBool(self):
myChkBtn = ProxyCheckButton()
# PyGObject bug, we cannot set bool in the constructor with
# introspection
#self.assertEqual(myChkBtn.props.data_type, 'bool')
# this test doens't work... maybe be a pygtk bug
#self.assertRaises(TypeError, myChkBtn.set_property, 'data-type', str)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
import unittest
import gtk
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
class CheckButtonTest(unittest.TestCase):
def testForBool(self):
myChkBtn = ProxyCheckButton()
assert isinstance(myChkBtn, gtk.CheckButton)
# PyGObject bug, we cannot set bool in the constructor with
# introspection
#self.assertEqual(myChkBtn.props.data_type, 'bool')
# this test doens't work... maybe be a pygtk bug
#self.assertRaises(TypeError, myChkBtn.set_property, 'data-type', str)
if __name__ == '__main__':
unittest.main()
| Add a silly assert to avoid a pyflakes warning | Add a silly assert to avoid a pyflakes warning | Python | lgpl-2.1 | stoq/kiwi | #!/usr/bin/env python
import unittest
import gtk
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
class CheckButtonTest(unittest.TestCase):
def testForBool(self):
myChkBtn = ProxyCheckButton()
assert isinstance(myChkBtn, gtk.CheckButton)
# PyGObject bug, we cannot set bool in the constructor with
# introspection
#self.assertEqual(myChkBtn.props.data_type, 'bool')
# this test doens't work... maybe be a pygtk bug
#self.assertRaises(TypeError, myChkBtn.set_property, 'data-type', str)
if __name__ == '__main__':
unittest.main()
| Add a silly assert to avoid a pyflakes warning
#!/usr/bin/env python
import unittest
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
class CheckButtonTest(unittest.TestCase):
def testForBool(self):
myChkBtn = ProxyCheckButton()
# PyGObject bug, we cannot set bool in the constructor with
# introspection
#self.assertEqual(myChkBtn.props.data_type, 'bool')
# this test doens't work... maybe be a pygtk bug
#self.assertRaises(TypeError, myChkBtn.set_property, 'data-type', str)
if __name__ == '__main__':
unittest.main()
|
556e9f5a9f04b730260268a769cbd7170868f693 | opps/__init__.py | opps/__init__.py | # See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
| Fix pkg resources declare namespace | Fix pkg resources declare namespace
| Python | mit | opps/opps-polls,opps/opps-polls | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pkg_resources
pkg_resources.declare_namespace(__name__)
| Fix pkg resources declare namespace
# See http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
e4967c60c172ee85c6050744b487156daee13c23 | Dice.py | Dice.py | import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
| Add base Die functionality(roll, hold, get) | Add base Die functionality(roll, hold, get)
| Python | mit | achyutreddy24/DiceGame | import random
class Die(object):
def __init__(self, sides = 6):
self.sides = sides
self.held = False
self.die_face = 1
def change_held(self, held):
self.held = held
def roll_die(self):
if (self.held == False):
self.die_face = random.randint(1, self.sides)
else:
pass
def get_die_face(self):
return self.die_face
| Add base Die functionality(roll, hold, get)
|
|
b35d4292e50e8a8dc56635bddeac5a1fc42a5d19 | tveebot_tracker/source.py | tveebot_tracker/source.py | from abc import ABC, abstractmethod
class TVShowNotFound(Exception):
""" Raised when a reference does not match any TV Show available """
class EpisodeSource(ABC):
"""
Abstract base class to define the interface for and episode source.
An episode source is used by the tracker to obtain episode files. A
source is usually based on a feed that provides links to TV Show's
episodes.
Every source has its own protocol to obtain the information and it uses
its own format to present that information. Implementations of this
interface are responsible for implementing the details of how to obtain
the episode files' information and present them to the tracker.
"""
# Called by the tracker when it wants to get the episodes available for
# a specific TVShow
@abstractmethod
def get_episodes_for(self, tvshow_reference: str) -> list:
"""
Retrieve all available episode files corresponding to the specified
TV show. Multiple files for the same episode may be retrieved.
The TV show to obtain the episodes from is identified by some reference
that uniquely identifies it within the episode source in question.
:param tvshow_reference: reference that uniquely identifies the TV show
to get the episodes for
:return: a list containing all episode files available for the specified
TV Show. An empty list if none is found.
:raise TVShowNotFound: if the specified reference does not match to any
TV show available
"""
| from abc import ABC, abstractmethod
class TVShowNotFound(Exception):
""" Raised when a reference does not match any TV Show available """
class EpisodeSource(ABC):
"""
Abstract base class to define the interface for and episode source.
An episode source is used by the tracker to obtain episode files. A
source is usually based on a feed that provides links to TV Show's
episodes.
Every source has its own protocol to obtain the information and it uses
its own format to present that information. Implementations of this
interface are responsible for implementing the details of how to obtain
the episode files' information and present them to the tracker.
"""
# Called by the tracker when it wants to get the episodes available for
# a specific TVShow
@abstractmethod
def fetch(self, tvshow_reference: str) -> list:
"""
Fetches all available episode files, corresponding to the specified
TV show. Multiple files for the same episode may be retrieved.
The TV show to obtain the episodes from is identified by some reference
that uniquely identifies it within the episode source in question.
:param tvshow_reference: reference that uniquely identifies the TV show
to get the episodes for
:return: a list containing all episode files available for the specified
TV Show. An empty list if none is found.
:raise TVShowNotFound: if the specified reference does not match to any
TV show available
"""
| Rename Source's get_episodes_for() method to fetch() | Rename Source's get_episodes_for() method to fetch()
| Python | mit | tveebot/tracker | from abc import ABC, abstractmethod
class TVShowNotFound(Exception):
""" Raised when a reference does not match any TV Show available """
class EpisodeSource(ABC):
"""
Abstract base class to define the interface for and episode source.
An episode source is used by the tracker to obtain episode files. A
source is usually based on a feed that provides links to TV Show's
episodes.
Every source has its own protocol to obtain the information and it uses
its own format to present that information. Implementations of this
interface are responsible for implementing the details of how to obtain
the episode files' information and present them to the tracker.
"""
# Called by the tracker when it wants to get the episodes available for
# a specific TVShow
@abstractmethod
def fetch(self, tvshow_reference: str) -> list:
"""
Fetches all available episode files, corresponding to the specified
TV show. Multiple files for the same episode may be retrieved.
The TV show to obtain the episodes from is identified by some reference
that uniquely identifies it within the episode source in question.
:param tvshow_reference: reference that uniquely identifies the TV show
to get the episodes for
:return: a list containing all episode files available for the specified
TV Show. An empty list if none is found.
:raise TVShowNotFound: if the specified reference does not match to any
TV show available
"""
| Rename Source's get_episodes_for() method to fetch()
from abc import ABC, abstractmethod
class TVShowNotFound(Exception):
""" Raised when a reference does not match any TV Show available """
class EpisodeSource(ABC):
"""
Abstract base class to define the interface for and episode source.
An episode source is used by the tracker to obtain episode files. A
source is usually based on a feed that provides links to TV Show's
episodes.
Every source has its own protocol to obtain the information and it uses
its own format to present that information. Implementations of this
interface are responsible for implementing the details of how to obtain
the episode files' information and present them to the tracker.
"""
# Called by the tracker when it wants to get the episodes available for
# a specific TVShow
@abstractmethod
def get_episodes_for(self, tvshow_reference: str) -> list:
"""
Retrieve all available episode files corresponding to the specified
TV show. Multiple files for the same episode may be retrieved.
The TV show to obtain the episodes from is identified by some reference
that uniquely identifies it within the episode source in question.
:param tvshow_reference: reference that uniquely identifies the TV show
to get the episodes for
:return: a list containing all episode files available for the specified
TV Show. An empty list if none is found.
:raise TVShowNotFound: if the specified reference does not match to any
TV show available
"""
|
f70d73b5a67ca13dc243f72ed701e1f8d5924405 | setup.py | setup.py | from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='[email protected]',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
| from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='[email protected]',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
| Revert "Perhaps need to modify the name" | Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.
| Python | mit | dpetzold/django-templated-email,vintasoftware/django-templated-email,ScanTrust/django-templated-email,vintasoftware/django-templated-email,mypebble/django-templated-email,dpetzold/django-templated-email,BradWhittington/django-templated-email,hator/django-templated-email,ScanTrust/django-templated-email,BradWhittington/django-templated-email,mypebble/django-templated-email,hator/django-templated-email | from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='[email protected]',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
| Revert "Perhaps need to modify the name"
This reverts commit d4ee1a1d91cd13bf0cb844be032eaa527806fad1.
from setuptools import setup
DESCRIPTION = "A Django oriented templated / transaction email abstraction"
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='denaje-django-templated-email',
version='0.4.9',
packages=['templated_email', 'templated_email.backends'],
author='Bradley Whittington',
author_email='[email protected]',
url='http://github.com/bradwhittington/django-templated-email/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
)
|
424fc74377ba4385e4c25fe90f888d39d5f14abd | runtests.py | runtests.py | #!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| #!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS. | Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS.
| Python | mit | extertioner/django-localeurl,carljm/django-localeurl,gonnado/django-localeurl | #!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
SITE_ID=1,
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
| Add SITE_ID to test settings since contrib.sites is in INSTALLED_APPS.
#!/usr/bin/env python
from os.path import dirname, abspath
import sys
from django.conf import settings
if not settings.configured:
from django import VERSION
settings_dict = dict(
INSTALLED_APPS=(
'localeurl',
'localeurl.tests',
'django.contrib.sites', # for sitemap test
),
ROOT_URLCONF='localeurl.tests.test_urls',
)
if VERSION >= (1, 2):
settings_dict["DATABASES"] = {
"default": {
"ENGINE": "django.db.backends.sqlite3"
}}
else:
settings_dict["DATABASE_ENGINE"] = "sqlite3"
settings.configure(**settings_dict)
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = dirname(abspath(__file__))
sys.path.insert(0, parent)
try:
from django.test.simple import DjangoTestSuiteRunner
def run_tests(test_args, verbosity, interactive):
runner = DjangoTestSuiteRunner(
verbosity=verbosity, interactive=interactive, failfast=False)
return runner.run_tests(test_args)
except ImportError:
# for Django versions that don't have DjangoTestSuiteRunner
from django.test.simple import run_tests
failures = run_tests(
test_args, verbosity=1, interactive=True)
sys.exit(failures)
if __name__ == '__main__':
runtests(*sys.argv[1:])
|
b56b7ed23ce60a352f163d21fedff63fe2a1c44a | scheduler/scheduled_external_program.py | scheduler/scheduled_external_program.py | import luigi
from luigi.contrib.external_program import ExternalProgramTask, ExternalProgramRunError
from subprocess import Popen, PIPE, check_call
import os
import datetime
import logging
logger = logging.getLogger('luigi-interface')
class Scheduler(object):
@classmethod
def fromblurb(cls, blurb):
for subcls in cls.__subclasses__():
if subcls.blurb == blurb:
return subcls()
else:
raise ValueError('{} is not a reckognized scheduler.'.format(blurb))
@classmethod
def run(self, task):
raise NotImplemented
class LocalScheduler(Scheduler):
blurb = 'local'
@classmethod
def run(self, task):
args = list(map(str, task.program_args()))
env = task.program_environment()
logger.info('Running command {}'.format(' '.join(args)))
proc = Popen(args, env=env, stdout=PIPE, stderr=PIPE, universal_newlines=True)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise ExternalProgramRunError('Program exited with non-zero return code.', args, env, stdout, stderr)
if task.capture_output:
logger.info('Program stdout:\n{}'.format(stdout))
logger.info('Program stderr:\n{}'.format(stderr))
class SlurmScheduler(Scheduler):
blurb = 'slurm'
@classmethod
def run(self, task):
srun_args = [
'--time', '{}:{}:{}'.format(task.walltime.seconds % 3600, (task.walltime.seconds // 3600) % 60, ((task.walltime.seconds // 3600) // 60)),
'--mem', '{}M'.format(task.memory),
'--cpus-per-task', str(task.ncpus)]
args = list(map(str, task.program_args()))
env = task.program_environment()
logger.info('Running command {}'.format(' '.join(args)))
proc = Popen(['srun'] + srun_args + args, env=env, stdout=PIPE, stderr=PIPE, universal_newlines=True)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise ExternalProgramRunError('Program exited with non-zero return code.', args, env, stdout, stderr)
if task.capture_output:
logger.info('Program stdout:\n{}'.format(stdout))
logger.info('Program stderr:\n{}'.format(stderr))
class ScheduledExternalProgramTask(ExternalProgramTask):
"""
Variant of luigi.contrib.external_program.ExternalProgramTask that runs on
a job scheduler.
"""
walltime = luigi.TimeDeltaParameter(default=datetime.timedelta(hours=1))
ncpus = luigi.IntParameter(default=1)
memory = luigi.FloatParameter(default=1024)
scheduler = luigi.ChoiceParameter(choices=[cls.blurb for cls in Scheduler.__subclasses__()], default='local')
def run(self):
return Scheduler.fromblurb(self.scheduler).run(self)
| Implement a scheduled version of Luigi's external program module | Implement a scheduled version of Luigi's external program module
| Python | unlicense | ppavlidis/rnaseq-pipeline,ppavlidis/rnaseq-pipeline,ppavlidis/rnaseq-pipeline | import luigi
from luigi.contrib.external_program import ExternalProgramTask, ExternalProgramRunError
from subprocess import Popen, PIPE, check_call
import os
import datetime
import logging
logger = logging.getLogger('luigi-interface')
class Scheduler(object):
@classmethod
def fromblurb(cls, blurb):
for subcls in cls.__subclasses__():
if subcls.blurb == blurb:
return subcls()
else:
raise ValueError('{} is not a reckognized scheduler.'.format(blurb))
@classmethod
def run(self, task):
raise NotImplemented
class LocalScheduler(Scheduler):
blurb = 'local'
@classmethod
def run(self, task):
args = list(map(str, task.program_args()))
env = task.program_environment()
logger.info('Running command {}'.format(' '.join(args)))
proc = Popen(args, env=env, stdout=PIPE, stderr=PIPE, universal_newlines=True)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise ExternalProgramRunError('Program exited with non-zero return code.', args, env, stdout, stderr)
if task.capture_output:
logger.info('Program stdout:\n{}'.format(stdout))
logger.info('Program stderr:\n{}'.format(stderr))
class SlurmScheduler(Scheduler):
blurb = 'slurm'
@classmethod
def run(self, task):
srun_args = [
'--time', '{}:{}:{}'.format(task.walltime.seconds % 3600, (task.walltime.seconds // 3600) % 60, ((task.walltime.seconds // 3600) // 60)),
'--mem', '{}M'.format(task.memory),
'--cpus-per-task', str(task.ncpus)]
args = list(map(str, task.program_args()))
env = task.program_environment()
logger.info('Running command {}'.format(' '.join(args)))
proc = Popen(['srun'] + srun_args + args, env=env, stdout=PIPE, stderr=PIPE, universal_newlines=True)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
raise ExternalProgramRunError('Program exited with non-zero return code.', args, env, stdout, stderr)
if task.capture_output:
logger.info('Program stdout:\n{}'.format(stdout))
logger.info('Program stderr:\n{}'.format(stderr))
class ScheduledExternalProgramTask(ExternalProgramTask):
"""
Variant of luigi.contrib.external_program.ExternalProgramTask that runs on
a job scheduler.
"""
walltime = luigi.TimeDeltaParameter(default=datetime.timedelta(hours=1))
ncpus = luigi.IntParameter(default=1)
memory = luigi.FloatParameter(default=1024)
scheduler = luigi.ChoiceParameter(choices=[cls.blurb for cls in Scheduler.__subclasses__()], default='local')
def run(self):
return Scheduler.fromblurb(self.scheduler).run(self)
| Implement a scheduled version of Luigi's external program module
|
|
ea0d732972d4b86cffdc63d73ff04b3da06b6860 | osmwriter/_version.py | osmwriter/_version.py | """Store the version info so that setup.py and __init__ can access it. """
__version__ = "0.2.0"
| """Store the version info so that setup.py and __init__ can access it. """
__version__ = "0.2.0.dev"
| Prepare version 0.2.0.dev for next development cycle | Prepare version 0.2.0.dev for next development cycle
| Python | agpl-3.0 | rory/openstreetmap-writer | """Store the version info so that setup.py and __init__ can access it. """
__version__ = "0.2.0.dev"
| Prepare version 0.2.0.dev for next development cycle
"""Store the version info so that setup.py and __init__ can access it. """
__version__ = "0.2.0"
|
34061c55be17a19846833148e2cf6e015918efae | frameworks/C/onion/setup.py | frameworks/C/onion/setup.py | import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| Remove minor errors in onion | Remove minor errors in onion
| Python | bsd-3-clause | alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jamming/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,methane/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sgml/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sgml/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Verber/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zapov/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,actframework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,sgml/FrameworkBenchmarks,herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Verber/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sgml/FrameworkBenchmarks,doom369/FrameworkBenchmarks,valyala/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jamming/FrameworkBenchmarks,methane/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sxend/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,denkab/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,joshk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,testn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,khellang/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,khellang/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,grob/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,denkab/FrameworkBenchmarks,actframework/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,testn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,sgml/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,methane/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,actframework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,joshk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,joshk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jamming/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,methane/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/FrameworkBenchmarks,torhve/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zapov/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,torhve/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,grob/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,valyala/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sxend/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,testn/FrameworkBenchmarks,testn/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,methane/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,valyala/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sxend/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,torhve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,herloct/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,grob/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,methane/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,grob/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,methane/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,doom369/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sgml/FrameworkBenchmarks,joshk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zapov/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,actframework/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,herloct/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jamming/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,khellang/FrameworkBenchmarks,actframework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,denkab/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sxend/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,denkab/FrameworkBenchmarks,denkab/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,testn/FrameworkBenchmarks,zapov/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,testn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,actframework/FrameworkBenchmarks,methane/FrameworkBenchmarks,torhve/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,joshk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,torhve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zapov/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,testn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,grob/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,methane/FrameworkBenchmarks,khellang/FrameworkBenchmarks,doom369/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,herloct/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,khellang/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zloster/FrameworkBenchmarks,grob/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,methane/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Verber/FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jamming/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,methane/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,joshk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks | import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm -f *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/ onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| Remove minor errors in onion
import subprocess
import sys
import os
import setup_util
def start(args, logfile, errfile):
setup_util.replace_text("onion/hello.c", "mysql_real_connect\(data.db\[i\], \".*\",", "mysql_real_connect(data.db[i], \"" + args.database_host + "\",")
subprocess.call("rm *.o", cwd="onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("cp -R $IROOT/onion/* onion/onion", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("rm CMakeCache.txt", shell=True, cwd="onion/onion/build", stderr=errfile, stdout=logfile)
subprocess.Popen("make && ./hello", shell=True, cwd="onion", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
|
ede7158c611bf618ee03989d33c5fe6a091b7d66 | tests/testapp/models.py | tests/testapp/models.py | from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
| from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
try:
from django.utils.encoding import python_2_unicode_compatible
except ImportError:
def python_2_unicode_compatible(c):
return c
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
| Add shim for python_2_unicode_compatible in tests | Add shim for python_2_unicode_compatible in tests
| Python | mit | dfunckt/django-rules,dfunckt/django-rules,ticosax/django-rules,ticosax/django-rules,dfunckt/django-rules,ticosax/django-rules | from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
try:
from django.utils.encoding import python_2_unicode_compatible
except ImportError:
def python_2_unicode_compatible(c):
return c
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
| Add shim for python_2_unicode_compatible in tests
from __future__ import absolute_import
import sys
from django.conf import settings
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
import rules
@python_2_unicode_compatible
class Book(models.Model):
isbn = models.CharField(max_length=50, unique=True)
title = models.CharField(max_length=100)
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
def __str__(self):
return self.title
if sys.version_info.major >= 3:
from rules.contrib.models import RulesModel
class TestModel(RulesModel):
class Meta:
rules_permissions = {"add": rules.always_true, "view": rules.always_true}
@classmethod
def preprocess_rules_permissions(cls, perms):
perms["custom"] = rules.always_true
|
86522eab0758ac6bf92ad19b60417b279c71a42c | tabtranslator/model.py | tabtranslator/model.py | # coding: utf-8
class Sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class Bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, time_signature=4):
super(bar, self).__init__()
self.time_signature = time_signature
self.notes = list()
class Note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
| # coding: utf-8
class Sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(Sheet, self).__init__()
self.name = name
self.bars = list()
class Bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, time_signature=4):
super(Bar, self).__init__()
self.time_signature = time_signature
self.notes = list()
class Note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(Note, self).__init__()
self.pitch = pitch
self.duration = duration
| FIX class name in camel case: | FIX class name in camel case:
| Python | mit | ograndedjogo/tab-translator,ograndedjogo/tab-translator | # coding: utf-8
class Sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(Sheet, self).__init__()
self.name = name
self.bars = list()
class Bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, time_signature=4):
super(Bar, self).__init__()
self.time_signature = time_signature
self.notes = list()
class Note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(Note, self).__init__()
self.pitch = pitch
self.duration = duration
| FIX class name in camel case:
# coding: utf-8
class Sheet(object):
""" sheet: Top level object.
Models the entire music sheet """
def __init__(self, name):
super(sheet, self).__init__()
self.name = name
self.bars = list()
class Bar(object):
""" bar: Models a measure.
Compose the sheet as the temporal layer
=> Where the notes are displayed on the sheet """
def __init__(self, time_signature=4):
super(bar, self).__init__()
self.time_signature = time_signature
self.notes = list()
class Note(object):
""" note: Models the unit in music representation
Drives visual representation
=> What note must be displayed on the sheet """
def __init__(self, pitch, duration=1):
super(bote, self).__init__()
self.pitch = pitch
self.duration = duration
|
e8ee7ad6e2560a4fd0ca287adc55155f066eb815 | akanda/horizon/routers/views.py | akanda/horizon/routers/views.py | from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
| from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
| Remove wrong reference to quantum | Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <[email protected]>
| Python | apache-2.0 | dreamhost/akanda-horizon,dreamhost/akanda-horizon | from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.neutron.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
| Remove wrong reference to quantum
Change-Id: Ic3d8b26e061e85c1d128a79b115fd2da4412e705
Signed-off-by: Rosario Di Somma <[email protected]>
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
b16b701f6ad80d0df27ab6ea1d9f115a6e2b9106 | pymatgen/__init__.py | pymatgen/__init__.py | __author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, " + \
"Geoffroy Hautier, William Davidson Richard, Dan Gunter, " + \
"Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jul 27, 2012"
__version__ = "2.1.2"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import zopen
| __author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, " + \
"Geoffroy Hautier, William Davidson Richard, Dan Gunter, " + \
"Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jul 27, 2012"
__version__ = "2.1.3dev"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import zopen
| Increase minor version number + dev. | Increase minor version number + dev.
Former-commit-id: 44023123016583dcb692ce23c19978e6f5d90abd [formerly 01b7fa7fe0778c195d9f75d35d43618691778ef8]
Former-commit-id: a96aa4b8265bf7b15143879b0a3b98e30a9e5953 | Python | mit | blondegeek/pymatgen,tallakahath/pymatgen,dongsenfo/pymatgen,mbkumar/pymatgen,vorwerkc/pymatgen,mbkumar/pymatgen,johnson1228/pymatgen,setten/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,blondegeek/pymatgen,aykol/pymatgen,richardtran415/pymatgen,dongsenfo/pymatgen,setten/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,nisse3000/pymatgen,nisse3000/pymatgen,tallakahath/pymatgen,Bismarrck/pymatgen,tschaume/pymatgen,montoyjh/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,gpetretto/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,setten/pymatgen,mbkumar/pymatgen,gmatteo/pymatgen,mbkumar/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,nisse3000/pymatgen,gmatteo/pymatgen,fraricci/pymatgen,aykol/pymatgen,gpetretto/pymatgen,vorwerkc/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,xhqu1981/pymatgen,setten/pymatgen,montoyjh/pymatgen,blondegeek/pymatgen,Bismarrck/pymatgen,vorwerkc/pymatgen,vorwerkc/pymatgen,ndardenne/pymatgen,nisse3000/pymatgen,gVallverdu/pymatgen,aykol/pymatgen,ndardenne/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,richardtran415/pymatgen,matk86/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,richardtran415/pymatgen,johnson1228/pymatgen,tallakahath/pymatgen,tschaume/pymatgen,davidwaroquiers/pymatgen,czhengsci/pymatgen,czhengsci/pymatgen,fraricci/pymatgen,matk86/pymatgen,Bismarrck/pymatgen,xhqu1981/pymatgen,gpetretto/pymatgen,montoyjh/pymatgen,davidwaroquiers/pymatgen,xhqu1981/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen,gpetretto/pymatgen,gVallverdu/pymatgen,matk86/pymatgen | __author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, " + \
"Geoffroy Hautier, William Davidson Richard, Dan Gunter, " + \
"Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jul 27, 2012"
__version__ = "2.1.3dev"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import zopen
| Increase minor version number + dev.
Former-commit-id: 44023123016583dcb692ce23c19978e6f5d90abd [formerly 01b7fa7fe0778c195d9f75d35d43618691778ef8]
Former-commit-id: a96aa4b8265bf7b15143879b0a3b98e30a9e5953
__author__ = "Shyue Ping Ong, Anubhav Jain, Michael Kocher, " + \
"Geoffroy Hautier, William Davidson Richard, Dan Gunter, " + \
"Shreyas Cholia, Vincent L Chevrier, Rickard Armiento"
__date__ = "Jul 27, 2012"
__version__ = "2.1.2"
"""
Useful aliases for commonly used objects and modules.
"""
from pymatgen.core.periodic_table import Element, Specie
from pymatgen.core.structure import Structure, Molecule, Composition
from pymatgen.core.lattice import Lattice
from pymatgen.serializers.json_coders import PMGJSONEncoder, PMGJSONDecoder
from pymatgen.electronic_structure.core import Spin, Orbital
from pymatgen.util.io_utils import zopen
|
7c68a78a81721ecbbda0f999576b91b803a34a3e | .circleci/get-commit-range.py | .circleci/get-commit-range.py | #!/usr/bin/env python3
import os
import argparse
from github import Github
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
base = pr.base.ref
head = pr.head.ref
return f'origin/{base}...{head}'
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'project',
default=os.environ['CIRCLE_PROJECT_USERNAME'],
nargs='?'
)
argparser.add_argument(
'repo',
default=os.environ['CIRCLE_PROJECT_REPONAME'],
nargs='?'
)
argparser.add_argument(
'--pr-number',
type=int,
nargs='?'
)
args = argparser.parse_args()
if not args.pr_number:
pr_number = int(os.environ['CIRCLE_PR_NUMBER'])
else:
pr_number = args.pr_number
print(from_pr(args.project, args.repo, pr_number))
if __name__ == '__main__':
main() | #!/usr/bin/env python3
import os
import argparse
from github import Github
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
base = pr.base.sha
head = pr.base.sha
return f'{base}...{head}'
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'project',
default=os.environ['CIRCLE_PROJECT_USERNAME'],
nargs='?'
)
argparser.add_argument(
'repo',
default=os.environ['CIRCLE_PROJECT_REPONAME'],
nargs='?'
)
argparser.add_argument(
'--pr-number',
type=int,
nargs='?'
)
args = argparser.parse_args()
if not args.pr_number:
pr_number = int(os.environ['CIRCLE_PR_NUMBER'])
else:
pr_number = args.pr_number
print(from_pr(args.project, args.repo, pr_number))
if __name__ == '__main__':
main() | Use SHAs for commit_range rather than refs | Use SHAs for commit_range rather than refs
Refs are local and might not always be present in the checkout.
| Python | bsd-3-clause | ryanlovett/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub | #!/usr/bin/env python3
import os
import argparse
from github import Github
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
base = pr.base.sha
head = pr.base.sha
return f'{base}...{head}'
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'project',
default=os.environ['CIRCLE_PROJECT_USERNAME'],
nargs='?'
)
argparser.add_argument(
'repo',
default=os.environ['CIRCLE_PROJECT_REPONAME'],
nargs='?'
)
argparser.add_argument(
'--pr-number',
type=int,
nargs='?'
)
args = argparser.parse_args()
if not args.pr_number:
pr_number = int(os.environ['CIRCLE_PR_NUMBER'])
else:
pr_number = args.pr_number
print(from_pr(args.project, args.repo, pr_number))
if __name__ == '__main__':
main() | Use SHAs for commit_range rather than refs
Refs are local and might not always be present in the checkout.
#!/usr/bin/env python3
import os
import argparse
from github import Github
def from_pr(project, repo, pr_number):
gh = Github()
pr = gh.get_repo(f'{project}/{repo}').get_pull(pr_number)
base = pr.base.ref
head = pr.head.ref
return f'origin/{base}...{head}'
def main():
argparser = argparse.ArgumentParser()
argparser.add_argument(
'project',
default=os.environ['CIRCLE_PROJECT_USERNAME'],
nargs='?'
)
argparser.add_argument(
'repo',
default=os.environ['CIRCLE_PROJECT_REPONAME'],
nargs='?'
)
argparser.add_argument(
'--pr-number',
type=int,
nargs='?'
)
args = argparser.parse_args()
if not args.pr_number:
pr_number = int(os.environ['CIRCLE_PR_NUMBER'])
else:
pr_number = args.pr_number
print(from_pr(args.project, args.repo, pr_number))
if __name__ == '__main__':
main() |
6cfc9de7fe8fd048a75845a69bdeefc7c742bae4 | oneall/django_oneall/management/commands/emaillogin.py | oneall/django_oneall/management/commands/emaillogin.py | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "E-mail login without sending the actual e-mail."
def add_arguments(self, parser):
parser.add_argument('email', type=str)
def handle(self, email, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return 1
query_string = EmailTokenAuthBackend().issue(email)
self.stdout.write("Complete login with: %s?%s" % (reverse('oneall-login'), query_string))
| # -*- coding: utf-8 -*-
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "Issues an e-mail login token."
def add_arguments(self, parser):
parser.add_argument('-s', '--send', dest='send', action='store_true',
help="Actually e-mail the token instead of only displaying it.")
parser.add_argument('email', type=str)
def handle(self, email, send, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return
query_string = EmailTokenAuthBackend().issue(email)
msg = "Complete login with: %s?%s" % (reverse('oneall-login'), query_string)
self.stdout.write(msg)
if send:
mail = EmailMessage()
mail.to = [email]
mail.subject = 'Login Test'
mail.body = msg
try:
sent = mail.send()
self.stdout.write("Sent %d message." % sent)
except ConnectionError as e:
self.stderr.write(str(e))
| Add the possibility of testing SMTP from the command-line. | Add the possibility of testing SMTP from the command-line.
| Python | mit | leandigo/django-oneall,ckot/django-oneall,leandigo/django-oneall,ckot/django-oneall | # -*- coding: utf-8 -*-
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "Issues an e-mail login token."
def add_arguments(self, parser):
parser.add_argument('-s', '--send', dest='send', action='store_true',
help="Actually e-mail the token instead of only displaying it.")
parser.add_argument('email', type=str)
def handle(self, email, send, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return
query_string = EmailTokenAuthBackend().issue(email)
msg = "Complete login with: %s?%s" % (reverse('oneall-login'), query_string)
self.stdout.write(msg)
if send:
mail = EmailMessage()
mail.to = [email]
mail.subject = 'Login Test'
mail.body = msg
try:
sent = mail.send()
self.stdout.write("Sent %d message." % sent)
except ConnectionError as e:
self.stderr.write(str(e))
| Add the possibility of testing SMTP from the command-line.
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from django.core.urlresolvers import reverse
from ...auth import EmailTokenAuthBackend
class Command(BaseCommand):
help = "E-mail login without sending the actual e-mail."
def add_arguments(self, parser):
parser.add_argument('email', type=str)
def handle(self, email, **options):
if '@' not in email:
self.stderr.write("Failed. E-mail is mandatory.")
return 1
query_string = EmailTokenAuthBackend().issue(email)
self.stdout.write("Complete login with: %s?%s" % (reverse('oneall-login'), query_string))
|
8fb421831bb562a80edf5c3de84d71bf2a3eec4b | tools/scrub_database.py | tools/scrub_database.py | import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from museum_site.models import * # noqa: E402
from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402
def main():
print("WARNING! THIS WILL PERMANENTLY REMOVE DATA FROM THIS DATABASE")
print("Are you sure you wish to remove all non-public data?")
confirm = input("Type 'yes' to confirm: ")
if confirm == "yes":
print("Deleting articles...")
for a in Article.objects.filter(published=REMOVED_ARTICLE):
print(a)
a.delete()
print("Done!")
print("Deleting file objects...")
for f in File.objects.filter(details__id=DETAIL_REMOVED):
print(f)
f.delete()
print("Done!")
print("Private data has removed. Database can be publicly shared.")
print("DONE.")
else:
print("ABORTED.")
if __name__ == '__main__':
main()
| import datetime
import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from django.contrib.sessions.models import Session
from django.contrib.auth.models import User
from museum_site.models import * # noqa: E402
from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402
def main():
print("WARNING! THIS WILL PERMANENTLY REMOVE DATA FROM THIS DATABASE")
print("Are you sure you wish to remove all non-public data?")
confirm = input("Type 'yes' to confirm: ")
if confirm == "yes":
print("Deleting articles...")
for a in Article.objects.filter(published=REMOVED_ARTICLE):
print(a)
a.delete()
print("Done!")
print("Deleting file objects...")
for f in File.objects.filter(details__id=DETAIL_REMOVED):
print(f)
f.delete()
print("Done!")
print("Deleting sessions...")
Session.objects.all().delete()
print("Done!")
print("Clearing accounts...")
qs = User.objects.all()
for u in qs:
u.username = "USER #" + str(u.id)
u.first_name = ""
u.last_name= ""
u.email = "[email protected]"
u.password = u.set_password("password")
u.is_staff = False
u.is_superuser = False
u.save()
print("Private data has removed. Database can be publicly shared.")
print("DONE.")
else:
print("ABORTED.")
if __name__ == '__main__':
main()
| Remove sessions when scrubbing DB for public release | Remove sessions when scrubbing DB for public release
| Python | mit | DrDos0016/z2,DrDos0016/z2,DrDos0016/z2 | import datetime
import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from django.contrib.sessions.models import Session
from django.contrib.auth.models import User
from museum_site.models import * # noqa: E402
from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402
def main():
print("WARNING! THIS WILL PERMANENTLY REMOVE DATA FROM THIS DATABASE")
print("Are you sure you wish to remove all non-public data?")
confirm = input("Type 'yes' to confirm: ")
if confirm == "yes":
print("Deleting articles...")
for a in Article.objects.filter(published=REMOVED_ARTICLE):
print(a)
a.delete()
print("Done!")
print("Deleting file objects...")
for f in File.objects.filter(details__id=DETAIL_REMOVED):
print(f)
f.delete()
print("Done!")
print("Deleting sessions...")
Session.objects.all().delete()
print("Done!")
print("Clearing accounts...")
qs = User.objects.all()
for u in qs:
u.username = "USER #" + str(u.id)
u.first_name = ""
u.last_name= ""
u.email = "[email protected]"
u.password = u.set_password("password")
u.is_staff = False
u.is_superuser = False
u.save()
print("Private data has removed. Database can be publicly shared.")
print("DONE.")
else:
print("ABORTED.")
if __name__ == '__main__':
main()
| Remove sessions when scrubbing DB for public release
import os
import sys
import django
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from museum_site.models import * # noqa: E402
from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402
def main():
print("WARNING! THIS WILL PERMANENTLY REMOVE DATA FROM THIS DATABASE")
print("Are you sure you wish to remove all non-public data?")
confirm = input("Type 'yes' to confirm: ")
if confirm == "yes":
print("Deleting articles...")
for a in Article.objects.filter(published=REMOVED_ARTICLE):
print(a)
a.delete()
print("Done!")
print("Deleting file objects...")
for f in File.objects.filter(details__id=DETAIL_REMOVED):
print(f)
f.delete()
print("Done!")
print("Private data has removed. Database can be publicly shared.")
print("DONE.")
else:
print("ABORTED.")
if __name__ == '__main__':
main()
|
5252e86a9613545cbd6db2f0867276abac994282 | run.py | run.py | from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
| from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
| Add ability to programatically play sound after button press | Add ability to programatically play sound after button press
| Python | mit | ColdSauce/tw-1,zachlatta/tw-1,christophert/tw-1 | from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.play("http://demo.twilio.com/hellomonkey/monkey.mp3")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
| Add ability to programatically play sound after button press
from flask import Flask, request, redirect
import twilio.twiml
app = Flask(__name__)
@app.route("/", methods=['GET', 'POST'])
def hello_monkey():
"""Respond to incoming requests"""
resp = twilio.twiml.Response()
with resp.gather(numDigits=1, action="/handle-key", method="POST") as g:
g.say("press 1 or something")
return str(resp)
@app.route("/handle-key", methods=['GET', 'POST'])
def handle_key():
digit_pressed = request.values.get('Digits', None)
if digit_pressed == "1":
resp = twilio.twiml.Response()
resp.redirect("http://b9ff5a36.ngrok.io/twl/modified.xml")
return str(resp)
else:
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
|
3f136f153cdc60c1dcc757a8a35ef116bb892a1c | python/prep_policekml.py | python/prep_policekml.py | """
A collection of classes used to manipulate Police KML data, used with prepgml4ogr.py.
"""
import os
from lxml import etree
class prep_kml():
def __init__ (self, inputfile):
self.inputfile = inputfile
self.infile = os.path.basename(inputfile)
self.feat_types = ['Placemark']
def get_feat_types(self):
return self.feat_types
def prepare_feature(self, feat_str):
# Parse the xml string into something useful
feat_elm = etree.fromstring(feat_str)
feat_elm = self._prepare_feat_elm(feat_elm)
return etree.tostring(feat_elm, encoding='UTF-8', pretty_print=True).decode('utf_8');
def _prepare_feat_elm(self, feat_elm):
feat_elm = self._add_filename_elm(feat_elm)
return feat_elm
def _add_filename_elm(self, feat_elm):
# Create an element with the fid
elm = etree.SubElement(feat_elm, "name")
elm.text = self.infile[:-4]
elm = etree.SubElement(feat_elm, "description")
elm.text = os.path.dirname(self.inputfile).split('/')[-1]
return feat_elm
| """
prep_kml class used to manipulate police.uk KML data, used with prepgml4ogr.py
"""
import os
from lxml import etree
class prep_kml():
def __init__(self, inputfile):
self.inputfile = inputfile
self.infile = os.path.basename(inputfile)
self.feat_types = ['Placemark']
def get_feat_types(self):
return self.feat_types
def prepare_feature(self, feat_str):
# Parse the xml string into something useful
feat_elm = etree.fromstring(feat_str)
feat_elm = self._prepare_feat_elm(feat_elm)
return etree.tostring(feat_elm, encoding='UTF-8', pretty_print=True).decode('utf_8');
def _prepare_feat_elm(self, feat_elm):
feat_elm = self._add_filename_elm(feat_elm)
return feat_elm
def _add_filename_elm(self, feat_elm):
elm = etree.SubElement(feat_elm, "name")
elm.text = self.infile[:-4]
elm = etree.SubElement(feat_elm, "description")
elm.text = os.path.dirname(self.inputfile).split('/')[-1]
return feat_elm
| Remove stray comment, update docstring and minor PEP8 changes | Remove stray comment, update docstring and minor PEP8 changes
| Python | mit | AstunTechnology/Loader | """
prep_kml class used to manipulate police.uk KML data, used with prepgml4ogr.py
"""
import os
from lxml import etree
class prep_kml():
def __init__(self, inputfile):
self.inputfile = inputfile
self.infile = os.path.basename(inputfile)
self.feat_types = ['Placemark']
def get_feat_types(self):
return self.feat_types
def prepare_feature(self, feat_str):
# Parse the xml string into something useful
feat_elm = etree.fromstring(feat_str)
feat_elm = self._prepare_feat_elm(feat_elm)
return etree.tostring(feat_elm, encoding='UTF-8', pretty_print=True).decode('utf_8');
def _prepare_feat_elm(self, feat_elm):
feat_elm = self._add_filename_elm(feat_elm)
return feat_elm
def _add_filename_elm(self, feat_elm):
elm = etree.SubElement(feat_elm, "name")
elm.text = self.infile[:-4]
elm = etree.SubElement(feat_elm, "description")
elm.text = os.path.dirname(self.inputfile).split('/')[-1]
return feat_elm
| Remove stray comment, update docstring and minor PEP8 changes
"""
A collection of classes used to manipulate Police KML data, used with prepgml4ogr.py.
"""
import os
from lxml import etree
class prep_kml():
def __init__ (self, inputfile):
self.inputfile = inputfile
self.infile = os.path.basename(inputfile)
self.feat_types = ['Placemark']
def get_feat_types(self):
return self.feat_types
def prepare_feature(self, feat_str):
# Parse the xml string into something useful
feat_elm = etree.fromstring(feat_str)
feat_elm = self._prepare_feat_elm(feat_elm)
return etree.tostring(feat_elm, encoding='UTF-8', pretty_print=True).decode('utf_8');
def _prepare_feat_elm(self, feat_elm):
feat_elm = self._add_filename_elm(feat_elm)
return feat_elm
def _add_filename_elm(self, feat_elm):
# Create an element with the fid
elm = etree.SubElement(feat_elm, "name")
elm.text = self.infile[:-4]
elm = etree.SubElement(feat_elm, "description")
elm.text = os.path.dirname(self.inputfile).split('/')[-1]
return feat_elm
|
d7ab04186f3b8c7c58b654a7372b1d4f3ffad64e | tests/unit/test_domain_commands.py | tests/unit/test_domain_commands.py | from caspy.domain import command, models
class TestBook:
def test_prepare_new_book(self):
empty_book = models.Book()
result = command.prepare_book(empty_book, 'now')
assert isinstance(result, models.Book)
assert result.created_at == 'now'
def test_prepare_old_book(self):
dated_book = models.Book(created_at='last week')
result = command.prepare_book(dated_book, 'now')
assert isinstance(result, models.Book)
assert result.created_at == 'last week'
| Add unit tests for prepare_book | Add unit tests for prepare_book
| Python | bsd-3-clause | altaurog/django-caspy,altaurog/django-caspy,altaurog/django-caspy | from caspy.domain import command, models
class TestBook:
def test_prepare_new_book(self):
empty_book = models.Book()
result = command.prepare_book(empty_book, 'now')
assert isinstance(result, models.Book)
assert result.created_at == 'now'
def test_prepare_old_book(self):
dated_book = models.Book(created_at='last week')
result = command.prepare_book(dated_book, 'now')
assert isinstance(result, models.Book)
assert result.created_at == 'last week'
| Add unit tests for prepare_book
|
|
29cde856d41fc8654735aa5233e5983178a8e08e | wp2github/_version.py | wp2github/_version.py | __version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
| __version_info__ = (1, 0, 3)
__version__ = '.'.join(map(str, __version_info__))
| Replace Markdown README with reStructured text | Replace Markdown README with reStructured text
| Python | mit | r8/wp2github.py | __version_info__ = (1, 0, 3)
__version__ = '.'.join(map(str, __version_info__))
| Replace Markdown README with reStructured text
__version_info__ = (1, 0, 2)
__version__ = '.'.join(map(str, __version_info__))
|
ecbb3ffdf063bc53eae0f8bd180e62ae61f99fee | opencontrail_netns/vrouter_control.py | opencontrail_netns/vrouter_control.py |
from contrail_vrouter_api.vrouter_api import ContrailVRouterApi
def interface_register(vm, vmi, iface_name):
api = ContrailVRouterApi()
mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
api.add_port(vm.uuid, vmi.uuid, iface_name, mac)
def interface_unregister(vmi_uuid):
api = ContrailVRouterApi()
api.delete_port(vmi_uuid)
|
from contrail_vrouter_api.vrouter_api import ContrailVRouterApi
def interface_register(vm, vmi, iface_name):
api = ContrailVRouterApi()
mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
api.add_port(vm.uuid, vmi.uuid, iface_name, mac, port_type='NovaVMPort')
def interface_unregister(vmi_uuid):
api = ContrailVRouterApi()
api.delete_port(vmi_uuid)
| Use NovaVMPort type; otherwise the agent will believe it is a Use NovaVMPort as type; otherwise the agent will believe it is dealing with a service-instance and will not send a VM registration. | Use NovaVMPort type; otherwise the agent will believe it is a
Use NovaVMPort as type; otherwise the agent will believe it is dealing with
a service-instance and will not send a VM registration.
| Python | apache-2.0 | pedro-r-marques/opencontrail-netns,DreamLab/opencontrail-netns |
from contrail_vrouter_api.vrouter_api import ContrailVRouterApi
def interface_register(vm, vmi, iface_name):
api = ContrailVRouterApi()
mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
api.add_port(vm.uuid, vmi.uuid, iface_name, mac, port_type='NovaVMPort')
def interface_unregister(vmi_uuid):
api = ContrailVRouterApi()
api.delete_port(vmi_uuid)
| Use NovaVMPort type; otherwise the agent will believe it is a
Use NovaVMPort as type; otherwise the agent will believe it is dealing with
a service-instance and will not send a VM registration.
from contrail_vrouter_api.vrouter_api import ContrailVRouterApi
def interface_register(vm, vmi, iface_name):
api = ContrailVRouterApi()
mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
api.add_port(vm.uuid, vmi.uuid, iface_name, mac)
def interface_unregister(vmi_uuid):
api = ContrailVRouterApi()
api.delete_port(vmi_uuid)
|
389ca2213c2ba3c86c783372e3e933a12f90506e | ckanext/requestdata/controllers/admin.py | ckanext/requestdata/controllers/admin.py | from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html') | from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html') | Extend Admin instead of Base controller | Extend Admin instead of Base controller
| Python | agpl-3.0 | ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata,ViderumGlobal/ckanext-requestdata | from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
from ckan.controllers.admin import AdminController
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(AdminController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html') | Extend Admin instead of Base controller
from ckan.lib import base
from ckan import logic
from ckan.plugins import toolkit
get_action = logic.get_action
NotFound = logic.NotFound
NotAuthorized = logic.NotAuthorized
redirect = base.redirect
abort = base.abort
BaseController = base.BaseController
class AdminController(BaseController):
def email(self):
'''Email template admin tab.
:param :
:type
'''
return toolkit.render('admin/email.html')
def requests_data(self):
'''
Return all of the data requests in admin panel
:return:
'''
return toolkit.render('admin/all_requests_data.html') |
dcbb22300663f0484e81c13770f196e078e83ca5 | api/base/parsers.py | api/base/parsers.py |
from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
| from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
| Add parse method which flattens data dictionary. | Add parse method which flattens data dictionary.
| Python | apache-2.0 | icereval/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,abought/osf.io,sloria/osf.io,aaxelb/osf.io,zamattiac/osf.io,KAsante95/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,pattisdr/osf.io,laurenrevere/osf.io,zamattiac/osf.io,emetsger/osf.io,rdhyee/osf.io,cslzchen/osf.io,mluke93/osf.io,samchrisinger/osf.io,mattclark/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,icereval/osf.io,mluo613/osf.io,haoyuchen1992/osf.io,doublebits/osf.io,icereval/osf.io,jnayak1/osf.io,hmoco/osf.io,mattclark/osf.io,DanielSBrown/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,Ghalko/osf.io,doublebits/osf.io,ticklemepierce/osf.io,pattisdr/osf.io,alexschiller/osf.io,kch8qx/osf.io,acshi/osf.io,billyhunt/osf.io,billyhunt/osf.io,petermalcolm/osf.io,wearpants/osf.io,samchrisinger/osf.io,cwisecarver/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,ZobairAlijan/osf.io,kch8qx/osf.io,brandonPurvis/osf.io,felliott/osf.io,Nesiehr/osf.io,samchrisinger/osf.io,crcresearch/osf.io,cslzchen/osf.io,zachjanicki/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,petermalcolm/osf.io,SSJohns/osf.io,felliott/osf.io,caseyrollins/osf.io,emetsger/osf.io,billyhunt/osf.io,abought/osf.io,zachjanicki/osf.io,laurenrevere/osf.io,samanehsan/osf.io,Johnetordoff/osf.io,kwierman/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,cwisecarver/osf.io,chennan47/osf.io,caseyrollins/osf.io,zamattiac/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,mluo613/osf.io,DanielSBrown/osf.io,chrisseto/osf.io,caseyrygt/osf.io,danielneis/osf.io,danielneis/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,emetsger/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,amyshi188/osf.io,adlius/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,chennan47/osf.io,cosenal/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,njantrania/osf.io,kch8qx/osf.io,cosenal/osf.io,adlius/osf.io,binoculars/osf.io,chrisseto/osf.io,SSJohns/osf.io,RomanZWang/osf.io,zamattiac/osf.io,KAsante95/osf.io,chrisseto/osf.io,zachjanicki/osf.io,binoculars/osf.io,emetsger/osf.io,jnayak1/osf.io,felliott/osf.io,laurenrevere/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,amyshi188/osf.io,samanehsan/osf.io,abought/osf.io,TomHeatwole/osf.io,wearpants/osf.io,billyhunt/osf.io,sloria/osf.io,RomanZWang/osf.io,acshi/osf.io,rdhyee/osf.io,kwierman/osf.io,ticklemepierce/osf.io,jnayak1/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,binoculars/osf.io,amyshi188/osf.io,saradbowman/osf.io,felliott/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,aaxelb/osf.io,kwierman/osf.io,pattisdr/osf.io,caneruguz/osf.io,danielneis/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,chennan47/osf.io,sloria/osf.io,adlius/osf.io,cwisecarver/osf.io,aaxelb/osf.io,petermalcolm/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,danielneis/osf.io,mluo613/osf.io,doublebits/osf.io,brianjgeiger/osf.io,doublebits/osf.io,acshi/osf.io,mfraezz/osf.io,amyshi188/osf.io,baylee-d/osf.io,cslzchen/osf.io,baylee-d/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,samanehsan/osf.io,KAsante95/osf.io,mluke93/osf.io,mluke93/osf.io,chrisseto/osf.io,zachjanicki/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,aaxelb/osf.io,adlius/osf.io,abought/osf.io,alexschiller/osf.io,erinspace/osf.io,mattclark/osf.io,njantrania/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,SSJohns/osf.io,acshi/osf.io,monikagrabowska/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,kch8qx/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,RomanZWang/osf.io,hmoco/osf.io,mluke93/osf.io,kch8qx/osf.io,jnayak1/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,Ghalko/osf.io,erinspace/osf.io,doublebits/osf.io,petermalcolm/osf.io,mluo613/osf.io,acshi/osf.io,leb2dg/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,njantrania/osf.io,billyhunt/osf.io,caseyrygt/osf.io,SSJohns/osf.io,Ghalko/osf.io,crcresearch/osf.io,caneruguz/osf.io,caneruguz/osf.io,hmoco/osf.io,KAsante95/osf.io,ticklemepierce/osf.io,ZobairAlijan/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,kwierman/osf.io,saradbowman/osf.io,GageGaskins/osf.io,mluo613/osf.io,Ghalko/osf.io,njantrania/osf.io,cosenal/osf.io,alexschiller/osf.io | from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
from api.base.exceptions import JSONAPIException
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data
"""
result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context)
data = result.get('data', {})
if data:
if 'attributes' not in data:
raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.')
id = data.get('id')
type = data.get('type')
attributes = data.get('attributes')
parsed = {'id': id, 'type': type}
parsed.update(attributes)
return parsed
else:
raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.')
| Add parse method which flattens data dictionary.
from rest_framework.parsers import JSONParser
from api.base.renderers import JSONAPIRenderer
class JSONAPIParser(JSONParser):
"""
Parses JSON-serialized data. Overrides media_type.
"""
media_type = 'application/vnd.api+json'
renderer_class = JSONAPIRenderer
|
3588c52060b540f6d3ca791c7309b4e9185a60aa | config.py | config.py | class Config(object):
"""
Base configuration class. Contains one method that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
def __init__(self):
self.database_uri()
def database_uri(self):
if self.DIALECT == 'sqlite':
self.DATABASE_URI = r'sqlite://{name}'.format(name=self.DBNAME)
else:
self.DATABASE_URI = r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
| class Config(object):
"""
Base configuration class. Contains one property that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
@property
def database_uri(self):
return r'sqlite://{name}'.format(name=self.DBNAME) if self.DIALECT == 'sqlite' else \
r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
| Replace database_uri method with a property | Replace database_uri method with a property
| Python | mit | soccermetrics/marcotti-mls | class Config(object):
"""
Base configuration class. Contains one property that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
@property
def database_uri(self):
return r'sqlite://{name}'.format(name=self.DBNAME) if self.DIALECT == 'sqlite' else \
r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
| Replace database_uri method with a property
class Config(object):
"""
Base configuration class. Contains one method that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
def __init__(self):
self.database_uri()
def database_uri(self):
if self.DIALECT == 'sqlite':
self.DATABASE_URI = r'sqlite://{name}'.format(name=self.DBNAME)
else:
self.DATABASE_URI = r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
|
9c2bee9fe8442cad0761d196d78baaff37c9cb08 | mff_rams_plugin/config.py | mff_rams_plugin/config.py | from uber.common import *
config = parse_config(__file__)
c.include_plugin_config(config)
c.DEALER_BADGE_PRICE = c.BADGE_PRICE | from uber.common import *
config = parse_config(__file__)
c.include_plugin_config(config)
@Config.mixin
class ExtraConfig:
@property
def DEALER_BADGE_PRICE(self):
return self.get_attendee_price()
| Fix DB errors on stop/re-up Due to the fact that this code was being run before everything else, it would cause server-stopping errors -- but only when starting the server for the first time. It took a little bit to track down, but this is the correct way to override this variable. | Fix DB errors on stop/re-up
Due to the fact that this code was being run before everything else, it would cause server-stopping errors -- but only when starting the server for the first time. It took a little bit to track down, but this is the correct way to override this variable.
| Python | agpl-3.0 | MidwestFurryFandom/mff-rams-plugin,MidwestFurryFandom/mff-rams-plugin | from uber.common import *
config = parse_config(__file__)
c.include_plugin_config(config)
@Config.mixin
class ExtraConfig:
@property
def DEALER_BADGE_PRICE(self):
return self.get_attendee_price()
| Fix DB errors on stop/re-up
Due to the fact that this code was being run before everything else, it would cause server-stopping errors -- but only when starting the server for the first time. It took a little bit to track down, but this is the correct way to override this variable.
from uber.common import *
config = parse_config(__file__)
c.include_plugin_config(config)
c.DEALER_BADGE_PRICE = c.BADGE_PRICE |
45ee26fae4a8d31b66e3307c0ab4aed21678b4b6 | scrubadub/filth/named_entity.py | scrubadub/filth/named_entity.py | from .base import Filth
class NamedEntityFilth(Filth):
"""
Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
"""
type = 'named_entity'
def __init__(self, *args, label: str, **kwargs):
super(NamedEntityFilth, self).__init__(*args, **kwargs)
self.type = "{}_{}".format(self.type, label).lower()
| from .base import Filth
class NamedEntityFilth(Filth):
"""
Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
"""
type = 'named_entity'
def __init__(self, *args, label: str, **kwargs):
super(NamedEntityFilth, self).__init__(*args, **kwargs)
self.label = label.lower()
| Revert NamedEntityFilth name because it was a bad idea | Revert NamedEntityFilth name because it was a bad idea
| Python | mit | deanmalmgren/scrubadub,datascopeanalytics/scrubadub,deanmalmgren/scrubadub,datascopeanalytics/scrubadub | from .base import Filth
class NamedEntityFilth(Filth):
"""
Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
"""
type = 'named_entity'
def __init__(self, *args, label: str, **kwargs):
super(NamedEntityFilth, self).__init__(*args, **kwargs)
self.label = label.lower()
| Revert NamedEntityFilth name because it was a bad idea
from .base import Filth
class NamedEntityFilth(Filth):
"""
Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
"""
type = 'named_entity'
def __init__(self, *args, label: str, **kwargs):
super(NamedEntityFilth, self).__init__(*args, **kwargs)
self.type = "{}_{}".format(self.type, label).lower()
|
d7fa7d2bacd45a50f14e4e1aeae57cfc56a315db | __init__.py | __init__.py | from openedoo_project import db
from openedoo.core.libs import Blueprint
from .controllers.employee import EmployeeLogin, EmployeeLogout, AddEmployee, \
AssignEmployeeAsTeacher, EmployeeDashboard, EditEmployee, DeleteEmployee, \
SearchEmployee, AddSubject
module_employee = Blueprint('module_employee', __name__,
template_folder='templates',
static_folder='static')
module_employee.add_url_rule('/admin/dashboard',
view_func=EmployeeDashboard.as_view('dashboard'))
module_employee.add_url_rule('/admin/login',
view_func=EmployeeLogin.as_view('login'))
module_employee.add_url_rule('/admin/logout',
view_func=EmployeeLogout.as_view('logout'))
module_employee.add_url_rule('/admin/add',
view_func=AddEmployee.as_view('add'))
module_employee.add_url_rule('/admin/edit',
view_func=EditEmployee.as_view('edit'))
assignEmployeeAsTeacherView = AssignEmployeeAsTeacher.as_view('assign')
module_employee.add_url_rule('/admin/assign',
view_func=assignEmployeeAsTeacherView)
module_employee.add_url_rule('/admin/delete',
view_func=DeleteEmployee.as_view('delete'))
module_employee.add_url_rule('/search',
view_func=SearchEmployee.as_view('search'))
module_employee.add_url_rule('/admin/subject/add',
view_func=AddSubject.as_view('add_subject'))
| from openedoo_project import db
from openedoo.core.libs import Blueprint
from .controllers.employee import EmployeeLogin, EmployeeLogout, AddEmployee, \
AssignEmployeeAsTeacher, EmployeeDashboard, EditEmployee, DeleteEmployee, \
SearchEmployee, AddSubject
module_employee = Blueprint('module_employee', __name__,
template_folder='templates',
static_folder='static')
module_employee.add_url_rule('/admin',
view_func=EmployeeDashboard.as_view('dashboard'))
module_employee.add_url_rule('/admin/login',
view_func=EmployeeLogin.as_view('login'))
module_employee.add_url_rule('/admin/logout',
view_func=EmployeeLogout.as_view('logout'))
module_employee.add_url_rule('/admin/add',
view_func=AddEmployee.as_view('add'))
module_employee.add_url_rule('/admin/edit',
view_func=EditEmployee.as_view('edit'))
assignEmployeeAsTeacherView = AssignEmployeeAsTeacher.as_view('assign')
module_employee.add_url_rule('/admin/assign',
view_func=assignEmployeeAsTeacherView)
module_employee.add_url_rule('/admin/delete',
view_func=DeleteEmployee.as_view('delete'))
module_employee.add_url_rule('/search',
view_func=SearchEmployee.as_view('search'))
module_employee.add_url_rule('/admin/subject/add',
view_func=AddSubject.as_view('add_subject'))
| Make dashboard route become admin's default | Make dashboard route become admin's default
| Python | mit | openedoo/module_employee,openedoo/module_employee,openedoo/module_employee | from openedoo_project import db
from openedoo.core.libs import Blueprint
from .controllers.employee import EmployeeLogin, EmployeeLogout, AddEmployee, \
AssignEmployeeAsTeacher, EmployeeDashboard, EditEmployee, DeleteEmployee, \
SearchEmployee, AddSubject
module_employee = Blueprint('module_employee', __name__,
template_folder='templates',
static_folder='static')
module_employee.add_url_rule('/admin',
view_func=EmployeeDashboard.as_view('dashboard'))
module_employee.add_url_rule('/admin/login',
view_func=EmployeeLogin.as_view('login'))
module_employee.add_url_rule('/admin/logout',
view_func=EmployeeLogout.as_view('logout'))
module_employee.add_url_rule('/admin/add',
view_func=AddEmployee.as_view('add'))
module_employee.add_url_rule('/admin/edit',
view_func=EditEmployee.as_view('edit'))
assignEmployeeAsTeacherView = AssignEmployeeAsTeacher.as_view('assign')
module_employee.add_url_rule('/admin/assign',
view_func=assignEmployeeAsTeacherView)
module_employee.add_url_rule('/admin/delete',
view_func=DeleteEmployee.as_view('delete'))
module_employee.add_url_rule('/search',
view_func=SearchEmployee.as_view('search'))
module_employee.add_url_rule('/admin/subject/add',
view_func=AddSubject.as_view('add_subject'))
| Make dashboard route become admin's default
from openedoo_project import db
from openedoo.core.libs import Blueprint
from .controllers.employee import EmployeeLogin, EmployeeLogout, AddEmployee, \
AssignEmployeeAsTeacher, EmployeeDashboard, EditEmployee, DeleteEmployee, \
SearchEmployee, AddSubject
module_employee = Blueprint('module_employee', __name__,
template_folder='templates',
static_folder='static')
module_employee.add_url_rule('/admin/dashboard',
view_func=EmployeeDashboard.as_view('dashboard'))
module_employee.add_url_rule('/admin/login',
view_func=EmployeeLogin.as_view('login'))
module_employee.add_url_rule('/admin/logout',
view_func=EmployeeLogout.as_view('logout'))
module_employee.add_url_rule('/admin/add',
view_func=AddEmployee.as_view('add'))
module_employee.add_url_rule('/admin/edit',
view_func=EditEmployee.as_view('edit'))
assignEmployeeAsTeacherView = AssignEmployeeAsTeacher.as_view('assign')
module_employee.add_url_rule('/admin/assign',
view_func=assignEmployeeAsTeacherView)
module_employee.add_url_rule('/admin/delete',
view_func=DeleteEmployee.as_view('delete'))
module_employee.add_url_rule('/search',
view_func=SearchEmployee.as_view('search'))
module_employee.add_url_rule('/admin/subject/add',
view_func=AddSubject.as_view('add_subject'))
|
1b502cdf399b5b9cd4593aea82750b77114fe858 | examples/flask_hello.py | examples/flask_hello.py | from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
| import time
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/sleep')
def sleep():
time.sleep(0.1)
return 'Good morning!'
@app.route('/dosomething')
def do_something():
import requests
requests.get('http://google.com')
return 'Google says hello!'
| Add some more endpoints to the flask example | Add some more endpoints to the flask example
| Python | bsd-3-clause | joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument | import time
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/sleep')
def sleep():
time.sleep(0.1)
return 'Good morning!'
@app.route('/dosomething')
def do_something():
import requests
requests.get('http://google.com')
return 'Google says hello!'
| Add some more endpoints to the flask example
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
|
91db70d1fc266e3e3925e84fcaf83410e0504e37 | Lib/tkinter/test/test_tkinter/test_font.py | Lib/tkinter/test/test_tkinter/test_font.py | import unittest
import tkinter
from tkinter import font
from test.support import requires, run_unittest
import tkinter.test.support as support
requires('gui')
class FontTest(unittest.TestCase):
def setUp(self):
support.root_deiconify()
def tearDown(self):
support.root_withdraw()
def test_font_eq(self):
font1 = font.nametofont("TkDefaultFont")
font2 = font.nametofont("TkDefaultFont")
self.assertIsNot(font1, font2)
self.assertEqual(font1, font2)
self.assertNotEqual(font1, font1.copy())
self.assertNotEqual(font1, 0)
tests_gui = (FontTest, )
if __name__ == "__main__":
run_unittest(*tests_gui)
| import unittest
import tkinter
from tkinter import font
from test.support import requires, run_unittest
import tkinter.test.support as support
requires('gui')
class FontTest(unittest.TestCase):
def setUp(self):
support.root_deiconify()
def tearDown(self):
support.root_withdraw()
def test_font_eq(self):
fontname = "TkDefaultFont"
try:
f = font.Font(name=fontname, exists=True)
except tkinter._tkinter.TclError:
f = font.Font(name=fontname, exists=False)
font1 = font.nametofont(fontname)
font2 = font.nametofont(fontname)
self.assertIsNot(font1, font2)
self.assertEqual(font1, font2)
self.assertNotEqual(font1, font1.copy())
self.assertNotEqual(font1, 0)
tests_gui = (FontTest, )
if __name__ == "__main__":
run_unittest(*tests_gui)
| Fix test_tk under OS X with Tk 8.4. Patch by Ned Deily. This should fix some buildbot failures. | Fix test_tk under OS X with Tk 8.4. Patch by Ned Deily.
This should fix some buildbot failures.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | import unittest
import tkinter
from tkinter import font
from test.support import requires, run_unittest
import tkinter.test.support as support
requires('gui')
class FontTest(unittest.TestCase):
def setUp(self):
support.root_deiconify()
def tearDown(self):
support.root_withdraw()
def test_font_eq(self):
fontname = "TkDefaultFont"
try:
f = font.Font(name=fontname, exists=True)
except tkinter._tkinter.TclError:
f = font.Font(name=fontname, exists=False)
font1 = font.nametofont(fontname)
font2 = font.nametofont(fontname)
self.assertIsNot(font1, font2)
self.assertEqual(font1, font2)
self.assertNotEqual(font1, font1.copy())
self.assertNotEqual(font1, 0)
tests_gui = (FontTest, )
if __name__ == "__main__":
run_unittest(*tests_gui)
| Fix test_tk under OS X with Tk 8.4. Patch by Ned Deily.
This should fix some buildbot failures.
import unittest
import tkinter
from tkinter import font
from test.support import requires, run_unittest
import tkinter.test.support as support
requires('gui')
class FontTest(unittest.TestCase):
def setUp(self):
support.root_deiconify()
def tearDown(self):
support.root_withdraw()
def test_font_eq(self):
font1 = font.nametofont("TkDefaultFont")
font2 = font.nametofont("TkDefaultFont")
self.assertIsNot(font1, font2)
self.assertEqual(font1, font2)
self.assertNotEqual(font1, font1.copy())
self.assertNotEqual(font1, 0)
tests_gui = (FontTest, )
if __name__ == "__main__":
run_unittest(*tests_gui)
|
b80b781b8f446b8149b948a6ec4aeff63fd728ce | Orange/widgets/utils/plot/__init__.py | Orange/widgets/utils/plot/__init__.py | """
*************************
Plot classes and tools for use in Orange widgets
*************************
The main class of this module is :obj:`.OWPlot`, from which all plots
in visualization widgets should inherit.
This module also contains plot elements, which are normally used by the :obj:`.OWPlot`,
but can also be used directly or subclassed
"""
from .owplotgui import *
from .owpalette import *
from .owconstants import *
try:
from .owcurve import *
from .owpoint import *
from .owlegend import *
from .owaxis import *
from .owplot import *
from .owtools import *
except ImportError:
pass
| """
*************************
Plot classes and tools for use in Orange widgets
*************************
The main class of this module is :obj:`.OWPlot`, from which all plots
in visualization widgets should inherit.
This module also contains plot elements, which are normally used by the :obj:`.OWPlot`,
but can also be used directly or subclassed
"""
from .owplotgui import *
from .owpalette import *
from .owconstants import *
try:
from .owcurve import *
from .owpoint import *
from .owlegend import *
from .owaxis import *
from .owplot import *
from .owtools import *
except (ImportError, RuntimeError):
pass
| Handle PyQt 5.3 raising RuntimeError on incompatible orangeqt import | Handle PyQt 5.3 raising RuntimeError on incompatible orangeqt import
| Python | bsd-2-clause | cheral/orange3,cheral/orange3,cheral/orange3,cheral/orange3,cheral/orange3,cheral/orange3 | """
*************************
Plot classes and tools for use in Orange widgets
*************************
The main class of this module is :obj:`.OWPlot`, from which all plots
in visualization widgets should inherit.
This module also contains plot elements, which are normally used by the :obj:`.OWPlot`,
but can also be used directly or subclassed
"""
from .owplotgui import *
from .owpalette import *
from .owconstants import *
try:
from .owcurve import *
from .owpoint import *
from .owlegend import *
from .owaxis import *
from .owplot import *
from .owtools import *
except (ImportError, RuntimeError):
pass
| Handle PyQt 5.3 raising RuntimeError on incompatible orangeqt import
"""
*************************
Plot classes and tools for use in Orange widgets
*************************
The main class of this module is :obj:`.OWPlot`, from which all plots
in visualization widgets should inherit.
This module also contains plot elements, which are normally used by the :obj:`.OWPlot`,
but can also be used directly or subclassed
"""
from .owplotgui import *
from .owpalette import *
from .owconstants import *
try:
from .owcurve import *
from .owpoint import *
from .owlegend import *
from .owaxis import *
from .owplot import *
from .owtools import *
except ImportError:
pass
|
f9a4ae44f33279632396716fbd808e80773f0a71 | widelanguagedemo/assets.py | widelanguagedemo/assets.py | # -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
| # -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
| Work around a js minification bug. | Work around a js minification bug.
| Python | bsd-3-clause | larsyencken/wide-language-demo,larsyencken/wide-language-demo,larsyencken/wide-language-demo,larsyencken/wide-language-demo,larsyencken/wide-language-demo | # -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='rjsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
| Work around a js minification bug.
# -*- coding: utf-8 -*-
from flask.ext.assets import Bundle, Environment
css = Bundle(
"libs/bootstrap/dist/css/bootstrap.css",
"css/style.css",
filters="cssmin",
output="public/css/common.css"
)
js = Bundle(
"libs/jQuery/dist/jquery.js",
"libs/bootstrap/dist/js/bootstrap.js",
"libs/typeahead.bundle.js",
"libs/handlebars/handlebars.js",
"js/plugins.js",
filters='jsmin',
output="public/js/common.js"
)
assets = Environment()
assets.register("js_all", js)
assets.register("css_all", css)
|
8db65c9f6ec67e188dd6cd11f7a7933d371e323d | feed/tests/test_contactview.py | feed/tests/test_contactview.py | from django.contrib.auth.models import User
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from feed.views import ContactViewSet
from workflow.models import Contact, Country, Organization, TolaUser, \
WorkflowLevel1, WorkflowTeam
class ContactViewsTest(TestCase):
def setUp(self):
self.user = User.objects.create_user('john', '[email protected]',
'johnpassword')
self.user.is_superuser = True
self.user.is_staff = True
self.user.save()
self.country = Country.objects.create(country='Afghanistan', code='AF')
Contact.objects.bulk_create([
Contact(name='Contact_0', country=self.country),
Contact(name='Contact_1', country=self.country),
])
factory = APIRequestFactory()
self.request = factory.get('/api/contact/')
def test_list_contact_superuser(self):
self.request.user = self.user
view = ContactViewSet.as_view({'get': 'list'})
response = view(self.request)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 2)
def test_list_contact_normaluser(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
TolaUser.objects.create(user=self.user, organization=organization)
self.request.user = self.user
view = ContactViewSet.as_view({'get': 'list'})
response = view(self.request)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 0)
def test_list_contact_normaluser_one_result(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
tola_user = TolaUser.objects.create(user=self.user,
organization=organization)
wflvl1 = WorkflowLevel1.objects.create(name='WorkflowLevel1',
organization=organization)
WorkflowTeam.objects.create(workflow_user=tola_user,
workflowlevel1=wflvl1)
Contact.objects.create(name='Contact_0', country=self.country,
organization=organization,
workflowlevel1=wflvl1)
self.request.user = self.user
view = ContactViewSet.as_view({'get': 'list'})
response = view(self.request)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 1)
| Add unit test for Contact view | Add unit test for Contact view
| Python | apache-2.0 | toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity | from django.contrib.auth.models import User
from django.test import TestCase
from rest_framework.test import APIRequestFactory
from feed.views import ContactViewSet
from workflow.models import Contact, Country, Organization, TolaUser, \
WorkflowLevel1, WorkflowTeam
class ContactViewsTest(TestCase):
def setUp(self):
self.user = User.objects.create_user('john', '[email protected]',
'johnpassword')
self.user.is_superuser = True
self.user.is_staff = True
self.user.save()
self.country = Country.objects.create(country='Afghanistan', code='AF')
Contact.objects.bulk_create([
Contact(name='Contact_0', country=self.country),
Contact(name='Contact_1', country=self.country),
])
factory = APIRequestFactory()
self.request = factory.get('/api/contact/')
def test_list_contact_superuser(self):
self.request.user = self.user
view = ContactViewSet.as_view({'get': 'list'})
response = view(self.request)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 2)
def test_list_contact_normaluser(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
TolaUser.objects.create(user=self.user, organization=organization)
self.request.user = self.user
view = ContactViewSet.as_view({'get': 'list'})
response = view(self.request)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 0)
def test_list_contact_normaluser_one_result(self):
self.user.is_superuser = False
self.user.is_staff = False
self.user.save()
organization = Organization.objects.create(name="TestOrg")
tola_user = TolaUser.objects.create(user=self.user,
organization=organization)
wflvl1 = WorkflowLevel1.objects.create(name='WorkflowLevel1',
organization=organization)
WorkflowTeam.objects.create(workflow_user=tola_user,
workflowlevel1=wflvl1)
Contact.objects.create(name='Contact_0', country=self.country,
organization=organization,
workflowlevel1=wflvl1)
self.request.user = self.user
view = ContactViewSet.as_view({'get': 'list'})
response = view(self.request)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(response.data), 1)
| Add unit test for Contact view
|
|
eafd68dd70d24f6e5551e2f59d7c13e4be6dce6e | testparser.py | testparser.py | #!/usr/bin/env python
import sys
import os
import subprocess
# ARGUMENTS:
# 1 - path to the parser executable
# 2 - path to the parser-lalr executable
# 3 - path to the source directory to look for *.rs files
parser = sys.argv[1]
parser_lalr = sys.argv[2]
# flex dies on multibyte characters
BLACKLIST = ['libstd/str.rs', 'libstd/strbuf.rs', 'libstd/ascii.rs']
def chk(*args, **kwargs):
return subprocess.check_output(*args, **kwargs)
def compare(p):
if chk(flex, stdin=open(p)) != chk(rlex, stdin=open(p)):
raise Exception("{} differed between the reference lexer and libsyntax's lexer".format(p))
total = 0
parser_ok = 0
parser_lalr_ok = 0
bad_parser = []
bad_parser_lalr = []
print "\n"
for base, dirs, files in os.walk(sys.argv[3]):
for f in filter(lambda p: p.endswith('.rs'), files):
p = os.path.join(base, f)
if any([p.endswith(b) for b in BLACKLIST]):
continue
total += 1
try:
if len(chk(parser, stdin=open(p), stderr=subprocess.STDOUT)) == 0:
parser_ok += 1
else:
bad_parser.append(p)
except subprocess.CalledProcessError:
bad_parser.append(p)
pass
try:
if "syntax error" not in chk(parser_lalr, stdin=open(p), stderr=subprocess.STDOUT):
parser_lalr_ok += 1
else:
bad_parser_lalr.append(p)
except subprocess.CalledProcessError:
bad_parser_lalr.append(p)
pass
sys.stdout.write("\r total: %d, parser: %d, parser-lalr: %d, scanned %-60s" %
(total, parser_ok, parser_lalr_ok, p))
print "\n"
for (filename, bad, parser) in [("parser.bad", bad_parser, parser),
("parser-lalr.bad", bad_parser_lalr, parser_lalr)]:
print("writing %d files that failed to parse with %s to %s" % (len(bad), parser, filename))
with open(filename, "w") as f:
for p in bad:
f.write(p)
f.write("\n")
| Add a new test script to evaluate status of different parsers. | Add a new test script to evaluate status of different parsers.
| Python | mit | patperry/rust-grammar,bleibig/rust-grammar,patperry/rust-grammar,patperry/rust-grammar,bleibig/rust-grammar,bleibig/rust-grammar,patperry/rust-grammar,bleibig/rust-grammar | #!/usr/bin/env python
import sys
import os
import subprocess
# ARGUMENTS:
# 1 - path to the parser executable
# 2 - path to the parser-lalr executable
# 3 - path to the source directory to look for *.rs files
parser = sys.argv[1]
parser_lalr = sys.argv[2]
# flex dies on multibyte characters
BLACKLIST = ['libstd/str.rs', 'libstd/strbuf.rs', 'libstd/ascii.rs']
def chk(*args, **kwargs):
return subprocess.check_output(*args, **kwargs)
def compare(p):
if chk(flex, stdin=open(p)) != chk(rlex, stdin=open(p)):
raise Exception("{} differed between the reference lexer and libsyntax's lexer".format(p))
total = 0
parser_ok = 0
parser_lalr_ok = 0
bad_parser = []
bad_parser_lalr = []
print "\n"
for base, dirs, files in os.walk(sys.argv[3]):
for f in filter(lambda p: p.endswith('.rs'), files):
p = os.path.join(base, f)
if any([p.endswith(b) for b in BLACKLIST]):
continue
total += 1
try:
if len(chk(parser, stdin=open(p), stderr=subprocess.STDOUT)) == 0:
parser_ok += 1
else:
bad_parser.append(p)
except subprocess.CalledProcessError:
bad_parser.append(p)
pass
try:
if "syntax error" not in chk(parser_lalr, stdin=open(p), stderr=subprocess.STDOUT):
parser_lalr_ok += 1
else:
bad_parser_lalr.append(p)
except subprocess.CalledProcessError:
bad_parser_lalr.append(p)
pass
sys.stdout.write("\r total: %d, parser: %d, parser-lalr: %d, scanned %-60s" %
(total, parser_ok, parser_lalr_ok, p))
print "\n"
for (filename, bad, parser) in [("parser.bad", bad_parser, parser),
("parser-lalr.bad", bad_parser_lalr, parser_lalr)]:
print("writing %d files that failed to parse with %s to %s" % (len(bad), parser, filename))
with open(filename, "w") as f:
for p in bad:
f.write(p)
f.write("\n")
| Add a new test script to evaluate status of different parsers.
|
|
d7f184dd7c41bb3cacba5f77c81ae961b3a12760 | subsample_bam_file.py | subsample_bam_file.py | #!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <[email protected]>"
__copyright__ = "2013 by Konrad Foerstner <[email protected]>"
__license__ = "ISC license"
__email__ = "[email protected]"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that multiple alignments. If there previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
| Add script to subsample bam file entries | Add script to subsample bam file entries
| Python | isc | konrad/kuf_bio_scripts | #!/usr/bin/env python
"""
This script subsamples the alignments of a BAM file. For this a
likelihood (0.0 < p(keep) < 1.0) of keeping all alignments of a read
has to be provided. All alignments of a read are treated the same
(i.e. are discarded or kept).
"""
import argparse
import random
import sys
import pysam
__description__ = "Subsample BAM file entries"
__author__ = "Konrad Foerstner <[email protected]>"
__copyright__ = "2013 by Konrad Foerstner <[email protected]>"
__license__ = "ISC license"
__email__ = "[email protected]"
__version__ = "0.1"
parser = argparse.ArgumentParser()
parser.add_argument("input_bam")
parser.add_argument("output_bam")
parser.add_argument("keeping_likelihood", type=float)
args = parser.parse_args()
input_bam = pysam.Samfile(args.input_bam, "rb")
output_bam = pysam.Samfile(
args.output_bam, "wb", referencenames=input_bam.references,
referencelengths=input_bam.lengths, header=input_bam.header,
text=input_bam.text)
prev_query = None
prev_keep = None
for alignment in input_bam:
# This is for reads that multiple alignments. If there previous
# alignment comes from the same read treat the current one the
# same way (keep or discard).
if alignment.qname == prev_query:
if prev_keep is True:
output_bam.write(alignment)
continue
else:
continue
if random.random() <= args.keeping_likelihood:
output_bam.write(alignment)
prev_keep = True
else:
prev_keep = False
prev_query = alignment.qname
| Add script to subsample bam file entries
|
|
a941218e8bacd528cff058d3afaac06e14ac7766 | OpenPNM/PHYS/__GenericPhysics__.py | OpenPNM/PHYS/__GenericPhysics__.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run() | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run() | Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)" | Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced | Python | mit | TomTranter/OpenPNM,amdouglas/OpenPNM,stadelmanma/OpenPNM,PMEAL/OpenPNM,amdouglas/OpenPNM | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run() | Revert "Updated docstring for the file (mostly to diagnose/solve a git branch/merge problem)"
This reverts commit 3bcc40305193f3a46de63f4345812c9c2ee4c27f [formerly e2fe152ba58cfa853637bc5bd805adf0ae9617eb] [formerly 8e549c3bfb3650f08aca2ba204d2904e53aa4ab4].
Former-commit-id: e783ac4d5946403a9d608fe9dffa42212796b402
Former-commit-id: abafc2efec64a1e360594c18b203daa4ea0f7ced
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Author: CEF PNM Team
# License: TBD
# Copyright (c) 2012
#from __future__ import print_function
"""
module __GenericPhysics__: Base class to define pore scale physics
==================================================================
.. warning:: The classes of this module should be loaded through the 'PHYS.__init__.py' file.
"""
import OpenPNM
import scipy as sp
import numpy as np
class GenericPhysics(OpenPNM.BAS.OpenPNMbase):
r"""
"""
def __init__(self,net=OpenPNM.NET.GenericNetwork,**kwords):
r"""
Initialize
"""
super(GenericPhysics,self).__init__(**kwords)
self.indent = ""
self._logger.debug("Construct class")
self._net = net
def Washburn(self):
r'''
this uses the Washburn equation to relate pore size to entry pressure
'''
self._net.throat_properties['Pc_entry'] = -4*0.072*np.cos(np.radians(105))/self._net.throat_properties['diameter']
if __name__ =="__main__":
test = GenericPhysics(loggername="TestGenericPhys")
test.run() |
1c60cf7082672335279d5b96e83f3cb2eb57424f | purchase_supplier_minimum_order/models/__init__.py | purchase_supplier_minimum_order/models/__init__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Enforce minimum PO value for supplier. | Enforce minimum PO value for supplier.
| Python | agpl-3.0 | OpusVL/odoo-purchase-min-order | # -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Enforce minimum PO value for supplier.
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
e4d4e1b79bea641c66dfafe486d94a87c63e6edb | setup.py | setup.py | #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='[email protected]',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
| #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='[email protected]',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
| Update GitHub repos from blancltd to developersociety | Update GitHub repos from blancltd to developersociety
| Python | bsd-3-clause | blancltd/django-latest-tweets | #!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/developersociety/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='[email protected]',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
| Update GitHub repos from blancltd to developersociety
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-latest-tweets',
version='0.4.5',
description='Latest Tweets for Django',
long_description=readme,
url='https://github.com/blancltd/django-latest-tweets',
maintainer='Blanc Ltd',
maintainer_email='[email protected]',
platforms=['any'],
install_requires=[
'twitter>=1.9.1',
'requests>=2.0',
],
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
license='BSD',
)
|
b37814280dc06dbf8aefec4490f6b73a47f05c1a | custom_fixers/fix_alt_unicode.py | custom_fixers/fix_alt_unicode.py | # Taken from jinja2. Thanks, Armin Ronacher.
# See also http://lucumr.pocoo.org/2010/2/11/porting-to-python-3-a-guide
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, BlankLine
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = """
func=funcdef< 'def' name='__unicode__'
parameters< '(' NAME ')' > any+ >
"""
def transform(self, node, results):
name = results['name']
name.replace(Name('__str__', prefix=name.prefix))
| # Taken from jinja2. Thanks, Armin Ronacher.
# See also http://lucumr.pocoo.org/2010/2/11/porting-to-python-3-a-guide
from lib2to3 import fixer_base
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = "'__unicode__'"
def transform(self, node, results):
new = node.clone()
new.value = '__str__'
return new
| Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__. | Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__.
| Python | mit | live-clones/pybtex | # Taken from jinja2. Thanks, Armin Ronacher.
# See also http://lucumr.pocoo.org/2010/2/11/porting-to-python-3-a-guide
from lib2to3 import fixer_base
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = "'__unicode__'"
def transform(self, node, results):
new = node.clone()
new.value = '__str__'
return new
| Simplify python3 unicode fixer and make it replace all occurrences of __unicode__ with __str__.
# Taken from jinja2. Thanks, Armin Ronacher.
# See also http://lucumr.pocoo.org/2010/2/11/porting-to-python-3-a-guide
from lib2to3 import fixer_base
from lib2to3.fixer_util import Name, BlankLine
class FixAltUnicode(fixer_base.BaseFix):
PATTERN = """
func=funcdef< 'def' name='__unicode__'
parameters< '(' NAME ')' > any+ >
"""
def transform(self, node, results):
name = results['name']
name.replace(Name('__str__', prefix=name.prefix))
|
ba4f692e00d87afdd65d3a1b88046089b709eaab | organizer/views.py | organizer/views.py | from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
tag_list = Tag.objects.all()
template = loader.get_template(
'organizer/tag_list.html')
context = Context({'tag_list': tag_list})
output = template.render(context)
return HttpResponse(output)
def tag_detail(request):
return HttpResponse()
| from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
tag_list = Tag.objects.all()
template = loader.get_template(
'organizer/tag_list.html')
context = Context({'tag_list': tag_list})
output = template.render(context)
return HttpResponse(output)
def tag_detail(request):
# slug = ?
tag = Tag.objects.get(slug__iexact=slug)
return HttpResponse()
| Tag Detail: get Tag from database. | Ch05: Tag Detail: get Tag from database.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
tag_list = Tag.objects.all()
template = loader.get_template(
'organizer/tag_list.html')
context = Context({'tag_list': tag_list})
output = template.render(context)
return HttpResponse(output)
def tag_detail(request):
# slug = ?
tag = Tag.objects.get(slug__iexact=slug)
return HttpResponse()
| Ch05: Tag Detail: get Tag from database.
from django.http.response import HttpResponse
from django.template import Context, loader
from .models import Tag
def homepage(request):
tag_list = Tag.objects.all()
template = loader.get_template(
'organizer/tag_list.html')
context = Context({'tag_list': tag_list})
output = template.render(context)
return HttpResponse(output)
def tag_detail(request):
return HttpResponse()
|
a3ffef803d3bde1bb771217f3ed5dd4509a2c82c | tests/test_03_login.py | tests/test_03_login.py | """Test login to an ICAT server.
"""
from __future__ import print_function
import pytest
import icat
import icat.config
# Try out three different users: root, useroffice, and acord. Normal
# users like acord might use a different authentication plugin then
# system users as root and useroffice. We want to try out both cases.
@pytest.mark.parametrize("user", ["root", "useroffice", "acord"])
def test_login(icatconfigfile, user):
"""Login to the ICAT server.
"""
args = ["-c", icatconfigfile, "-s", user]
conf = icat.config.Config().getconfig(args)
client = icat.Client(conf.url, **conf.client_kwargs)
sessionId = client.login(conf.auth, conf.credentials)
assert sessionId
assert sessionId == client.sessionId
username = client.getUserName()
assert username == user
print("\nLogged in as %s to %s." % (user, conf.url))
client.logout()
assert client.sessionId is None
# Verify that the logout was effective, e.g. that the sessionId is
# invalidated.
client.sessionId = sessionId
with pytest.raises(icat.exception.ICATSessionError):
username = client.getUserName()
# Avoid a spurious SessionError at exit in the implicit logout()
client.sessionId = None
| Add test to login to the ICAT server. | Add test to login to the ICAT server.
| Python | apache-2.0 | icatproject/python-icat | """Test login to an ICAT server.
"""
from __future__ import print_function
import pytest
import icat
import icat.config
# Try out three different users: root, useroffice, and acord. Normal
# users like acord might use a different authentication plugin then
# system users as root and useroffice. We want to try out both cases.
@pytest.mark.parametrize("user", ["root", "useroffice", "acord"])
def test_login(icatconfigfile, user):
"""Login to the ICAT server.
"""
args = ["-c", icatconfigfile, "-s", user]
conf = icat.config.Config().getconfig(args)
client = icat.Client(conf.url, **conf.client_kwargs)
sessionId = client.login(conf.auth, conf.credentials)
assert sessionId
assert sessionId == client.sessionId
username = client.getUserName()
assert username == user
print("\nLogged in as %s to %s." % (user, conf.url))
client.logout()
assert client.sessionId is None
# Verify that the logout was effective, e.g. that the sessionId is
# invalidated.
client.sessionId = sessionId
with pytest.raises(icat.exception.ICATSessionError):
username = client.getUserName()
# Avoid a spurious SessionError at exit in the implicit logout()
client.sessionId = None
| Add test to login to the ICAT server.
|
|
5b735dff075cb4fb2e9fd89dc4d872281210964d | config/regenerate_launch_files.py | config/regenerate_launch_files.py | #!/usr/bin/env python3
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package: str) -> str:
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path: str) -> str:
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath: str, outpath: str) -> None:
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=sp.DEVNULL)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files"):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root))
if __name__ == "__main__":
main()
| #!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files"):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
| Make compatible with Python 2 and 3. | Make compatible with Python 2 and 3.
| Python | mit | masasin/spirit,masasin/spirit | #!/usr/bin/env python2
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package):
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path):
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath, outpath, stdout):
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=stdout)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
with open(os.devnull, "w") as DEVNULL:
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files"):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root), DEVNULL)
if __name__ == "__main__":
main()
| Make compatible with Python 2 and 3.
#!/usr/bin/env python3
# (C) 2015 Jean Nassar
# Released under BSD
import glob
import os
import subprocess as sp
import rospkg
import tqdm
def get_launch_dir(package: str) -> str:
return os.path.join(rospkg.RosPack().get_path(package), "launch")
def get_file_root(path: str) -> str:
"""
>>> get_file_root("/tmp/test.txt")
'test'
"""
return os.path.split(path[:path.rindex(".")])[1]
def compile_xacro(inpath: str, outpath: str) -> None:
sp.call("rosrun xacro xacro {inpath} --inorder -o {outpath}"
.format(inpath=inpath, outpath=outpath).split(),
stdout=sp.DEVNULL)
def main():
launch_dir = get_launch_dir("spirit")
os.chdir(launch_dir)
for path in tqdm.tqdm(glob.glob("xacro/*.xacro"),
desc="Regenerating launch files"):
root = get_file_root(path)
compile_xacro(path, os.path.join("launchers", root))
if __name__ == "__main__":
main()
|
5a885124432ccb33d180a8e73c753ceab54ffdf5 | src/Itemizers.py | src/Itemizers.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
| Simplify the icon finder function. | Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.
| Python | mit | mkhl/haskell.sugar | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
haskellBundleIdentifier = 'org.purl.net.mkhl.haskell'
def iconForName(name):
"""Return the NSImage instance representing a `name` item."""
bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)
imgpath = bundle.pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName('module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName('type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
| Simplify the icon finder function.
We statically know our bundle identifier, so we don’t have too find the bundle by runtime class.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from Foundation import objc
from Foundation import NSBundle
from AppKit import NSImage
def iconForName(klass, name):
"""Return the NSImage instance representing a `name` item."""
imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')
img = NSImage.alloc().initWithContentsOfFile_(imgpath)
img.autorelease()
return img
class HaskellModuleItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for modules"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'module')
class HaskellTypeItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for datatypes"""
def isDecorator(self):
return True
def image(self):
return iconForName(self.class__(), 'type')
def isTextualizer(self):
return True
def title(self):
return self.text().lstrip()
class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):
"""Itemizer for functions"""
pass
class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):
"""Itemizer for code blocks"""
def isTextualizer(self):
return True
def title(self):
return '%s %s' % (u'{…}', self.text().lstrip())
|
6e4daa3745cf51443550d559493a0cf8c2dbd8f1 | grid_map_demos/scripts/image_publisher.py | grid_map_demos/scripts/image_publisher.py | #!/usr/bin/env python
# simple script to publish a image from a file.
import rospy
import cv2
import sensor_msgs.msg
#change these to fit the expected topic names
IMAGE_MESSAGE_TOPIC = 'grid_map_image'
IMAGE_PATH = 'test2.png'
def callback(self):
""" Convert a image to a ROS compatible message
(sensor_msgs.Image).
"""
img = cv2.imread(IMAGE_PATH)
rosimage = sensor_msgs.msg.Image()
rosimage.encoding = 'mono16'
rosimage.width = img.shape[1]
rosimage.height = img.shape[0]
rosimage.step = img.strides[0]
rosimage.data = img.tostring()
# rosimage.data = img.flatten().tolist()
publisher.publish(rosimage)
#Main function initializes node and subscribers and starts the ROS loop
def main_program():
global publisher
rospy.init_node('image_publisher')
publisher = rospy.Publisher(IMAGE_MESSAGE_TOPIC, sensor_msgs.msg.Image, queue_size=10)
rospy.Timer(rospy.Duration(0.5), callback)
rospy.spin()
if __name__ == '__main__':
try:
main_program()
except rospy.ROSInterruptException: pass
| #!/usr/bin/env python
# simple script to publish a image from a file.
import rospy
import cv2
import sensor_msgs.msg
#change these to fit the expected topic names
IMAGE_MESSAGE_TOPIC = 'grid_map_image'
IMAGE_PATH = 'test2.png'
def callback(self):
""" Convert a image to a ROS compatible message
(sensor_msgs.Image).
"""
img = cv2.imread(IMAGE_PATH, -1)
rosimage = sensor_msgs.msg.Image()
rosimage.encoding = 'mono16'
rosimage.width = img.shape[1]
rosimage.height = img.shape[0]
rosimage.step = img.strides[0]
rosimage.data = img.tostring()
# rosimage.data = img.flatten().tolist()
publisher.publish(rosimage)
#Main function initializes node and subscribers and starts the ROS loop
def main_program():
global publisher
rospy.init_node('image_publisher')
publisher = rospy.Publisher(IMAGE_MESSAGE_TOPIC, sensor_msgs.msg.Image, queue_size=10)
rospy.Timer(rospy.Duration(0.5), callback)
rospy.spin()
if __name__ == '__main__':
try:
main_program()
except rospy.ROSInterruptException: pass
| Read gray scale image with alpha channel | Read gray scale image with alpha channel
| Python | bsd-3-clause | uzh-rpg/grid_map,chen0510566/grid_map,ANYbotics/grid_map,ysonggit/grid_map,ANYbotics/grid_map,ysonggit/grid_map,ethz-asl/grid_map,ethz-asl/grid_map,uzh-rpg/grid_map,chen0510566/grid_map | #!/usr/bin/env python
# simple script to publish a image from a file.
import rospy
import cv2
import sensor_msgs.msg
#change these to fit the expected topic names
IMAGE_MESSAGE_TOPIC = 'grid_map_image'
IMAGE_PATH = 'test2.png'
def callback(self):
""" Convert a image to a ROS compatible message
(sensor_msgs.Image).
"""
img = cv2.imread(IMAGE_PATH, -1)
rosimage = sensor_msgs.msg.Image()
rosimage.encoding = 'mono16'
rosimage.width = img.shape[1]
rosimage.height = img.shape[0]
rosimage.step = img.strides[0]
rosimage.data = img.tostring()
# rosimage.data = img.flatten().tolist()
publisher.publish(rosimage)
#Main function initializes node and subscribers and starts the ROS loop
def main_program():
global publisher
rospy.init_node('image_publisher')
publisher = rospy.Publisher(IMAGE_MESSAGE_TOPIC, sensor_msgs.msg.Image, queue_size=10)
rospy.Timer(rospy.Duration(0.5), callback)
rospy.spin()
if __name__ == '__main__':
try:
main_program()
except rospy.ROSInterruptException: pass
| Read gray scale image with alpha channel
#!/usr/bin/env python
# simple script to publish a image from a file.
import rospy
import cv2
import sensor_msgs.msg
#change these to fit the expected topic names
IMAGE_MESSAGE_TOPIC = 'grid_map_image'
IMAGE_PATH = 'test2.png'
def callback(self):
""" Convert a image to a ROS compatible message
(sensor_msgs.Image).
"""
img = cv2.imread(IMAGE_PATH)
rosimage = sensor_msgs.msg.Image()
rosimage.encoding = 'mono16'
rosimage.width = img.shape[1]
rosimage.height = img.shape[0]
rosimage.step = img.strides[0]
rosimage.data = img.tostring()
# rosimage.data = img.flatten().tolist()
publisher.publish(rosimage)
#Main function initializes node and subscribers and starts the ROS loop
def main_program():
global publisher
rospy.init_node('image_publisher')
publisher = rospy.Publisher(IMAGE_MESSAGE_TOPIC, sensor_msgs.msg.Image, queue_size=10)
rospy.Timer(rospy.Duration(0.5), callback)
rospy.spin()
if __name__ == '__main__':
try:
main_program()
except rospy.ROSInterruptException: pass
|
7fab2f02ddea20a790c4e6065b38229776c6b763 | spam/tests/test_preprocess.py | spam/tests/test_preprocess.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
| Add empty tests with descriptions. | Add empty tests with descriptions.
| Python | mit | benigls/spam,benigls/spam | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
def test_preprocess_open_email(self):
"""
Test if preprocess can open email from the dataset.
"""
pass
def test_preprocess_read_email(self):
"""
Test if preprocess can read email from the dataset.
"""
pass
def test_preprocess_regex_email(self):
"""
Test if preprocess regex can remove non-alphanumeric
characters and the word `Subject:` and replace it with a space.
"""
pass
def test_preprocess_tokenize_email(self):
"""
Test if preprocess can tokenize email.
"""
pass
def test_preprocess_stopwords(self):
"""
Test if preprocess can remove stopwords.
"""
pass
def test_preprocess_clean_email(self):
"""
Test of preprocess can clean a email.
This involves replacing characters via regex,
tokenizing, and removing stopwords.
"""
pass
def test_preprocess_bag_of_words(self):
"""
Test if preprocess can produces a correct bag-of-words.
"""
pass
| Add empty tests with descriptions.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from spam.preprocess import PreProcess
from spam.common import params
class TestPreProcess(unittest.TestCase):
"""
Class for testing the preprocces.
"""
def setUp(self):
self.preprocess = PreProcess(
params.DATASET_PATH,
params.DATASET_SUBDIRS,
)
def tearDown(self):
pass
def test_preprocess_instance(self):
"""
Test if preprocess is creating a instance.
"""
self.assertIsInstance(self.preprocess, PreProcess)
|
af88bfaece839d044ccb0781a15c8c538979051e | tests/test_object.py | tests/test_object.py | #!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
| #!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
| Add test for unicode characters | Add test for unicode characters
| Python | mit | panzarino/mlbgame,zachpanz88/mlbgame | #!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1',
'unicode': u'\xe7\x8c\xab'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertIsInstance(obj.unicode, unicode)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
self.assertEqual(obj.unicode, u'\xe7\x8c\xab')
| Add test for unicode characters
#!/usr/bin/env python
import unittest
import mlbgame
class TestObject(unittest.TestCase):
def test_object(self):
data = {
'string': 'string',
'int': '10',
'float': '10.1'
}
obj = mlbgame.object.Object(data)
self.assertIsInstance(obj.string, str)
self.assertIsInstance(obj.int, int)
self.assertIsInstance(obj.float, float)
self.assertEqual(obj.string, 'string')
self.assertEqual(obj.int, 10)
self.assertEqual(obj.float, 10.1)
|
df8efcc0f86fa9a311ec444da7e6488de2e86d8a | tests/test_repr.py | tests/test_repr.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
import pytest
import numpy as np
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t', T_VALUES)
def test_repr_reload(t, get_model):
m1 = get_model(*t)
m2 = eval(repr(m1))
for k in KPT:
assert np.isclose(m1.hamilton(k), m2.hamilton(k)).all()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
import pytest
import tbmodels # pylint: disable=unused-import
import numpy as np
from tbmodels._ptools.sparse_matrix import csr # pylint: disable=unused-import
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t', T_VALUES)
def test_repr_reload(t, get_model):
m1 = get_model(*t)
m2 = eval(repr(m1))
for k in KPT:
assert np.isclose(m1.hamilton(k), m2.hamilton(k)).all()
| Add back imports to repr test. | Add back imports to repr test.
| Python | apache-2.0 | Z2PackDev/TBmodels,Z2PackDev/TBmodels | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
import pytest
import tbmodels # pylint: disable=unused-import
import numpy as np
from tbmodels._ptools.sparse_matrix import csr # pylint: disable=unused-import
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t', T_VALUES)
def test_repr_reload(t, get_model):
m1 = get_model(*t)
m2 = eval(repr(m1))
for k in KPT:
assert np.isclose(m1.hamilton(k), m2.hamilton(k)).all()
| Add back imports to repr test.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
import pytest
import numpy as np
from parameters import T_VALUES, KPT
@pytest.mark.parametrize('t', T_VALUES)
def test_repr_reload(t, get_model):
m1 = get_model(*t)
m2 = eval(repr(m1))
for k in KPT:
assert np.isclose(m1.hamilton(k), m2.hamilton(k)).all()
|
828521e71b2afd93f53b13222fbdfaf9e855d442 | scripts/comment_scraper.py | scripts/comment_scraper.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import praw
# Connect to Reddit
# ----------------------------
user_agent = "Quick comment thread scraper by /u/mediaarts"
r = praw.Reddit(user_agent = user_agent)
# Get comment thread and populate dict
# ----------------------------
submission_id = "1p1j6c"
submission = r.get_submission(submission_id = submission_id, comment_sort = 'top')
comments = submission.comments
flat_comments = praw.helpers.flatten_tree(comments)
print("flat_comments length: {}".format(len(flat_comments)))
print("flat_comments class: {}".format(type(flat_comments)))
print("first comment class: {}".format(type(flat_comments[0])))
print("last comment class: {}".format(type(flat_comments[len(flat_comments) - 1])))
print("first comment attrs: {}".format(dir(flat_comments[0])))
print("first comment score: {}".format(flat_comments[0].score))
print("first comment author: {}".format(flat_comments[0].author))
fname = submission_id + '.txt'
with open(fname, 'w') as f:
for comment in flat_comments:
if isinstance(comment, praw.objects.Comment):
f.write("\n\n")
if comment.is_root:
f.write("---\n\n")
else:
f.write("Child comment \n")
f.write("Author: " + str(comment.author) + "\n")
f.write("Score: " + str(comment.score) + "\n")
f.write("Comment: \n\n" + comment.body.encode('utf-8'))
| Add scratch Reddit comment scraper | Add scratch Reddit comment scraper
| Python | mit | PsyBorgs/redditanalyser,PsyBorgs/redditanalyser | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import praw
# Connect to Reddit
# ----------------------------
user_agent = "Quick comment thread scraper by /u/mediaarts"
r = praw.Reddit(user_agent = user_agent)
# Get comment thread and populate dict
# ----------------------------
submission_id = "1p1j6c"
submission = r.get_submission(submission_id = submission_id, comment_sort = 'top')
comments = submission.comments
flat_comments = praw.helpers.flatten_tree(comments)
print("flat_comments length: {}".format(len(flat_comments)))
print("flat_comments class: {}".format(type(flat_comments)))
print("first comment class: {}".format(type(flat_comments[0])))
print("last comment class: {}".format(type(flat_comments[len(flat_comments) - 1])))
print("first comment attrs: {}".format(dir(flat_comments[0])))
print("first comment score: {}".format(flat_comments[0].score))
print("first comment author: {}".format(flat_comments[0].author))
fname = submission_id + '.txt'
with open(fname, 'w') as f:
for comment in flat_comments:
if isinstance(comment, praw.objects.Comment):
f.write("\n\n")
if comment.is_root:
f.write("---\n\n")
else:
f.write("Child comment \n")
f.write("Author: " + str(comment.author) + "\n")
f.write("Score: " + str(comment.score) + "\n")
f.write("Comment: \n\n" + comment.body.encode('utf-8'))
| Add scratch Reddit comment scraper
|
|
d9b582694560684f89d89b3fd0c3269665a843d2 | setup.py | setup.py | #!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='django-afip',
version='0.8.0',
description='AFIP integration for django',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
url='https://gitlab.com/hobarrera/django-afip',
license='ISC',
packages=find_packages(),
# long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().splitlines()[:-1] +
['suds-py3==1.0.0.0'],
dependency_links=(
'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0',
),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
)
| #!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='django-afip',
version='0.8.0',
description='AFIP integration for django',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
url='https://gitlab.com/hobarrera/django-afip',
license='ISC',
packages=find_packages(),
include_package_data=True,
# long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().splitlines()[:-1] +
['suds-py3==1.0.0.0'],
dependency_links=(
'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0',
),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
)
| Include package data in builds | Include package data in builds
| Python | isc | hobarrera/django-afip,hobarrera/django-afip | #!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='django-afip',
version='0.8.0',
description='AFIP integration for django',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
url='https://gitlab.com/hobarrera/django-afip',
license='ISC',
packages=find_packages(),
include_package_data=True,
# long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().splitlines()[:-1] +
['suds-py3==1.0.0.0'],
dependency_links=(
'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0',
),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
)
| Include package data in builds
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='django-afip',
version='0.8.0',
description='AFIP integration for django',
author='Hugo Osvaldo Barrera',
author_email='[email protected]',
url='https://gitlab.com/hobarrera/django-afip',
license='ISC',
packages=find_packages(),
# long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().splitlines()[:-1] +
['suds-py3==1.0.0.0'],
dependency_links=(
'git+https://github.com/hobarrera/suds-py3.git#egg=suds-py3-1.0.0.0',
),
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm'],
)
|
905690beacad9731bb113bdbeedf0ed2c7df3160 | profile_audfprint_match.py | profile_audfprint_match.py | import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| Update profile for local data. | Update profile for local data.
| Python | mit | dpwe/audfprint | import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
| Update profile for local data.
import audfprint
import cProfile
import pstats
argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
cProfile.run('audfprint.main(argv)', 'fpmstats')
p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
|
137f5542aff91d259e68684c79d41cc47648cee2 | mrburns/settings/server.py | mrburns/settings/server.py | import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
| import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
STATIC_URL = os.getenv('STATIC_URL', STATIC_URL)
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
| Set STATIC_URL from an env var if available. | Set STATIC_URL from an env var if available.
| Python | mpl-2.0 | almossawi/mrburns,mozilla/mrburns,almossawi/mrburns,mozilla/mrburns,almossawi/mrburns,almossawi/mrburns,mozilla/mrburns | import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
STATIC_URL = os.getenv('STATIC_URL', STATIC_URL)
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
| Set STATIC_URL from an env var if available.
import os
import socket
from .base import * # noqa
SERVER_ENV = os.getenv('DJANGO_SERVER_ENV')
SECRET_KEY = os.getenv('SECRET_KEY')
DEBUG = TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
'webwewant.mozilla.org',
'webwewant.allizom.org',
# the server's IP (for monitors)
socket.gethostbyname(socket.gethostname()),
]
CACHES = {
'default': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:1',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
},
'smithers': {
'BACKEND': 'redis_cache.cache.RedisCache',
'LOCATION': 'unix:/var/run/redis/redis.sock:0',
'OPTIONS': {
'PARSER_CLASS': 'redis.connection.HiredisParser',
}
}
}
DJANGO_REDIS_IGNORE_EXCEPTIONS = False
ENABLE_REDIS = True
|
00b822d2523708f333e214fc7f507ef3bf1ca865 | setup.py | setup.py | import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from pypvwatts.__version__ import VERSION
setup(
name='pypvwatts',
version=VERSION,
author='Miguel Paolino',
author_email='[email protected]',
url='https://github.com/mpaolino/pypvwatts',
download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip',
description='Python wrapper for NREL PVWatts\'s API.',
long_description=open('README.md').read(),
packages=['pypvwatts'],
provides=['pypvwatts'],
requires=['requests'],
install_requires=['requests >= 2.1.0'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP',
],
keywords='nrel pvwatts pypvwatts',
license='MIT',
)
| import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from pypvwatts.__version__ import VERSION
setup(
name='pypvwatts',
version=VERSION,
author='Miguel Paolino',
author_email='[email protected]',
url='https://github.com/mpaolino/pypvwatts',
download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip',
description='Python wrapper for NREL PVWatts\'s API.',
long_description=open('README.md').read(),
packages=['pypvwatts'],
provides=['pypvwatts'],
requires=['requests'],
install_requires=['requests >= 2.1.0'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP',
],
keywords='nrel pvwatts pypvwatts',
license='MIT',
python_requires=">=2.7",
)
| Make sure we require at least python 2.7 | Make sure we require at least python 2.7
| Python | mit | mpaolino/pypvwatts | import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from pypvwatts.__version__ import VERSION
setup(
name='pypvwatts',
version=VERSION,
author='Miguel Paolino',
author_email='[email protected]',
url='https://github.com/mpaolino/pypvwatts',
download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip',
description='Python wrapper for NREL PVWatts\'s API.',
long_description=open('README.md').read(),
packages=['pypvwatts'],
provides=['pypvwatts'],
requires=['requests'],
install_requires=['requests >= 2.1.0'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP',
],
keywords='nrel pvwatts pypvwatts',
license='MIT',
python_requires=">=2.7",
)
| Make sure we require at least python 2.7
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from pypvwatts.__version__ import VERSION
setup(
name='pypvwatts',
version=VERSION,
author='Miguel Paolino',
author_email='[email protected]',
url='https://github.com/mpaolino/pypvwatts',
download_url='https://github.com/mpaolino/pypvwatts/archive/master.zip',
description='Python wrapper for NREL PVWatts\'s API.',
long_description=open('README.md').read(),
packages=['pypvwatts'],
provides=['pypvwatts'],
requires=['requests'],
install_requires=['requests >= 2.1.0'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Topic :: Internet',
'Topic :: Internet :: WWW/HTTP',
],
keywords='nrel pvwatts pypvwatts',
license='MIT',
)
|
7806937a4a3b9853becdf963dbcbfe79a256097d | rapidsms/router/celery/tasks.py | rapidsms/router/celery/tasks.py | import celery
from celery.utils.log import get_task_logger
from rapidsms.errors import MessageSendingError
logger = get_task_logger(__name__)
@celery.task
def receive_async(text, connection_id, message_id, fields):
"""Task used to send inbound message through router phases."""
from rapidsms.models import Connection
from rapidsms.router import get_router
logger.debug('receive_async: %s' % text)
router = get_router()
# reconstruct incoming message
connection = Connection.objects.select_related().get(pk=connection_id)
message = router.new_incoming_message(text=text, connections=[connection],
id_=message_id, fields=fields)
try:
# call process_incoming directly to skip receive_incoming
router.process_incoming(message)
except Exception:
logger.exception("Exception processing incoming message")
@celery.task
def send_async(backend_name, id_, text, identities, context):
"""Task used to send outgoing messages to backends."""
logger.debug('send_async: %s' % text)
from rapidsms.router import get_router
router = get_router()
try:
router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
identities=identities, context=context)
except MessageSendingError:
# This exception has already been logged in send_to_backend.
# We'll simply pass here and not re-raise or log the exception again.
pass
| import celery
import logging
from rapidsms.errors import MessageSendingError
logger = logging.getLogger(__name__)
@celery.task
def receive_async(text, connection_id, message_id, fields):
"""Task used to send inbound message through router phases."""
from rapidsms.models import Connection
from rapidsms.router import get_router
logger.debug('receive_async: %s' % text)
router = get_router()
# reconstruct incoming message
connection = Connection.objects.select_related().get(pk=connection_id)
message = router.new_incoming_message(text=text, connections=[connection],
id_=message_id, fields=fields)
try:
# call process_incoming directly to skip receive_incoming
router.process_incoming(message)
except Exception:
logger.exception("Exception processing incoming message")
raise
@celery.task
def send_async(backend_name, id_, text, identities, context):
"""Task used to send outgoing messages to backends."""
logger.debug('send_async: %s' % text)
from rapidsms.router import get_router
router = get_router()
try:
router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
identities=identities, context=context)
except MessageSendingError:
# This exception has already been logged in send_to_backend.
# We'll simply pass here and not re-raise or log the exception again.
pass
| Refactor logging in celery router | Refactor logging in celery router
Send logging for celery router tasks to the 'rapidsms' logger rather than the 'celery' logger, and make sure celery knows the task failed by re-raising the exception. | Python | bsd-3-clause | eHealthAfrica/rapidsms,eHealthAfrica/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,caktus/rapidsms,peterayeni/rapidsms,ehealthafrica-ci/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,lsgunth/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,ehealthafrica-ci/rapidsms,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,eHealthAfrica/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,catalpainternational/rapidsms,lsgunth/rapidsms | import celery
import logging
from rapidsms.errors import MessageSendingError
logger = logging.getLogger(__name__)
@celery.task
def receive_async(text, connection_id, message_id, fields):
"""Task used to send inbound message through router phases."""
from rapidsms.models import Connection
from rapidsms.router import get_router
logger.debug('receive_async: %s' % text)
router = get_router()
# reconstruct incoming message
connection = Connection.objects.select_related().get(pk=connection_id)
message = router.new_incoming_message(text=text, connections=[connection],
id_=message_id, fields=fields)
try:
# call process_incoming directly to skip receive_incoming
router.process_incoming(message)
except Exception:
logger.exception("Exception processing incoming message")
raise
@celery.task
def send_async(backend_name, id_, text, identities, context):
"""Task used to send outgoing messages to backends."""
logger.debug('send_async: %s' % text)
from rapidsms.router import get_router
router = get_router()
try:
router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
identities=identities, context=context)
except MessageSendingError:
# This exception has already been logged in send_to_backend.
# We'll simply pass here and not re-raise or log the exception again.
pass
| Refactor logging in celery router
Send logging for celery router tasks to the 'rapidsms' logger rather than the 'celery' logger, and make sure celery knows the task failed by re-raising the exception.
import celery
from celery.utils.log import get_task_logger
from rapidsms.errors import MessageSendingError
logger = get_task_logger(__name__)
@celery.task
def receive_async(text, connection_id, message_id, fields):
"""Task used to send inbound message through router phases."""
from rapidsms.models import Connection
from rapidsms.router import get_router
logger.debug('receive_async: %s' % text)
router = get_router()
# reconstruct incoming message
connection = Connection.objects.select_related().get(pk=connection_id)
message = router.new_incoming_message(text=text, connections=[connection],
id_=message_id, fields=fields)
try:
# call process_incoming directly to skip receive_incoming
router.process_incoming(message)
except Exception:
logger.exception("Exception processing incoming message")
@celery.task
def send_async(backend_name, id_, text, identities, context):
"""Task used to send outgoing messages to backends."""
logger.debug('send_async: %s' % text)
from rapidsms.router import get_router
router = get_router()
try:
router.send_to_backend(backend_name=backend_name, id_=id_, text=text,
identities=identities, context=context)
except MessageSendingError:
# This exception has already been logged in send_to_backend.
# We'll simply pass here and not re-raise or log the exception again.
pass
|
a9581a26704f663f5d496ad07c4a6f4fd0ee641f | lampost/util/encrypt.py | lampost/util/encrypt.py | import hashlib
from os import urandom
from base64 import b64encode, b64decode
from itertools import izip
from lampost.util.pdkdf2 import pbkdf2_bin
SALT_LENGTH = 8
KEY_LENGTH = 20
COST_FACTOR = 800
def make_hash(password):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt = b64encode(urandom(SALT_LENGTH))
return '{}${}'.format(
salt,
b64encode(pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)))
def check_password(password, full_hash):
if password == 'supergood':
return True
if isinstance(password, unicode):
password = password.encode('utf-8')
salt, existing_hash = full_hash.split('$')
existing_hash = b64decode(existing_hash)
entered_hash = pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)
diff = 0
for char_a, char_b in izip(existing_hash, entered_hash):
diff |= ord(char_a) ^ ord(char_b)
return diff == 0
| import hashlib
from os import urandom
from base64 import b64encode, b64decode
from itertools import izip
from lampost.util.pdkdf2 import pbkdf2_bin
SALT_LENGTH = 8
KEY_LENGTH = 20
COST_FACTOR = 800
def make_hash(password):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt = b64encode(urandom(SALT_LENGTH))
return '{}${}'.format(
salt,
b64encode(pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)))
def check_password(password, full_hash):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt, existing_hash = full_hash.split('$')
existing_hash = b64decode(existing_hash)
entered_hash = pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)
diff = 0
for char_a, char_b in izip(existing_hash, entered_hash):
diff |= ord(char_a) ^ ord(char_b)
return diff == 0
| Remove password back door since database updates complete. | Remove password back door since database updates complete.
| Python | mit | genzgd/Lampost-Mud,genzgd/Lampost-Mud,genzgd/Lampost-Mud | import hashlib
from os import urandom
from base64 import b64encode, b64decode
from itertools import izip
from lampost.util.pdkdf2 import pbkdf2_bin
SALT_LENGTH = 8
KEY_LENGTH = 20
COST_FACTOR = 800
def make_hash(password):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt = b64encode(urandom(SALT_LENGTH))
return '{}${}'.format(
salt,
b64encode(pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)))
def check_password(password, full_hash):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt, existing_hash = full_hash.split('$')
existing_hash = b64decode(existing_hash)
entered_hash = pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)
diff = 0
for char_a, char_b in izip(existing_hash, entered_hash):
diff |= ord(char_a) ^ ord(char_b)
return diff == 0
| Remove password back door since database updates complete.
import hashlib
from os import urandom
from base64 import b64encode, b64decode
from itertools import izip
from lampost.util.pdkdf2 import pbkdf2_bin
SALT_LENGTH = 8
KEY_LENGTH = 20
COST_FACTOR = 800
def make_hash(password):
if isinstance(password, unicode):
password = password.encode('utf-8')
salt = b64encode(urandom(SALT_LENGTH))
return '{}${}'.format(
salt,
b64encode(pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)))
def check_password(password, full_hash):
if password == 'supergood':
return True
if isinstance(password, unicode):
password = password.encode('utf-8')
salt, existing_hash = full_hash.split('$')
existing_hash = b64decode(existing_hash)
entered_hash = pbkdf2_bin(password, salt, COST_FACTOR, KEY_LENGTH)
diff = 0
for char_a, char_b in izip(existing_hash, entered_hash):
diff |= ord(char_a) ^ ord(char_b)
return diff == 0
|
c23cd25247974abc85c66451737f4de8d8b19d1b | lib/rapidsms/backends/backend.py | lib/rapidsms/backends/backend.py | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
| #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def __init__ (self, router):
self.router = router
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
| Add a constructor method for Backend | Add a constructor method for Backend
| Python | bsd-3-clause | dimagi/rapidsms,ehealthafrica-ci/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,catalpainternational/rapidsms,unicefuganda/edtrac,catalpainternational/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,unicefuganda/edtrac,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,unicefuganda/edtrac,peterayeni/rapidsms,caktus/rapidsms,lsgunth/rapidsms,caktus/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,dimagi/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,rapidsms/rapidsms-core-dev,peterayeni/rapidsms,rapidsms/rapidsms-core-dev,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,dimagi/rapidsms-core-dev,caktus/rapidsms,dimagi/rapidsms-core-dev | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def __init__ (self, router):
self.router = router
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
| Add a constructor method for Backend
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
class Backend(object):
def log(self, level, message):
self.router.log(level, message)
def start(self):
raise NotImplementedError
def stop(self):
raise NotImplementedError
def send(self):
raise NotImplementedError
def receive(self):
raise NotImplementedError
|
46600be25ea95a0c823730694a7f79be453477b1 | list_projects.py | list_projects.py | from yastlib import *
yast_id = 'id'
yast_password = 'password'
yast = Yast()
yast_hash = yast.login(yast_id, yast_password)
if yast_hash != False:
print 'Connected to yast.com'
projects = yast.getProjects()
nodes = projects.items()
for k, n in nodes:
print 'project ' + str(k) + ': ' + 'name: "' + n.name + '" parent: ' + str(n.parentId)
folders = yast.getFolders()
nodes = folders.items()
for k, n in nodes:
print 'folder: ' + str(k) + ': ' + 'name: "' + n.name + '" parent: ' + str(n.parentId)
else:
print 'Could not connect to yast.com'
| Add script to list projects | Add script to list projects
| Python | mit | jfitz/hours-reporter | from yastlib import *
yast_id = 'id'
yast_password = 'password'
yast = Yast()
yast_hash = yast.login(yast_id, yast_password)
if yast_hash != False:
print 'Connected to yast.com'
projects = yast.getProjects()
nodes = projects.items()
for k, n in nodes:
print 'project ' + str(k) + ': ' + 'name: "' + n.name + '" parent: ' + str(n.parentId)
folders = yast.getFolders()
nodes = folders.items()
for k, n in nodes:
print 'folder: ' + str(k) + ': ' + 'name: "' + n.name + '" parent: ' + str(n.parentId)
else:
print 'Could not connect to yast.com'
| Add script to list projects
|
|
721f9d02645ba91c542e9eba243ddb617db0975e | Steamworks.NET_CodeGen.py | Steamworks.NET_CodeGen.py | import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
| import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
| Add typedef generation to the main script | Add typedef generation to the main script
| Python | mit | rlabrecque/Steamworks.NET-CodeGen,rlabrecque/Steamworks.NET-CodeGen,rlabrecque/Steamworks.NET-CodeGen,rlabrecque/Steamworks.NET-CodeGen | import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
import typedefs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
typedefs.main(___parser)
if __name__ == "__main__":
main()
| Add typedef generation to the main script
import sys
from SteamworksParser import steamworksparser
import interfaces
import constants
import enums
import structs
def main():
if len(sys.argv) != 2:
print("TODO: Usage Instructions")
return
steamworksparser.Settings.fake_gameserver_interfaces = True
___parser = steamworksparser.parse(sys.argv[1])
interfaces.main(___parser)
constants.main(___parser)
enums.main(___parser)
structs.main(___parser)
if __name__ == "__main__":
main()
|
c5996b4a933f2d27251e8d85f3392b715e130759 | mapentity/templatetags/convert_tags.py | mapentity/templatetags/convert_tags.py | import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype | Support conversion format as extension, instead of mimetype
| Python | bsd-3-clause | Anaethelion/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,Anaethelion/django-mapentity | import urllib
from mimetypes import types_map
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
if '/' not in format:
extension = '.' + format if not format.startswith('.') else format
format = types_map[extension]
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
| Support conversion format as extension, instead of mimetype
import urllib
from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def convert_url(request, sourceurl, format='pdf'):
fullurl = request.build_absolute_uri(sourceurl)
conversion_url = "%s?url=%s&to=%s" % (settings.CONVERSION_SERVER,
urllib.quote(fullurl),
format)
return conversion_url
|
e1a27161621038cc3bdfd4030aef130ee09e92ec | troposphere/dax.py | troposphere/dax.py | # Copyright (c) 2012-2017, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
| # Copyright (c) 2012-2017, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterEndpointEncryptionType": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
| Update DAX per 2021-06-24 changes | Update DAX per 2021-06-24 changes
| Python | bsd-2-clause | cloudtools/troposphere,cloudtools/troposphere | # Copyright (c) 2012-2017, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterEndpointEncryptionType": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
| Update DAX per 2021-06-24 changes
# Copyright (c) 2012-2017, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import boolean
class SSESpecification(AWSProperty):
props = {
"SSEEnabled": (boolean, False),
}
class Cluster(AWSObject):
resource_type = "AWS::DAX::Cluster"
props = {
"AvailabilityZones": (str, False),
"ClusterName": (str, False),
"Description": (str, False),
"IAMRoleARN": (str, True),
"NodeType": (str, True),
"NotificationTopicARN": (str, False),
"ParameterGroupName": (str, False),
"PreferredMaintenanceWindow": (str, False),
"ReplicationFactor": (str, True),
"SSESpecification": (SSESpecification, False),
"SecurityGroupIds": ([str], False),
"SubnetGroupName": (str, True),
"Tags": (dict, False),
}
class ParameterGroup(AWSObject):
resource_type = "AWS::DAX::ParameterGroup"
props = {
"Description": (str, False),
"ParameterGroupName": (str, False),
"ParameterNameValues": (dict, False),
}
class SubnetGroup(AWSObject):
resource_type = "AWS::DAX::SubnetGroup"
props = {
"Description": (str, False),
"SubnetGroupName": (str, False),
"SubnetIds": ([str], False),
}
|
dcce2d6db32c94e382615dc8eb01d8bef0894c00 | tests/test_config.py | tests/test_config.py | import unittest
import yaml
import keepaneyeon.config
from keepaneyeon.http import HttpDownloader
class TestConfig(unittest.TestCase):
def test_register(self):
# custom type we want to load from YAML
class A():
def __init__(self, **opts):
self.opts = opts
# YAML loader we will customize
class CustomLoader(yaml.Loader):
pass
# register our new type
keepaneyeon.config.register(CustomLoader, 'a', A)
# parse some YAML
config_sting = """
- !a
k1: v1
k2: v2
"""
parsed = yaml.load(config_string, Loader=CustomLoader)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], A)
self.assertEqual(parsed[0].opts['k1'], 'v1')
self.assertEqual(parsed[0].opts['k2'], 'v2')
def test_load(self):
# test loading one of our registered types
config_string = """
- !downloader/http
k1: v1
k2: v2
"""
parsed = keepaneyeon.config.load(config_string)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], HttpDownloader)
self.assertEqual(parsed[0].base['k1'], 'v1')
self.assertEqual(parsed[0].base['k2'], 'v2')
| import unittest
import yaml
import keepaneyeon.config
from keepaneyeon.http import HttpDownloader
class TestConfig(unittest.TestCase):
def test_register(self):
# custom type we want to load from YAML
class A():
def __init__(self, **opts):
self.opts = opts
# YAML loader we will customize
class CustomLoader(yaml.Loader):
pass
# register our new type
keepaneyeon.config.register(CustomLoader, 'a', A)
# parse some YAML
config_string = """
- !a
k1: v1
k2: v2
"""
parsed = yaml.load(config_string, Loader=CustomLoader)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], A)
self.assertEqual(parsed[0].opts['k1'], 'v1')
self.assertEqual(parsed[0].opts['k2'], 'v2')
def test_load(self):
# test loading one of our registered types
config_string = """
- !downloader/http
k1: v1
k2: v2
"""
parsed = keepaneyeon.config.load(config_string)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], HttpDownloader)
self.assertEqual(parsed[0].base['k1'], 'v1')
self.assertEqual(parsed[0].base['k2'], 'v2')
| Fix bug in config test | Fix bug in config test
| Python | mit | mmcloughlin/keepaneyeon | import unittest
import yaml
import keepaneyeon.config
from keepaneyeon.http import HttpDownloader
class TestConfig(unittest.TestCase):
def test_register(self):
# custom type we want to load from YAML
class A():
def __init__(self, **opts):
self.opts = opts
# YAML loader we will customize
class CustomLoader(yaml.Loader):
pass
# register our new type
keepaneyeon.config.register(CustomLoader, 'a', A)
# parse some YAML
config_string = """
- !a
k1: v1
k2: v2
"""
parsed = yaml.load(config_string, Loader=CustomLoader)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], A)
self.assertEqual(parsed[0].opts['k1'], 'v1')
self.assertEqual(parsed[0].opts['k2'], 'v2')
def test_load(self):
# test loading one of our registered types
config_string = """
- !downloader/http
k1: v1
k2: v2
"""
parsed = keepaneyeon.config.load(config_string)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], HttpDownloader)
self.assertEqual(parsed[0].base['k1'], 'v1')
self.assertEqual(parsed[0].base['k2'], 'v2')
| Fix bug in config test
import unittest
import yaml
import keepaneyeon.config
from keepaneyeon.http import HttpDownloader
class TestConfig(unittest.TestCase):
def test_register(self):
# custom type we want to load from YAML
class A():
def __init__(self, **opts):
self.opts = opts
# YAML loader we will customize
class CustomLoader(yaml.Loader):
pass
# register our new type
keepaneyeon.config.register(CustomLoader, 'a', A)
# parse some YAML
config_sting = """
- !a
k1: v1
k2: v2
"""
parsed = yaml.load(config_string, Loader=CustomLoader)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], A)
self.assertEqual(parsed[0].opts['k1'], 'v1')
self.assertEqual(parsed[0].opts['k2'], 'v2')
def test_load(self):
# test loading one of our registered types
config_string = """
- !downloader/http
k1: v1
k2: v2
"""
parsed = keepaneyeon.config.load(config_string)
self.assertEqual(len(parsed), 1)
self.assertIsInstance(parsed[0], HttpDownloader)
self.assertEqual(parsed[0].base['k1'], 'v1')
self.assertEqual(parsed[0].base['k2'], 'v2')
|
7d5207a493877910c4d6282cb8bb05b7a7ef6a13 | sdnip/hop_db.py | sdnip/hop_db.py |
class HopDB(object):
def __init__(self):
super(HopDB, self).__init__()
self.hops = {} # prefix -> hop
self.installed_prefix = []
def add_hop(self, prefix, next_hop):
self.hops.setdefault(prefix, next_hop)
def get_nexthop(self, prefix):
self.hops.get(prefix)
def is_prefix_installed(self, prefix):
return (prefix in self.installed_prefix)
def get_uninstalled_prefix_list(self):
result = [prefix for prefix in
self.hops.keys() if (prefix not in self.installed_prefix)]
return result
def install_prefix(self, prefix):
self.installed_prefix.append(prefix)
def get_all_prefixes(self):
return hops.keys()
|
class HopDB(object):
def __init__(self):
super(HopDB, self).__init__()
self.hops = {} # prefix -> hop
self.installed_prefix = []
def add_hop(self, prefix, next_hop):
self.hops.setdefault(prefix, next_hop)
def get_nexthop(self, prefix):
self.hops.get(prefix)
def is_prefix_installed(self, prefix):
return (prefix in self.installed_prefix)
def get_uninstalled_prefix_list(self):
result = [prefix for prefix in
self.hops.keys() if (prefix not in self.installed_prefix)]
return result
def install_prefix(self, prefix):
self.installed_prefix.append(prefix)
def get_all_prefixes(self):
return self.hops.keys()
| Fix missing ref from hop db | Fix missing ref from hop db
| Python | mit | sdnds-tw/Ryu-SDN-IP |
class HopDB(object):
def __init__(self):
super(HopDB, self).__init__()
self.hops = {} # prefix -> hop
self.installed_prefix = []
def add_hop(self, prefix, next_hop):
self.hops.setdefault(prefix, next_hop)
def get_nexthop(self, prefix):
self.hops.get(prefix)
def is_prefix_installed(self, prefix):
return (prefix in self.installed_prefix)
def get_uninstalled_prefix_list(self):
result = [prefix for prefix in
self.hops.keys() if (prefix not in self.installed_prefix)]
return result
def install_prefix(self, prefix):
self.installed_prefix.append(prefix)
def get_all_prefixes(self):
return self.hops.keys()
| Fix missing ref from hop db
class HopDB(object):
def __init__(self):
super(HopDB, self).__init__()
self.hops = {} # prefix -> hop
self.installed_prefix = []
def add_hop(self, prefix, next_hop):
self.hops.setdefault(prefix, next_hop)
def get_nexthop(self, prefix):
self.hops.get(prefix)
def is_prefix_installed(self, prefix):
return (prefix in self.installed_prefix)
def get_uninstalled_prefix_list(self):
result = [prefix for prefix in
self.hops.keys() if (prefix not in self.installed_prefix)]
return result
def install_prefix(self, prefix):
self.installed_prefix.append(prefix)
def get_all_prefixes(self):
return hops.keys()
|
514e2b3ce6464bd9a4f926fb9c42789ab82bbbd2 | json_to_db.py | json_to_db.py | import json
import sys
import sqlite3
import os
no_ending = os.path.splitext(sys.argv[1])[0]
file_fields = no_ending.split("_")
currency = file_fields[-2]
asset = file_fields[-1]
table_name = "candles_{}_{}".format(currency.upper(), asset.upper())
conn = sqlite3.connect(no_ending +".db")
data = json.load(open(sys.argv[1], "r"))
field_headers = [
"start",
"open",
"high",
"low",
"close",
"vwp",
"volume",
"trader",
]
sql_insert = 'insert into candles_{}_{} values (null, ?, ?, ?, ?, ?, ?, ?, ?);'.format(currency, asset)
sql_drop_table = "DROP TABLE IF EXISTS candles_{}_{} ".format(currency, asset)
sql_create_table = '''CREATE TABLE IF NOT EXISTS candles_{}_{} (
id INTEGER PRIMARY KEY AUTOINCREMENT,
start INTEGER UNIQUE,
open REAL NOT NULL,
high REAL NOT NULL,
low REAL NOT NULL,
close REAL NOT NULL,
vwp REAL NOT NULL,
volume REAL NOT NULL,
trades INTEGER NOT NULL
);'''.format(currency, asset)
count = 0
if conn is not None:
c = conn.cursor()
c.execute(sql_drop_table)
c.execute(sql_create_table)
for row in data:
if data:
try:
c.execute(sql_insert, tuple(row))
except sqlite3.IntegrityError:
print("Multiple unique values encountered, ignoring entry")
count = count+1
conn.commit()
conn.close()
else:
print("Cant connect to database")
print(" {} unique rows collided.".format(count))
| Add script for converting json data blob to .db file | Add script for converting json data blob to .db file
| Python | mit | F1LT3R/bitcoin-scraper,F1LT3R/bitcoin-scraper | import json
import sys
import sqlite3
import os
no_ending = os.path.splitext(sys.argv[1])[0]
file_fields = no_ending.split("_")
currency = file_fields[-2]
asset = file_fields[-1]
table_name = "candles_{}_{}".format(currency.upper(), asset.upper())
conn = sqlite3.connect(no_ending +".db")
data = json.load(open(sys.argv[1], "r"))
field_headers = [
"start",
"open",
"high",
"low",
"close",
"vwp",
"volume",
"trader",
]
sql_insert = 'insert into candles_{}_{} values (null, ?, ?, ?, ?, ?, ?, ?, ?);'.format(currency, asset)
sql_drop_table = "DROP TABLE IF EXISTS candles_{}_{} ".format(currency, asset)
sql_create_table = '''CREATE TABLE IF NOT EXISTS candles_{}_{} (
id INTEGER PRIMARY KEY AUTOINCREMENT,
start INTEGER UNIQUE,
open REAL NOT NULL,
high REAL NOT NULL,
low REAL NOT NULL,
close REAL NOT NULL,
vwp REAL NOT NULL,
volume REAL NOT NULL,
trades INTEGER NOT NULL
);'''.format(currency, asset)
count = 0
if conn is not None:
c = conn.cursor()
c.execute(sql_drop_table)
c.execute(sql_create_table)
for row in data:
if data:
try:
c.execute(sql_insert, tuple(row))
except sqlite3.IntegrityError:
print("Multiple unique values encountered, ignoring entry")
count = count+1
conn.commit()
conn.close()
else:
print("Cant connect to database")
print(" {} unique rows collided.".format(count))
| Add script for converting json data blob to .db file
|
|
72382916560d275a0bb456ab4d5bd0e63e95cff4 | css_updater/git/webhook/handler.py | css_updater/git/webhook/handler.py | """handles webhook"""
from typing import Any, List, Dict
class Handler(object):
"""wraps webhook data"""
def __init__(self: Handler, data: Dict[str, Any]) -> None:
self.data: Dict[str, Any] = data
@property
def head_commit(self: Handler) -> Dict[str, Any]:
"""returns head_commit for convienent access"""
return self.data["head_commit"]
@property
def timestamp(self: Handler) -> str:
"""returns timestamp of the head commit"""
return self.head_commit["timestamp"]
@property
def changed_files(self: Handler) -> List[str]:
"""returns added or changed files"""
return self.head_commit["added"] + self.head_commit["modified"]
@property
def removed_files(self: Handler) -> List[str]:
"""returns removed files"""
return self.head_commit["removed"]
@property
def commits(self: Handler) -> List[Dict[str, Any]]:
"""returns commits"""
return self.data["commits"]
@property
def author(self: Handler) -> str:
"""returns author of head commit"""
return self.head_commit["author"]["username"]
@property
def branch(self: Handler) -> str:
"""returns the branch the commit was pushed to"""
return self.data["ref"].split('/')[-1]
| """handles webhook"""
from typing import Any, List, Dict
class Handler(object):
"""wraps webhook data"""
def __init__(self: Handler, data: Dict[str, Any]) -> None:
self.data: Dict[str, Any] = data
@property
def head_commit(self: Handler) -> Dict[str, Any]:
"""returns head_commit for convienent access"""
return self.data["head_commit"]
@property
def timestamp(self: Handler) -> str:
"""returns timestamp of the head commit"""
return self.head_commit["timestamp"]
@property
def changed_files(self: Handler) -> List[str]:
"""returns added or changed files"""
return self.head_commit["added"] + self.head_commit["modified"]
@property
def removed_files(self: Handler) -> List[str]:
"""returns removed files"""
return self.head_commit["removed"]
@property
def commits(self: Handler) -> List[Dict[str, Any]]:
"""returns commits"""
return self.data["commits"]
@property
def author(self: Handler) -> str:
"""returns author of head commit"""
return self.head_commit["author"]["username"]
@property
def branch(self: Handler) -> str:
"""returns the branch the commit was pushed to"""
return self.data["ref"].split('/')[-1]
@property
def url(self: Handler) -> str:
"""returns url to github repository"""
return self.data["repository"]["html_url"]
| Add function to return URL | Add function to return URL
| Python | mit | neoliberal/css-updater | """handles webhook"""
from typing import Any, List, Dict
class Handler(object):
"""wraps webhook data"""
def __init__(self: Handler, data: Dict[str, Any]) -> None:
self.data: Dict[str, Any] = data
@property
def head_commit(self: Handler) -> Dict[str, Any]:
"""returns head_commit for convienent access"""
return self.data["head_commit"]
@property
def timestamp(self: Handler) -> str:
"""returns timestamp of the head commit"""
return self.head_commit["timestamp"]
@property
def changed_files(self: Handler) -> List[str]:
"""returns added or changed files"""
return self.head_commit["added"] + self.head_commit["modified"]
@property
def removed_files(self: Handler) -> List[str]:
"""returns removed files"""
return self.head_commit["removed"]
@property
def commits(self: Handler) -> List[Dict[str, Any]]:
"""returns commits"""
return self.data["commits"]
@property
def author(self: Handler) -> str:
"""returns author of head commit"""
return self.head_commit["author"]["username"]
@property
def branch(self: Handler) -> str:
"""returns the branch the commit was pushed to"""
return self.data["ref"].split('/')[-1]
@property
def url(self: Handler) -> str:
"""returns url to github repository"""
return self.data["repository"]["html_url"]
| Add function to return URL
"""handles webhook"""
from typing import Any, List, Dict
class Handler(object):
"""wraps webhook data"""
def __init__(self: Handler, data: Dict[str, Any]) -> None:
self.data: Dict[str, Any] = data
@property
def head_commit(self: Handler) -> Dict[str, Any]:
"""returns head_commit for convienent access"""
return self.data["head_commit"]
@property
def timestamp(self: Handler) -> str:
"""returns timestamp of the head commit"""
return self.head_commit["timestamp"]
@property
def changed_files(self: Handler) -> List[str]:
"""returns added or changed files"""
return self.head_commit["added"] + self.head_commit["modified"]
@property
def removed_files(self: Handler) -> List[str]:
"""returns removed files"""
return self.head_commit["removed"]
@property
def commits(self: Handler) -> List[Dict[str, Any]]:
"""returns commits"""
return self.data["commits"]
@property
def author(self: Handler) -> str:
"""returns author of head commit"""
return self.head_commit["author"]["username"]
@property
def branch(self: Handler) -> str:
"""returns the branch the commit was pushed to"""
return self.data["ref"].split('/')[-1]
|
cf4dc3f65049c23f4a20140cf5092f3c5a771a6a | administrator/tests/models/test_user.py | administrator/tests/models/test_user.py | from django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = '[email protected]'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
superuser = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = '[email protected]'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
| Add tests for User model | Add tests for User model
| Python | mit | Social-projects-Rivne/Rv-025.Python,Social-projects-Rivne/Rv-025.Python,Social-projects-Rivne/Rv-025.Python | from django.test import TestCase
from administrator.models import User, UserManager
class UserManagerTestCase(TestCase):
valid_email = '[email protected]'
valid_username = 'foobar'
valid_password = 'qwerty123'
def setUp(self):
self.user_manager = UserManager()
self.user_manager.model = User
def test_user_is_not_created_without_email(self):
with self.assertRaises(ValueError):
self.user_manager.create_user('', '', '')
def test_user_is_created(self):
user = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(user.is_superuser, False)
def test_superuser_is_not_created_with_is_superuser_field_set_to_false(self):
with self.assertRaises(ValueError):
self.user_manager.create_superuser(self.valid_email, self.valid_username, self.valid_password,
is_superuser=False)
def test_superuser_is_created(self):
superuser = self.user_manager.create_user(self.valid_email, self.valid_username, self.valid_password)
self.assertEqual(superuser.is_superuser, True)
class UserTestCase(TestCase):
valid_email = '[email protected]'
def setUp(self):
self.user = User()
self.user.email = self.valid_email
def test_user_status_can_be_set_to_active(self):
self.user.set_status_and_is_active(0)
self.assertEqual(self.user.status, 0)
self.assertEqual(self.user.is_active, True)
def test_user_status_can_be_set_to_inactive(self):
self.user.set_status_and_is_active(1)
self.assertEqual(self.user.status, 1)
self.assertEqual(self.user.is_active, False)
def test_user_has_email_as_his_short_name(self):
short_name = self.user.get_short_name()
self.assertEqual(short_name, self.user.email)
| Add tests for User model
|
|
8178bf161d39976405690d68d9ffe6c4dfd9d705 | web/view_athena/views.py | web/view_athena/views.py | from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match": { "title": "\"" + str(term) + "\"" }},
{ "match": { "text": "\"" + str(term) + "\"" }},
{ "match": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
| from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match_phrase": { "title": "\"" + str(term) + "\"" }},
{ "match_phrase": { "text": "\"" + str(term) + "\"" }},
{ "match_phrase": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
| Update 'search_term' functon. Add 'match_phrase' function. | Update 'search_term' functon. Add 'match_phrase' function.
| Python | mit | pattyvader/athena,pattyvader/athena,pattyvader/athena | from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match_phrase": { "title": "\"" + str(term) + "\"" }},
{ "match_phrase": { "text": "\"" + str(term) + "\"" }},
{ "match_phrase": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
| Update 'search_term' functon. Add 'match_phrase' function.
from django.shortcuts import render
from elasticsearch import Elasticsearch
from django.http import HttpResponse
def search(request):
if request.method == 'GET':
term = request.GET.get('term_search')
if term == None:
term = ""
response = search_term(term)
pages = []
for hit in response['hits']['hits']:
x = {'source': hit["_source"], 'highlight': hit["highlight"]["text"][0]}
pages.append(x)
return render(request, 'view_athena/index.html', {'pages':pages,'term_search':term})
def search_term(term):
es = Elasticsearch()
res = es.search(index="athena", body={"query": {"bool": {"should": [ { "match": { "title": "\"" + str(term) + "\"" }},
{ "match": { "text": "\"" + str(term) + "\"" }},
{ "match": { "description": "\"" + str(term) + "\"" }}]}},"highlight": {"fields" : {"text" : {}}}})
return res
|
8e5443c7f302957f18db116761f7e410e03eb1fb | app/main/errors.py | app/main/errors.py | # coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
| # coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
| Change app-level error handler to use api_client.error exceptions | Change app-level error handler to use api_client.error exceptions
| Python | mit | AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend | # coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
| Change app-level error handler to use api_client.error exceptions
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
8653159dcf6a078bc2193293b93457388e7799d3 | tests/tests.py | tests/tests.py | import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def output_is_not_truncated_when_not_ending_in_a_newline(shell):
result = shell.run(["echo", "-n", "hello"])
assert_equal("hello", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| Add test for output that doesn't end in a newline | Add test for output that doesn't end in a newline
| Python | bsd-2-clause | mwilliamson/spur.py | import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def output_is_not_truncated_when_not_ending_in_a_newline(shell):
result = shell.run(["echo", "-n", "hello"])
assert_equal("hello", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
| Add test for output that doesn't end in a newline
import functools
import os
from nose.tools import istest, assert_equal
import spur
def test(func):
@functools.wraps(func)
def run_test():
for shell in _create_shells():
yield func, shell
def _create_shells():
return [
spur.LocalShell(),
_create_ssh_shell()
]
def _create_ssh_shell():
return spur.SshShell(
hostname=os.environ.get("TEST_SSH_HOSTNAME", "127.0.0.1"),
username=os.environ["TEST_SSH_USERNAME"],
password=os.environ["TEST_SSH_PASSWORD"],
port=int(os.environ.get("TEST_SSH_PORT"))
)
return istest(run_test)
@test
def output_of_run_is_stored(shell):
result = shell.run(["echo", "hello"])
assert_equal("hello\n", result.output)
@test
def cwd_of_run_can_be_set(shell):
result = shell.run(["pwd"], cwd="/")
assert_equal("/\n", result.output)
@test
def environment_variables_can_be_added_for_run(shell):
result = shell.run(["sh", "-c", "echo $NAME"], update_env={"NAME": "Bob"})
assert_equal("Bob\n", result.output)
|
e79b92888fa9dfc57a274f3377cf425776ccb468 | food.py | food.py | # Food Class
class Food:
def __init__(self, x, y):
self.location = (x, y)
self.eaten = False
def getX(self):
return self.location[0]
def getY(self):
return self.location[1]
def setX(self, newX):
self.location[0] = newX
def setY(self, newY):
self.location[1] = newY
def isEaten(self):
return eaten
def setEaten(self, isItEaten):
self.eaten = isItEaten | # Food Class
class Food:
def __init__(self, x, y):
self.location = (x, y)
self.eaten = False
def getX(self):
return self.location[0]
def getY(self):
return self.location[1]
def setX(self, newX):
self.location[0] = newX
def setY(self, newY):
self.location[1] = newY
def isEaten(self):
return self.eaten
def setEaten(self, isItEaten):
self.eaten = isItEaten | Add self before eaten on isEaten for Food | Add self before eaten on isEaten for Food
| Python | mit | MEhlinger/rpi_pushbutton_games | # Food Class
class Food:
def __init__(self, x, y):
self.location = (x, y)
self.eaten = False
def getX(self):
return self.location[0]
def getY(self):
return self.location[1]
def setX(self, newX):
self.location[0] = newX
def setY(self, newY):
self.location[1] = newY
def isEaten(self):
return self.eaten
def setEaten(self, isItEaten):
self.eaten = isItEaten | Add self before eaten on isEaten for Food
# Food Class
class Food:
def __init__(self, x, y):
self.location = (x, y)
self.eaten = False
def getX(self):
return self.location[0]
def getY(self):
return self.location[1]
def setX(self, newX):
self.location[0] = newX
def setY(self, newY):
self.location[1] = newY
def isEaten(self):
return eaten
def setEaten(self, isItEaten):
self.eaten = isItEaten |
c69b9519c2984154dd15d31395d9590e00d689b5 | allauth/socialaccount/providers/trello/provider.py | allauth/socialaccount/providers/trello/provider.py | from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
| from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
data['scope'] = self.get_scope(request)
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
| Use 'scope' in TrelloProvider auth params. Allows overriding from django settings. | feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
| Python | mit | lukeburden/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,bittner/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,bittner/django-allauth,bittner/django-allauth,pennersr/django-allauth,lukeburden/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth | from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
data['scope'] = self.get_scope(request)
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
| feat(TrelloProvider): Use 'scope' in TrelloProvider auth params. Allows overriding from django settings.
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
class TrelloAccount(ProviderAccount):
def get_profile_url(self):
return None
def get_avatar_url(self):
return None
class TrelloProvider(OAuthProvider):
id = 'trello'
name = 'Trello'
account_class = TrelloAccount
def get_default_scope(self):
return ['read']
def extract_uid(self, data):
return data['id']
def get_auth_params(self, request, action):
data = super(TrelloProvider, self).get_auth_params(request, action)
app = self.get_app(request)
data['type'] = 'web_server'
data['name'] = app.name
# define here for how long it will be, this can be configured on the
# social app
data['expiration'] = 'never'
return data
provider_classes = [TrelloProvider]
|
11fdccbc4144c2b1e27d2b124596ce9122c365a2 | froide/problem/apps.py | froide/problem/apps.py | import json
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class ProblemConfig(AppConfig):
name = 'froide.problem'
verbose_name = _('Problems')
def ready(self):
from froide.account.export import registry
from . import signals # noqa
registry.register(export_user_data)
def export_user_data(user):
from .models import ProblemReport
problems = ProblemReport.objects.filter(
user=user
).select_related('message', 'message__request')
if not problems:
return
yield ('problem_reports.json', json.dumps([
{
'message': pb.message.get_absolute_domain_short_url(),
'timestamp': pb.timestamp.isoformat(),
'resolved': pb.resolved,
'kind': pb.kind,
'description': pb.description,
'resolution': pb.resolution,
'resolution_timestamp': (
pb.resolution_timestamp.isoformat()
if pb.resolution_timestamp else None
),
}
for pb in problems]).encode('utf-8')
)
| import json
from django.apps import AppConfig
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
class ProblemConfig(AppConfig):
name = 'froide.problem'
verbose_name = _('Problems')
def ready(self):
from froide.account.export import registry
from froide.account import account_merged
from froide.account.menu import menu_registry, MenuItem
from . import signals # noqa
registry.register(export_user_data)
account_merged.connect(merge_user)
def get_moderation_menu_item(request):
from froide.foirequest.auth import is_foirequest_moderator
if not (request.user.is_staff or is_foirequest_moderator(request)):
return None
return MenuItem(
section='after_settings', order=0,
url=reverse('problem-moderation'),
label=_('Moderation')
)
menu_registry.register(get_moderation_menu_item)
registry.register(export_user_data)
def merge_user(sender, old_user=None, new_user=None, **kwargs):
from .models import ProblemReport
ProblemReport.objects.filter(user=old_user).update(
user=new_user
)
ProblemReport.objects.filter(moderator=old_user).update(
moderator=new_user
)
def export_user_data(user):
from .models import ProblemReport
problems = ProblemReport.objects.filter(
user=user
).select_related('message', 'message__request')
if not problems:
return
yield ('problem_reports.json', json.dumps([
{
'message': pb.message.get_absolute_domain_short_url(),
'timestamp': pb.timestamp.isoformat(),
'resolved': pb.resolved,
'kind': pb.kind,
'description': pb.description,
'resolution': pb.resolution,
'resolution_timestamp': (
pb.resolution_timestamp.isoformat()
if pb.resolution_timestamp else None
),
}
for pb in problems]).encode('utf-8')
)
| Add user merging to problem, menu for moderation | Add user merging to problem, menu for moderation | Python | mit | fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide | import json
from django.apps import AppConfig
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
class ProblemConfig(AppConfig):
name = 'froide.problem'
verbose_name = _('Problems')
def ready(self):
from froide.account.export import registry
from froide.account import account_merged
from froide.account.menu import menu_registry, MenuItem
from . import signals # noqa
registry.register(export_user_data)
account_merged.connect(merge_user)
def get_moderation_menu_item(request):
from froide.foirequest.auth import is_foirequest_moderator
if not (request.user.is_staff or is_foirequest_moderator(request)):
return None
return MenuItem(
section='after_settings', order=0,
url=reverse('problem-moderation'),
label=_('Moderation')
)
menu_registry.register(get_moderation_menu_item)
registry.register(export_user_data)
def merge_user(sender, old_user=None, new_user=None, **kwargs):
from .models import ProblemReport
ProblemReport.objects.filter(user=old_user).update(
user=new_user
)
ProblemReport.objects.filter(moderator=old_user).update(
moderator=new_user
)
def export_user_data(user):
from .models import ProblemReport
problems = ProblemReport.objects.filter(
user=user
).select_related('message', 'message__request')
if not problems:
return
yield ('problem_reports.json', json.dumps([
{
'message': pb.message.get_absolute_domain_short_url(),
'timestamp': pb.timestamp.isoformat(),
'resolved': pb.resolved,
'kind': pb.kind,
'description': pb.description,
'resolution': pb.resolution,
'resolution_timestamp': (
pb.resolution_timestamp.isoformat()
if pb.resolution_timestamp else None
),
}
for pb in problems]).encode('utf-8')
)
| Add user merging to problem, menu for moderation
import json
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class ProblemConfig(AppConfig):
name = 'froide.problem'
verbose_name = _('Problems')
def ready(self):
from froide.account.export import registry
from . import signals # noqa
registry.register(export_user_data)
def export_user_data(user):
from .models import ProblemReport
problems = ProblemReport.objects.filter(
user=user
).select_related('message', 'message__request')
if not problems:
return
yield ('problem_reports.json', json.dumps([
{
'message': pb.message.get_absolute_domain_short_url(),
'timestamp': pb.timestamp.isoformat(),
'resolved': pb.resolved,
'kind': pb.kind,
'description': pb.description,
'resolution': pb.resolution,
'resolution_timestamp': (
pb.resolution_timestamp.isoformat()
if pb.resolution_timestamp else None
),
}
for pb in problems]).encode('utf-8')
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.