Dataset columns (lengths/values as reported by the dataset viewer):

| Column       | Type        | Lengths / values   |
|--------------|-------------|--------------------|
| commit       | string      | 40 to 40 chars     |
| old_file     | string      | 5 to 117 chars     |
| new_file     | string      | 5 to 117 chars     |
| old_contents | string      | 0 to 1.93k chars   |
| new_contents | string      | 19 to 3.3k chars   |
| subject      | string      | 17 to 320 chars    |
| message      | string      | 18 to 3.28k chars  |
| lang         | categorical | 1 distinct value   |
| license      | categorical | 13 distinct values |
| repos        | string      | 7 to 42.4k chars   |
| completion   | string      | 19 to 3.3k chars   |
| prompt       | string      | 21 to 3.65k chars  |
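Each record below follows this schema. As a minimal sketch of loading and inspecting one record with the Hugging Face `datasets` library (the path `user/commit-dataset` is a placeholder, since this dump does not name the dataset):

```python
# Minimal sketch, assuming the dump corresponds to a Hugging Face dataset.
# "user/commit-dataset" is a hypothetical path, not the real dataset name.
from datasets import load_dataset

ds = load_dataset("user/commit-dataset", split="train")
row = ds[0]

# Scalar fields are short strings; print them directly.
for col in ("commit", "old_file", "new_file", "subject", "lang", "license", "repos"):
    print(col, ":", row[col])

# old_contents/new_contents hold the full file before and after the commit.
print(row["new_contents"][:200])
```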
commit: fc5714951bac61f17509eacf8ec2413e14a79ddc
old_file: txircd/modules/core/sno_oper.py
new_file: txircd/modules/core/sno_oper.py
old_contents: (empty)
new_contents:

```python
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements

class SnoOper(ModuleData):
    implements(IPlugin, IModuleData)

    name = "ServerNoticeOper"
    core = True

    def hookIRCd(self, ircd):
        self.ircd = ircd

    def actions(self):
        return [ ("operreport", 1, self.sendOperNotice),
                 ("servernoticetype", 1, self.checkSnoType) ]

    def sendOperNotice(self, user, reason):
        if reason:
            message = "Failed OPER attempt from {} ({}).".format(user.nick, reason)
        else:
            message = "{} has opered.".format(user.nick)
        snodata = {
            "mask": "oper",
            "message": message
        }
        self.ircd.runActionProcessing("sendservernotice", snodata)

    def checkSnoType(self, user, typename):
        return typename == "oper"

snoOper = SnoOper()
```

subject: Add a snomask for OPER attempts
message: Add a snomask for OPER attempts
lang: Python
license: bsd-3-clause
repos: ElementalAlchemist/txircd,Heufneutje/txircd
commit: 864f5be90fb31529f8ae9b0cf765fcf77504c0c5
old_file: comics/comics/mortenm.py
new_file: comics/comics/mortenm.py
old_contents:

```python
# encoding: utf-8

from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta

class ComicMeta(BaseComicMeta):
    name = 'Morten M (vg.no)'
    language = 'no'
    url = 'http://www.vg.no/spesial/mortenm/'
    start_date = '1978-01-01'
    history_capable_days = 120
    schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
    time_zone = 1
    rights = 'Morten M. Kristiansen'

class ComicCrawler(BaseComicCrawler):
    def crawl(self):
        self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)-%(month)s-%(day).jpg' % {
            'year': self.pub_date.strftime("%Y"),
            'month': self.pub_date.strftime("%m"),
            'day': self.pub_date.strftime("%d"),
        }
```

new_contents:

```python
# encoding: utf-8

from comics.aggregator.crawler import BaseComicCrawler
from comics.meta.base import BaseComicMeta

class ComicMeta(BaseComicMeta):
    name = 'Morten M (vg.no)'
    language = 'no'
    url = 'http://www.vg.no/spesial/mortenm/'
    start_date = '1978-01-01'
    history_capable_days = 120
    schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
    time_zone = 1
    rights = 'Morten M. Kristiansen'

class ComicCrawler(BaseComicCrawler):
    def crawl(self):
        self.url = 'http://static.vg.no/gfx/mortenm/output/%(year)s/%(month)s/%(year)s-%(month)s-%(day)s.jpg' % {
            'year': self.pub_date.strftime("%Y"),
            'month': self.pub_date.strftime("%m"),
            'day': self.pub_date.strftime("%d"),
        }
```

subject: Add missing chars in URL for 'Morten M' crawler
message: Add missing chars in URL for 'Morten M' crawler
lang: Python
license: agpl-3.0
repos: klette/comics,jodal/comics,datagutten/comics,datagutten/comics,klette/comics,datagutten/comics,jodal/comics,jodal/comics,klette/comics,datagutten/comics,jodal/comics
commit: fb99c40fa0dcf59aaaf45a14b238a240a453bcaa
old_file: climlab/__init__.py
new_file: climlab/__init__.py
old_contents:

```python
__version__ = '0.2.4'

# This list defines all the modules that will be loaded if a user invokes
#   from climLab import *

# totally out of date!

#__all__ = ["constants", "thermo", "orbital_table",
#           "long_orbital_table", "insolation", "ebm",
#           "column", "convadj"]

#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
```

new_contents:

```python
__version__ = '0.2.5'

# This list defines all the modules that will be loaded if a user invokes
#   from climLab import *

# totally out of date!

#__all__ = ["constants", "thermo", "orbital_table",
#           "long_orbital_table", "insolation", "ebm",
#           "column", "convadj"]

#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
```

subject: Increment version number to 0.2.5
message: Increment version number to 0.2.5
lang: Python
license: mit
repos: brian-rose/climlab,cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab,cjcardinale/climlab
commit: 7a1254fa530b02d32f39e2210ec864f78dd9504a
old_file: groundstation/transfer/response_handlers/describeobjects.py
new_file: groundstation/transfer/response_handlers/describeobjects.py
old_contents:

```python
from groundstation import logger
log = logger.getLogger(__name__)


def handle_describeobjects(self):
    if not self.payload:
        log.info("station %s sent empty DESCRIVEOBJECTS payload - new database?" %
                 (str(self.origin)))
        return
    for obj in self.payload.split(chr(0)):
        if obj not in self.station or True:
            request = self._Request("FETCHOBJECT", payload=obj)
            self.stream.enqueue(request)
        else:
            log.debug("Not fetching already present object %s" % (str(obj)))
```

new_contents:

```python
from groundstation import logger
log = logger.getLogger(__name__)


def handle_describeobjects(self):
    if not self.payload:
        log.info("station %s sent empty DESCRIVEOBJECTS payload - new database?" %
                 (str(self.origin)))
        return
    for obj in self.payload.split(chr(0)):
        if obj not in self.station:
            request = self._Request("FETCHOBJECT", payload=obj)
            self.stream.enqueue(request)
        else:
            log.debug("Not fetching already present object %s" % (str(obj)))
```

subject: Remove hook that snuck in
message: Remove hook that snuck in
lang: Python
license: mit
repos: richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
commit: 7b14e846f08f69601372266ed82f91ba5bd306f6
old_file: devito/core/__init__.py
new_file: devito/core/__init__.py
old_contents:

```python
"""
The ``core`` Devito backend is simply a "shadow" of the ``base`` backend,
common to all other backends. The ``core`` backend (and therefore the ``base``
backend as well) are used to run Devito on standard CPU architectures.
"""

from devito.dle import (BasicRewriter, AdvancedRewriter, AdvancedRewriterSafeMath,
                        SpeculativeRewriter, init_dle)
from devito.parameters import Parameters, add_sub_configuration

core_configuration = Parameters('core')
core_configuration.add('autotuning', 'basic', ['none', 'basic', 'aggressive'])

env_vars_mapper = {
    'DEVITO_AUTOTUNING': 'autotuning',
}

add_sub_configuration(core_configuration, env_vars_mapper)

# Initialize the DLE
modes = {'basic': BasicRewriter,
         'advanced': AdvancedRewriter,
         'advanced-safemath': AdvancedRewriterSafeMath,
         'speculative': SpeculativeRewriter}
init_dle(modes)

# The following used by backends.backendSelector
from devito.function import (Constant, Function, TimeFunction, SparseFunction,  # noqa
                             SparseTimeFunction)
from devito.grid import Grid  # noqa
from devito.core.operator import Operator  # noqa
from devito.types import CacheManager  # noqa
```

new_contents:

```python
"""
The ``core`` Devito backend is simply a "shadow" of the ``base`` backend,
common to all other backends. The ``core`` backend (and therefore the ``base``
backend as well) are used to run Devito on standard CPU architectures.
"""

from devito.dle import (BasicRewriter, AdvancedRewriter, AdvancedRewriterSafeMath,
                        SpeculativeRewriter, init_dle)
from devito.parameters import Parameters, add_sub_configuration

core_configuration = Parameters('core')
core_configuration.add('autotuning', 'basic', ['off', 'basic', 'aggressive'])

env_vars_mapper = {
    'DEVITO_AUTOTUNING': 'autotuning',
}

add_sub_configuration(core_configuration, env_vars_mapper)

# Initialize the DLE
modes = {'basic': BasicRewriter,
         'advanced': AdvancedRewriter,
         'advanced-safemath': AdvancedRewriterSafeMath,
         'speculative': SpeculativeRewriter}
init_dle(modes)

# The following used by backends.backendSelector
from devito.function import (Constant, Function, TimeFunction, SparseFunction,  # noqa
                             SparseTimeFunction)
from devito.grid import Grid  # noqa
from devito.core.operator import Operator  # noqa
from devito.types import CacheManager  # noqa
```

subject: Change autotuning 'none' to 'off'
message: core: Change autotuning 'none' to 'off'
lang: Python
license: mit
repos: opesci/devito,opesci/devito
""" The ``core`` Devito backend is simply a "shadow" of the ``base`` backend, common to all other backends. The ``core`` backend (and therefore the ``base`` backend as well) are used to run Devito on standard CPU architectures. """ from devito.dle import (BasicRewriter, AdvancedRewriter, AdvancedRewriterSafeMath, SpeculativeRewriter, init_dle) from devito.parameters import Parameters, add_sub_configuration core_configuration = Parameters('core') core_configuration.add('autotuning', 'basic', ['off', 'basic', 'aggressive']) env_vars_mapper = { 'DEVITO_AUTOTUNING': 'autotuning', } add_sub_configuration(core_configuration, env_vars_mapper) # Initialize the DLE modes = {'basic': BasicRewriter, 'advanced': AdvancedRewriter, 'advanced-safemath': AdvancedRewriterSafeMath, 'speculative': SpeculativeRewriter} init_dle(modes) # The following used by backends.backendSelector from devito.function import (Constant, Function, TimeFunction, SparseFunction, # noqa SparseTimeFunction) from devito.grid import Grid # noqa from devito.core.operator import Operator # noqa from devito.types import CacheManager # noqa
core: Change autotuning 'none' to 'off' """ The ``core`` Devito backend is simply a "shadow" of the ``base`` backend, common to all other backends. The ``core`` backend (and therefore the ``base`` backend as well) are used to run Devito on standard CPU architectures. """ from devito.dle import (BasicRewriter, AdvancedRewriter, AdvancedRewriterSafeMath, SpeculativeRewriter, init_dle) from devito.parameters import Parameters, add_sub_configuration core_configuration = Parameters('core') core_configuration.add('autotuning', 'basic', ['none', 'basic', 'aggressive']) env_vars_mapper = { 'DEVITO_AUTOTUNING': 'autotuning', } add_sub_configuration(core_configuration, env_vars_mapper) # Initialize the DLE modes = {'basic': BasicRewriter, 'advanced': AdvancedRewriter, 'advanced-safemath': AdvancedRewriterSafeMath, 'speculative': SpeculativeRewriter} init_dle(modes) # The following used by backends.backendSelector from devito.function import (Constant, Function, TimeFunction, SparseFunction, # noqa SparseTimeFunction) from devito.grid import Grid # noqa from devito.core.operator import Operator # noqa from devito.types import CacheManager # noqa
commit: 3befcbaf3a78a46edc31cc1910fcd8e0a9381102
old_file: money_conversion/money.py
new_file: money_conversion/money.py
old_contents:

```python
class Money(object):
    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency.upper()

    def __repr__(self):
        return "%.2f %s" % (self.amount, self.currency)
```

new_contents:

```python
from currency_rates import rates

class Money(object):
    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency.upper()

    def __repr__(self):
        return "%.2f %s" % (self.amount, self.currency)

    def to_currency(self, new_currency):
        new_currency = new_currency.split('_')[1].upper()
        amount = self.amount
        base_currency_rates = rates.get(self.currency)
        new_amount = amount * base_currency_rates.get(new_currency)
        return Money(new_amount, new_currency)
```

subject: Add to_currency method in order to be able to convert to a new currency
message: Add to_currency method in order to be able to convert to a new currency
lang: Python
license: mit
repos: mdsrosa/money-conversion-py
commit: 6b9c9f98ce10db9e9a767c1ae81c0655c9d4d28c
old_file: runtests.py
new_file: runtests.py
old_contents:

```python
#!/usr/bin/env python
import os
import sys
import subprocess
from importlib import import_module

if __name__ == '__main__':
    # Test using django.test.runner.DiscoverRunner
    os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
    # We need to use subprocess.call instead of django's execute_from_command_line
    # because we can only setup django's settings once, and it's bad
    # practice to change them at runtime
    subprocess.call(['django-admin', 'test', '--nomigrations'])
    subprocess.call(['django-admin', 'test', '-n'])

    # Test using django_nose.NoseTestSuiteRunner
    os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.nose_settings'
    for module in ('nose', 'django_nose',):
        try:
            import_module(module)
        except ImportError:
            print("Testing failed: could not import {0}, try pip installing it".format(module))
            sys.exit(1)
    # Add pdb flag is this is only supported by nose
    subprocess.call(['django-admin', 'test', 'tests.myapp.nose_tests', '--nomigrations', '--pdb'])
    subprocess.call(['django-admin', 'test', 'tests.myapp.nose_tests', '-n', '--pdb'])
```

new_contents:

```python
#!/usr/bin/env python
import os
import sys
import subprocess
from importlib import import_module

if __name__ == '__main__':
    # Test using django.test.runner.DiscoverRunner
    os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
    # We need to use subprocess.call instead of django's execute_from_command_line
    # because we can only setup django's settings once, and it's bad
    # practice to change them at runtime
    subprocess.call(['django-admin', 'test', '--nomigrations'])
    subprocess.call(['django-admin', 'test', '-n'])

    # Test using django_nose.NoseTestSuiteRunner
    os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.nose_settings'
    for module in ('nose', 'django_nose',):
        try:
            import_module(module)
        except ImportError:
            print("Testing failed: could not import {0}, try pip installing it".format(module))
            sys.exit(1)
    # Add pdb flag as this is only supported by nose
    subprocess.call(['django-admin', 'test', 'tests.myapp.nose_tests', '--nomigrations', '--pdb'])
    subprocess.call(['django-admin', 'test', 'tests.myapp.nose_tests', '-n', '--pdb'])
```

subject: Fix typo is -> as
message: Fix typo is -> as
lang: Python
license: mit
repos: henriquebastos/django-test-without-migrations,henriquebastos/django-test-without-migrations
commit: 1945a200cb8d517ce16eb039ecb4c3afc67acb9b
old_file: bin/checkpypi.py
new_file: bin/checkpypi.py
old_contents: (empty)
new_contents:

```python
#!/usr/bin/env python

# Adapted from http://code.activestate.com/recipes/577708-check-for-package-updates-on-pypi-works-best-in-pi/
# Changelog:
# - patch to python 3.6
# - include hidden releases

import xmlrpc
import pip

pypi = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
for dist in pip.get_installed_distributions():
    available = pypi.package_releases(dist.project_name, True)
    if not available:
        # Try to capitalize pkg name
        available = pypi.package_releases(dist.project_name.capitalize())

    if not available:
        msg = 'no releases at pypi'
    elif available[0] != dist.version:
        msg = '{} available'.format(available[0])
    else:
        msg = 'up to date'
    pkg_info = '{dist.project_name} {dist.version}'.format(dist=dist)
    print('{pkg_info:40} {msg}'.format(pkg_info=pkg_info, msg=msg))
```

subject: Check latest version of Python modules in Pypi
message: Check latest version of Python modules in Pypi
lang: Python
license: apache-2.0
repos: verdimrc/linuxcfg,verdimrc/linuxcfg,verdimrc/linuxcfg
commit: e2f9c0c0e8b96e44c5410c242d0609ef36b5ee4e
old_file: tests/test_ghostscript.py
new_file: tests/test_ghostscript.py
old_contents:

```python
import subprocess
import unittest


class GhostscriptTest(unittest.TestCase):

    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(str(stderr), "")
        self.assertRegexpMatches(str(stdout), r'9\.\d\d')
```

new_contents:

```python
import subprocess
import unittest


class GhostscriptTest(unittest.TestCase):

    def test_installed(self):
        process = subprocess.Popen(
            ['gs', '--version'],
            universal_newlines=True,
            stdin=None,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        stdout, stderr = process.communicate()

        self.assertEqual(process.returncode, 0)
        self.assertEqual(stderr, "")
        self.assertRegexpMatches(stdout, r'9\.\d\d')
```

subject: Make Popen.communicate return output as strings not bytes.
message: Make Popen.communicate return output as strings not bytes.
lang: Python
license: mit
repos: YPlan/treepoem
commit: abe11541d94a185456a79286bb9e5800c44305c7
old_file: vote.py
new_file: vote.py
old_contents: (empty)
new_contents:

```python
#!/usr/bin/python
import commands

counter =0
while counter <=100 :
    #alocate new Elastic IP, and get the allocation id
    (stauts,output) = commands.getstatusoutput("aws ec2 allocate-address")
    allocation_id = output.split('\t') [0]
    #associate the allocated ip to indicated ec2 instance
    (status,output) = commands.getstatusoutput("aws ec2 associate-address --instance-id i-9afe2b90 --allocation-id "+allocation_id)
    #Sleep for 5 seconds
    (status,output) = commands.getstatusoutput("sleep 10")
    #release allocated Elastic IP
    (status,output) = commands.getstatusoutput("aws ec2 release-address --allocation-id " + allocation_id)
    counter +=1
    print counter
```

subject: Add one script to use AWS CLI to allocate/associate/release EIP automatically.
message: Add one script to use AWS CLI to allocate/associate/release EIP automatically.
lang: Python
license: mit
repos: yuecong/tools,yuecong/tools,yuecong/tools,yuecong/tools
commit: ff80cf04452c85ff0b93666feb867afa6e4d94f0
old_file: examples/apc2016/train_fcn8s.py
new_file: examples/apc2016/train_fcn8s.py
old_contents: (empty)
new_contents:

```python
#!/usr/bin/env python

import argparse
import os
import os.path as osp

import chainer
from chainer import cuda

import fcn

import datasets


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--fcn16s', required=True)
    parser.add_argument('--gpu', type=int, default=0)
    parser.add_argument('--out', required=True)
    parser.add_argument('--dataset', default='v2', choices=['v1', 'v2'])
    args = parser.parse_args()

    fcn16s_path = args.fcn16s
    gpu = args.gpu
    out = args.out
    if args.dataset == 'v1':
        dataset_class = datasets.APC2016DatasetV1
    else:
        dataset_class = datasets.APC2016DatasetV2

    if not osp.exists(out):
        os.makedirs(out)

    # 1. dataset
    dataset_train = dataset_class('train')
    dataset_val = dataset_class('val')

    iter_train = chainer.iterators.SerialIterator(dataset_train, batch_size=1)
    iter_val = chainer.iterators.SerialIterator(dataset_val, batch_size=1,
                                                repeat=False, shuffle=False)

    # 2. model
    n_class = len(dataset_train.label_names)

    fcn16s = fcn.models.FCN16s(n_class=n_class)
    chainer.serializers.load_hdf5(fcn16s_path, fcn16s)

    model = fcn.models.FCN8s(n_class=n_class)
    model.train = True
    fcn.utils.copy_chainermodel(fcn16s, model)

    if gpu >= 0:
        cuda.get_device(gpu).use()
        model.to_gpu()

    # 3. optimizer
    optimizer = chainer.optimizers.Adam(alpha=1e-5)
    optimizer.setup(model)

    # training loop
    trainer = fcn.Trainer(
        device=gpu,
        model=model,
        optimizer=optimizer,
        iter_train=iter_train,
        iter_val=iter_val,
        out=out,
    )
    trainer.train(
        max_iter=150000,
        interval_eval=5000,
    )


if __name__ == '__main__':
    main()
```

subject: Add trainer for fcn8s on apc2016
message: Add trainer for fcn8s on apc2016
lang: Python
license: mit
repos: wkentaro/fcn
commit: 4bcc0aae53def04e16e87499b1321256ff35a7c1
old_file: pyconll/__init__.py
new_file: pyconll/__init__.py
old_contents:

```python
"""
A library whose purpose is to provide a low level layer between the CoNLL format
and python code.
"""

__all__ = ['exception', 'load', 'tree', 'unit', 'util']

from .load import load_from_string, load_from_file, load_from_url, \
    iter_from_string, iter_from_file, iter_from_url
```

new_contents:

```python
"""
A library whose purpose is to provide a low level layer between the CoNLL format
and python code.
"""

__all__ = ['conllable', 'exception', 'load', 'tree', 'unit', 'util']

from .load import load_from_string, load_from_file, load_from_url, \
    iter_from_string, iter_from_file, iter_from_url
```

subject: Add conllable to all list.
message: Add conllable to all list.
lang: Python
license: mit
repos: pyconll/pyconll,pyconll/pyconll
""" A library whose purpose is to provide a low level layer between the CoNLL format and python code. """ __all__ = ['conllable', 'exception', 'load', 'tree', 'unit', 'util'] from .load import load_from_string, load_from_file, load_from_url, \ iter_from_string, iter_from_file, iter_from_url
Add conllable to all list. """ A library whose purpose is to provide a low level layer between the CoNLL format and python code. """ __all__ = ['exception', 'load', 'tree', 'unit', 'util'] from .load import load_from_string, load_from_file, load_from_url, \ iter_from_string, iter_from_file, iter_from_url
commit: 4a1ea1545c6428f3695c001ef9960ea696d20a36
old_file: test_utilities/src/d1_test/instance_generator/sciobj.py
new_file: test_utilities/src/d1_test/instance_generator/sciobj.py
old_contents: (empty)
new_contents:

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import random
import re
import StringIO

import d1_common.xml

import d1_test.d1_test_case
import d1_test.instance_generator.identifier
import d1_test.instance_generator.system_metadata


def generate_reproducible(client, pid=None, option_dict=None):
    """Generate science object bytes and a random, fully populated System
    Metadata object that are always the same for a given PID. The PID can be
    seen as a handle through which the same science object bytes and sysmeta
    can always be retrieved.
    """
    option_dict = option_dict or {}
    pid = pid or d1_test.instance_generator.identifier.generate_pid()
    option_dict['identifier'] = pid
    with d1_test.d1_test_case.reproducible_random_context(pid):
        sciobj_str = generate_reproducible_sciobj_str(pid)
        sysmeta_pyxb = (
            d1_test.instance_generator.system_metadata.generate_from_file(
                client, StringIO.StringIO(sciobj_str), option_dict
            )
        )
    return (
        pid, d1_common.xml.get_value(sysmeta_pyxb, 'seriesId'), sciobj_str,
        sysmeta_pyxb
    )


def generate_reproducible_sciobj_str(pid):
    """Return a science object byte string that is always the same for a given
    PID
    """
    undecorated_pid = re.sub(r'^<.*?>', '', pid)
    with d1_test.d1_test_case.reproducible_random_context(undecorated_pid):
        return (
            'These are the reproducible Science Object bytes for pid="{}". '
            'What follows is 100 to 200 random bytes: '.
            format(undecorated_pid.encode('utf-8')) + str(
                bytearray(
                    random.getrandbits(8) for _ in range(random.randint(100, 200))
                )
            )
        )
```

subject: Add instance generator for complete reproducible objects
message: Add instance generator for complete reproducible objects
lang: Python
license: apache-2.0
repos: DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
commit: 52c2205804d8dc38447bca1ccbf5599e00cd1d7b
old_file: main.py
new_file: main.py
old_contents:

```python
#!/usr/bin/env python3

import requests

CONFIG_DIR = "config"


class Bot:
    def __init__(self):
        self.config = Config(CONFIG_DIR)
        self.api = TelegramBotApi(self.config.get_auth_token())

    def run(self):
        self.api.send_message(self.config.get_user_id(), "test")


class TelegramBotApi:
    def __init__(self, auth_token):
        self.base_url = "https://api.telegram.org/bot" + auth_token + "/"

    def send_message(self, chat_id, text):
        self.__send_request("sendMessage", chat_id=chat_id, text=text)

    def __send_request(self, command, **params):
        requests.get(self.base_url + command, params=params)


class Config:
    def __init__(self, config_dir):
        self.config_dir = config_dir + "/"

    def get_auth_token(self):
        return self.__get_config_value("auth_token")

    def get_user_id(self):
        return self.__get_config_value("user_id")

    def __get_config_value(self, config_key):
        return open(self.config_dir + config_key).read().strip()


if __name__ == "__main__":
    Bot().run()
```

new_contents:

```python
#!/usr/bin/env python3

import requests

CONFIG_DIR = "config"


class Bot:
    def __init__(self):
        self.config = Config(CONFIG_DIR)
        self.api = TelegramBotApi(self.config.get_auth_token())

    def run(self):
        self.api.send_message(self.config.get_admin_user_id(), "test")


class TelegramBotApi:
    def __init__(self, auth_token):
        self.base_url = "https://api.telegram.org/bot" + auth_token + "/"

    def send_message(self, chat_id, text):
        self.__send_request("sendMessage", chat_id=chat_id, text=text)

    def __send_request(self, command, **params):
        requests.get(self.base_url + command, params=params)


class Config:
    def __init__(self, config_dir):
        self.config_dir = config_dir + "/"

    def get_auth_token(self):
        return self.__get_config_value("auth_token")

    def get_admin_user_id(self):
        return self.__get_config_value("admin_user_id")

    def __get_config_value(self, config_key):
        return open(self.config_dir + config_key).read().strip()


if __name__ == "__main__":
    Bot().run()
```

subject: Rename user_id config key to admin_user_id
message: Rename user_id config key to admin_user_id
lang: Python
license: agpl-3.0
repos: alvarogzp/telegram-bot,alvarogzp/telegram-bot
commit: ba3544fc18d5c5e827b1c1777b7811201545a8c5
old_file: boto/pyami/scriptbase.py
new_file: boto/pyami/scriptbase.py
old_contents:

```python
import os, sys, time, traceback
import smtplib
from boto.utils import ShellCommand, get_ts
import boto
import boto.utils

class ScriptBase:

    def __init__(self, config_file=None):
        self.instance_id = boto.config.get('Instance', 'instance-id', 'default')
        self.name = self.__class__.__name__
        self.ts = get_ts()
        if config_file:
            boto.config.read(config_file)

    def notify(self, subject, body=''):
        boto.utils.notify(subject, body)

    def mkdir(self, path):
        if not os.path.isdir(path):
            try:
                os.mkdir(path)
            except:
                boto.log.error('Error creating directory: %s' % path)

    def umount(self, path):
        if os.path.ismount(path):
            self.run('umount %s' % path)

    def run(self, command, notify=True, exit_on_error=False):
        self.last_command = ShellCommand(command)
        if self.last_command.status != 0:
            boto.log.error(self.last_command.output)
            if notify:
                self.notify('Error encountered', self.last_command.output)
            if exit_on_error:
                sys.exit(-1)
        return self.last_command.status

    def main(self):
        pass
```

new_contents:

```python
import os, sys, time, traceback
import smtplib
from boto.utils import ShellCommand, get_ts
import boto
import boto.utils

class ScriptBase:

    def __init__(self, config_file=None):
        self.instance_id = boto.config.get('Instance', 'instance-id', 'default')
        self.name = self.__class__.__name__
        self.ts = get_ts()
        if config_file:
            boto.config.read(config_file)

    def notify(self, subject, body=''):
        boto.utils.notify(subject, body)

    def mkdir(self, path):
        if not os.path.isdir(path):
            try:
                os.mkdir(path)
            except:
                boto.log.error('Error creating directory: %s' % path)

    def umount(self, path):
        if os.path.ismount(path):
            self.run('umount %s' % path)

    def run(self, command, notify=True, exit_on_error=False):
        self.last_command = ShellCommand(command)
        if self.last_command.status != 0:
            boto.log.error('Error running command: "%s". Output: "%s"' % (command, self.last_command.output))
            if notify:
                self.notify('Error encountered', \
                    'Error running the following command:\n\t%s\n\nCommand output:\n\t%s' % \
                    (command, self.last_command.output))
            if exit_on_error:
                sys.exit(-1)
        return self.last_command.status

    def main(self):
        pass
```

subject: Add the command that failed to the error log and the error email to help debug problems where the error produces no output.
message: Add the command that failed to the error log and the error email to help debug problems where the error produces no output.
lang: Python
license: mit
repos: appneta/boto,dimdung/boto,j-carl/boto,ekalosak/boto,drbild/boto,acourtney2015/boto,bryx-inc/boto,darjus-amzn/boto,ddzialak/boto,alfredodeza/boto,vijaylbais/boto,clouddocx/boto,israelbenatar/boto,alex/boto,podhmo/boto,cyclecomputing/boto,shipci/boto,kouk/boto,jindongh/boto,felix-d/boto,Timus1712/boto,alex/boto,lochiiconnectivity/boto,Pretio/boto,dablak/boto,weebygames/boto,tpodowd/boto,jamesls/boto,jameslegg/boto,bleib1dj/boto,nikhilraog/boto,pfhayes/boto,yangchaogit/boto,abridgett/boto,serviceagility/boto,tpodowd/boto,campenberger/boto,ryansb/boto,kouk/boto,ocadotechnology/boto,zzzirk/boto,FATruden/boto,revmischa/boto,weka-io/boto,rayluo/boto,shaunbrady/boto,TiVoMaker/boto,rosmo/boto,ric03uec/boto,vishnugonela/boto,lra/boto,drbild/boto,andresriancho/boto,garnaat/boto,awatts/boto,trademob/boto,andresriancho/boto,khagler/boto,nishigori/boto,ramitsurana/boto,SaranyaKarthikeyan/boto,nexusz99/boto,appneta/boto,zachmullen/boto,Asana/boto,rjschwei/boto,s0enke/boto,rjschwei/boto,stevenbrichards/boto,disruptek/boto,jotes/boto,janslow/boto,dablak/boto,elainexmas/boto,jameslegg/boto,lochiiconnectivity/boto,varunarya10/boto,jamesls/boto,disruptek/boto
commit: 80d052df13653943bc2a2369cfbea4cf0e77ce12
old_file: django_tables/__init__.py
new_file: django_tables/__init__.py
old_contents:

```python
__version__ = (0, 3, 'dev')

from memory import *
from models import *
from columns import *
from options import *
```

new_contents:

```python
__version__ = (0, 2, 1)

from memory import *
from models import *
from columns import *
from options import *
```

subject: Prepare to fix a new version.
message: Prepare to fix a new version.
lang: Python
license: bsd-2-clause
repos: PolicyStat/django-tables,miracle2k/django-tables,isolationism/mongoengine-tables
commit: 5255b69390caa52d7eab096cf95d62bb54fd6dd2
old_file: Lib/test/test_unary.py
new_file: Lib/test/test_unary.py
old_contents: (empty)
new_contents:

```python
"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""

import unittest
from test_support import run_unittest

class UnaryOpTestCase(unittest.TestCase):

    def test_negative(self):
        self.assert_(-2 == 0 - 2)
        self.assert_(-0 == 0)
        self.assert_(--2 == 2)
        self.assert_(-2L == 0 - 2L)
        self.assert_(-2.0 == 0 - 2.0)
        self.assert_(-2j == 0 - 2j)

    def test_positive(self):
        self.assert_(+2 == 2)
        self.assert_(+0 == 0)
        self.assert_(++2 == 2)
        self.assert_(+2L == 2L)
        self.assert_(+2.0 == 2.0)
        self.assert_(+2j == 2j)

    def test_invert(self):
        self.assert_(-2 == 0 - 2)
        self.assert_(-0 == 0)
        self.assert_(--2 == 2)
        self.assert_(-2L == 0 - 2L)

    def test_overflow(self):
        self.assertRaises(OverflowError, eval, "+" + ("9" * 32))
        self.assertRaises(OverflowError, eval, "-" + ("9" * 32))
        self.assertRaises(OverflowError, eval, "~" + ("9" * 32))

    def test_bad_types(self):
        for op in '+', '-', '~':
            self.assertRaises(TypeError, eval, op + "'a'")
            self.assertRaises(TypeError, eval, op + "u'a'")

        self.assertRaises(TypeError, eval, "~2j")
        self.assertRaises(TypeError, eval, "~2.0")

run_unittest(UnaryOpTestCase)
```

subject: Test the unary operator changes to the compiler
message: Test the unary operator changes to the compiler
lang: Python
license: mit
repos: sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2""" import unittest from test_support import run_unittest class UnaryOpTestCase(unittest.TestCase): def test_negative(self): self.assert_(-2 == 0 - 2) self.assert_(-0 == 0) self.assert_(--2 == 2) self.assert_(-2L == 0 - 2L) self.assert_(-2.0 == 0 - 2.0) self.assert_(-2j == 0 - 2j) def test_positive(self): self.assert_(+2 == 2) self.assert_(+0 == 0) self.assert_(++2 == 2) self.assert_(+2L == 2L) self.assert_(+2.0 == 2.0) self.assert_(+2j == 2j) def test_invert(self): self.assert_(-2 == 0 - 2) self.assert_(-0 == 0) self.assert_(--2 == 2) self.assert_(-2L == 0 - 2L) def test_overflow(self): self.assertRaises(OverflowError, eval, "+" + ("9" * 32)) self.assertRaises(OverflowError, eval, "-" + ("9" * 32)) self.assertRaises(OverflowError, eval, "~" + ("9" * 32)) def test_bad_types(self): for op in '+', '-', '~': self.assertRaises(TypeError, eval, op + "'a'") self.assertRaises(TypeError, eval, op + "u'a'") self.assertRaises(TypeError, eval, "~2j") self.assertRaises(TypeError, eval, "~2.0") run_unittest(UnaryOpTestCase)
Test the unary operator changes to the compiler
commit: 6c20f8a2c722fca1b2f811d4f06ea5480ec6d945
old_file: telethon/events/messagedeleted.py
new_file: telethon/events/messagedeleted.py
old_contents:

```python
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types


@name_inner_event
class MessageDeleted(EventBuilder):
    """
    Event fired when one or more messages are deleted.
    """
    def build(self, update):
        if isinstance(update, types.UpdateDeleteMessages):
            event = MessageDeleted.Event(
                deleted_ids=update.messages,
                peer=None
            )
        elif isinstance(update, types.UpdateDeleteChannelMessages):
            event = MessageDeleted.Event(
                deleted_ids=update.messages,
                peer=types.PeerChannel(update.channel_id)
            )
        else:
            return

        event._entities = update._entities
        return self._filter_event(event)

    class Event(EventCommon):
        def __init__(self, deleted_ids, peer):
            super().__init__(
                chat_peer=peer, msg_id=(deleted_ids or [0])[0]
            )
            self.deleted_id = None if not deleted_ids else deleted_ids[0]
            self.deleted_ids = deleted_ids
```

new_contents:

```python
from .common import EventBuilder, EventCommon, name_inner_event
from ..tl import types


@name_inner_event
class MessageDeleted(EventBuilder):
    """
    Event fired when one or more messages are deleted.
    """
    def build(self, update):
        if isinstance(update, types.UpdateDeleteMessages):
            event = MessageDeleted.Event(
                deleted_ids=update.messages,
                peer=None
            )
        elif isinstance(update, types.UpdateDeleteChannelMessages):
            event = MessageDeleted.Event(
                deleted_ids=update.messages,
                peer=types.PeerChannel(update.channel_id)
            )
        else:
            return

        event._entities = update._entities
        return self._filter_event(event)

    class Event(EventCommon):
        def __init__(self, deleted_ids, peer):
            super().__init__(
                chat_peer=peer, msg_id=(deleted_ids or [0])[0]
            )
            if peer is None:
                # If it's not a channel ID, then it was private/small group.
                # We can't know which one was exactly unless we logged all
                # messages, but we can indicate that it was maybe either of
                # both by setting them both to True.
                self.is_private = self.is_group = True

            self.deleted_id = None if not deleted_ids else deleted_ids[0]
            self.deleted_ids = deleted_ids
```

subject: Set is private/group=True for messages deleted out of channels
message: Set is private/group=True for messages deleted out of channels
lang: Python
license: mit
repos: LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,expectocode/Telethon,LonamiWebs/Telethon
commit: 5d634511af87150cf1e1b57c52b2bb7136890eb4
old_file: twilix/cmd.py
new_file: twilix/cmd.py
old_contents:

```python
import os
import subprocess
import errno

def cmd_pwd(*args):
    return subprocess.check_output(['pwd'])

def cmd_ls(*args):
    return subprocess.check_output(*args)

def cmd_cd(*args):
    if path[0] == '~':
        path[0] = os.path.expanduser(path[0])
    os.chdir(path[0])
    return run_pwd()

def cmd_mkdir(*args):
    try:
        if path[0][0] == '~':
            path[0] = os.path.expanduser(path[0])
        os.makedirs(path[0])
        return "Director {0} created".format(path)
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise

cmds = {
    'pwd' : cmd_pwd,
    'ls' : cmd_ls,
    'cd' : cmd_cd,
    'mkdir': cmd_mkdir
}

if __name__ == '__main__':
    a = cmd_mkdir("~/Test/ing")
    print a
```

new_contents:

```python
import os
import subprocess
import errno

def cmd_pwd(*args):
    return subprocess.check_output(['pwd'])

def cmd_ls(*args):
    return subprocess.check_output(*args)

def cmd_cd(*args):
    if args[0][1] == '~':
        args[0][1] = os.path.expanduser(args[0][1])
    os.chdir(args[0][1])
    return cmd_pwd()

def cmd_mkdir(*args):
    try:
        if args[0][1][0] == '~':
            args[0][1] = os.path.expanduser(args[0][1])
        os.makedirs(args[0][1])
        return "Director {0} created".format(args[0][1])
    except OSError as exception:
        if exception.errno != errno.EEXIST:
            raise

def cmd_pipe(*args):
    p1 = subprocess.Popen(args[0][0], stdout=subprocess.PIPE)
    p2 = subprocess.Popen(args[0][1], stdin=p1.stdout, stdout=subprocess.PIPE)
    output = p2.communicate()[0]
    return output

cmds = {
    'pwd' : cmd_pwd,
    'ls' : cmd_ls,
    'cd' : cmd_cd,
    'mkdir': cmd_mkdir,
    'pipe': cmd_pipe
}

if __name__ == '__main__':
    a = cmd_mkdir("~/ue/mhacks")
    print a
```

subject: Add option tu run piped commands
message: Add option tu run piped commands
lang: Python
license: mit
repos: ueg1990/twilix,ueg1990/twilix
69e081afd1d2b24d40a4992c6af4538aba86ca1c
brew_journal/brew_journal/urls.py
brew_journal/brew_journal/urls.py
from django.conf.urls import patterns, include, url from brew_journal.views import IndexView from rest_framework_nested import routers from authentication.views import AccountViewSet, LoginView router = routers.SimpleRouter() router.register(r'account', AccountViewSet) urlpatterns = patterns('', # Examples: # url(r'^$', 'brew_journal.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^api/v1/', include(router.urls)), url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'), # Default index view. Must be last to avoid accidentially catching other URLs url(r'^$', IndexView.as_view(), name='index'), )
from django.conf.urls import patterns, include, url
from brew_journal.views import IndexView
from rest_framework_nested import routers
from authentication.views import AccountViewSet, LoginView

router = routers.SimpleRouter()
router.register(r'account', AccountViewSet)

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'brew_journal.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^api/v1/', include(router.urls)),
    url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
    # Default index view. Must be last to avoid accidentially catching other URLs
    url(r'^.*$', IndexView.as_view(), name='index'),
)
Reset the base url matching regex to correctly reroute to the home page when provided an unknown url
Reset the base url matching regex to correctly reroute to the home page when provided an unknown url
Python
apache-2.0
moonboy13/brew-journal,moonboy13/brew-journal,moonboy13/brew-journal
from django.conf.urls import patterns, include, url
from brew_journal.views import IndexView
from rest_framework_nested import routers
from authentication.views import AccountViewSet, LoginView

router = routers.SimpleRouter()
router.register(r'account', AccountViewSet)

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'brew_journal.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^api/v1/', include(router.urls)),
    url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
    # Default index view. Must be last to avoid accidentially catching other URLs
    url(r'^.*$', IndexView.as_view(), name='index'),
)
Reset the base url matching regex to correctly reroute to the home page when provided an unknown url
from django.conf.urls import patterns, include, url
from brew_journal.views import IndexView
from rest_framework_nested import routers
from authentication.views import AccountViewSet, LoginView

router = routers.SimpleRouter()
router.register(r'account', AccountViewSet)

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'brew_journal.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),
    url(r'^api/v1/', include(router.urls)),
    url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
    # Default index view. Must be last to avoid accidentially catching other URLs
    url(r'^$', IndexView.as_view(), name='index'),
)
3e842228beba066000eac536635e7e9d4d87c8e2
instruments/Instrument.py
instruments/Instrument.py
from traits.api import HasTraits
import json

class Instrument(HasTraits):
    """
    Main super-class for all instruments.
    """

    def get_settings(self):
        return self.__getstate__()

    def set_settings(self, settings):
        for key,value in settings.items():
            setattr(self, key, value)
from traits.api import HasTraits, Bool
import json

class Instrument(HasTraits):
    """
    Main super-class for all instruments.
    """
    enabled = Bool(True, desc='Whether the unit is used/enabled.')

    def get_settings(self):
        return self.__getstate__()

    def set_settings(self, settings):
        for key,value in settings.items():
            setattr(self, key, value)
Add enabled to top-level instrument class.
Add enabled to top-level instrument class.
Python
apache-2.0
Plourde-Research-Lab/PyQLab,rmcgurrin/PyQLab,calebjordan/PyQLab,BBN-Q/PyQLab
from traits.api import HasTraits, Bool
import json

class Instrument(HasTraits):
    """
    Main super-class for all instruments.
    """
    enabled = Bool(True, desc='Whether the unit is used/enabled.')

    def get_settings(self):
        return self.__getstate__()

    def set_settings(self, settings):
        for key,value in settings.items():
            setattr(self, key, value)
Add enabled to top-level instrument class.
from traits.api import HasTraits
import json

class Instrument(HasTraits):
    """
    Main super-class for all instruments.
    """

    def get_settings(self):
        return self.__getstate__()

    def set_settings(self, settings):
        for key,value in settings.items():
            setattr(self, key, value)
e9d5143b8751bee1d74a5cfebeca848225426d68
tests/test_special_tokens.py
tests/test_special_tokens.py
from tests import TestCase

class SignedIntegerTokenTestCase(TestCase):
    def setup_method(self, method):
        TestCase.setup_method(self, method)
        self.session.add(
            self.TextItem(name=u'index', content=u'some 12-14')
        )
        self.session.commit()

class TestSignedIntegersWithRemoveHyphens(SignedIntegerTokenTestCase):
    remove_hyphens = True

    def test_with_hyphen_search_term(self):
        assert self.TextItemQuery(
            self.TextItem, self.session
        ).search('12-14').count()

class TestSignedIntegersWithoutRemoveHyphens(SignedIntegerTokenTestCase):
    remove_hyphens = False

    def test_with_hyphen_search_term(self):
        assert not self.TextItemQuery(
            self.TextItem, self.session
        ).search('12-14').count()
Add test case for special tokens
Add test case for special tokens
Python
bsd-3-clause
cristen/sqlalchemy-searchable
from tests import TestCase

class SignedIntegerTokenTestCase(TestCase):
    def setup_method(self, method):
        TestCase.setup_method(self, method)
        self.session.add(
            self.TextItem(name=u'index', content=u'some 12-14')
        )
        self.session.commit()

class TestSignedIntegersWithRemoveHyphens(SignedIntegerTokenTestCase):
    remove_hyphens = True

    def test_with_hyphen_search_term(self):
        assert self.TextItemQuery(
            self.TextItem, self.session
        ).search('12-14').count()

class TestSignedIntegersWithoutRemoveHyphens(SignedIntegerTokenTestCase):
    remove_hyphens = False

    def test_with_hyphen_search_term(self):
        assert not self.TextItemQuery(
            self.TextItem, self.session
        ).search('12-14').count()
Add test case for special tokens
233d52247d89bb39ccc9ada3a591296baae9cff5
notification/backends/web.py
notification/backends/web.py
from notification.backends.base import NotificationBackend

class WebBackend(NotificationBackend):
    slug = u'web'
    display_name = u'E-mail'
    formats = ['short.txt', 'full.txt']

    def send(self, sender, recipient, notice_type, context, on_site=False,
             *args, **kwargs):
        """Always "sends" (i.e. stores to the database), setting on_site
        accordingly.
        """
        # TODO can't do this at the top or we get circular imports
        from notification.models import Notice
        Notice.objects.create(recipient=recipient,
            message=self.format_message(notice_type.label, 'notice.html',
                context),
            notice_type=notice_type,
            on_site=on_site,
            sender=sender)
        return True
from notification.backends.base import NotificationBackend

class WebBackend(NotificationBackend):
    slug = u'web'
    display_name = u'Web'
    formats = ['short.txt', 'full.txt']

    def send(self, sender, recipient, notice_type, context, on_site=False,
             *args, **kwargs):
        """Always "sends" (i.e. stores to the database), setting on_site
        accordingly.
        """
        # TODO can't do this at the top or we get circular imports
        from notification.models import Notice
        Notice.objects.create(recipient=recipient,
            message=self.format_message(notice_type.label, 'notice.html',
                context),
            notice_type=notice_type,
            on_site=on_site,
            sender=sender)
        return True
Use correct slug for Web backend.
Use correct slug for Web backend.
Python
mit
theatlantic/django-notification,theatlantic/django-notification
from notification.backends.base import NotificationBackend

class WebBackend(NotificationBackend):
    slug = u'web'
    display_name = u'Web'
    formats = ['short.txt', 'full.txt']

    def send(self, sender, recipient, notice_type, context, on_site=False,
             *args, **kwargs):
        """Always "sends" (i.e. stores to the database), setting on_site
        accordingly.
        """
        # TODO can't do this at the top or we get circular imports
        from notification.models import Notice
        Notice.objects.create(recipient=recipient,
            message=self.format_message(notice_type.label, 'notice.html',
                context),
            notice_type=notice_type,
            on_site=on_site,
            sender=sender)
        return True
Use correct slug for Web backend.
from notification.backends.base import NotificationBackend

class WebBackend(NotificationBackend):
    slug = u'web'
    display_name = u'E-mail'
    formats = ['short.txt', 'full.txt']

    def send(self, sender, recipient, notice_type, context, on_site=False,
             *args, **kwargs):
        """Always "sends" (i.e. stores to the database), setting on_site
        accordingly.
        """
        # TODO can't do this at the top or we get circular imports
        from notification.models import Notice
        Notice.objects.create(recipient=recipient,
            message=self.format_message(notice_type.label, 'notice.html',
                context),
            notice_type=notice_type,
            on_site=on_site,
            sender=sender)
        return True
12dab867a97241e27eeca44b3919113d379c1850
setup.py
setup.py
try:
    from setuptools import setup, find_packages
except ImportError:
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages

setup(
    name='porick',
    version='0.1',
    description='',
    author='',
    author_email='',
    url='',
    install_requires=[
        "Pylons>=1.0.1rc1",
        "SQLAlchemy==0.6.8",
    ],
    setup_requires=["PasteScript>=1.6.3"],
    packages=find_packages(exclude=['ez_setup']),
    include_package_data=True,
    test_suite='nose.collector',
    package_data={'porick': ['i18n/*/LC_MESSAGES/*.mo']},
    #message_extractors={'porick': [
    #    ('**.py', 'python', None),
    #    ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
    #    ('public/**', 'ignore', None)]},
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points="""
    [paste.app_factory]
    main = porick.config.middleware:make_app

    [paste.app_install]
    main = pylons.util:PylonsInstaller
    """,
)
try:
    from setuptools import setup, find_packages
except ImportError:
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages

setup(
    name='porick',
    version='0.1',
    description='',
    author='',
    author_email='',
    url='',
    install_requires=[
        "Pylons>=1.0.1rc1",
        "SQLAlchemy==0.7.7",
    ],
    setup_requires=["PasteScript>=1.6.3"],
    packages=find_packages(exclude=['ez_setup']),
    include_package_data=True,
    test_suite='nose.collector',
    package_data={'porick': ['i18n/*/LC_MESSAGES/*.mo']},
    #message_extractors={'porick': [
    #    ('**.py', 'python', None),
    #    ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
    #    ('public/**', 'ignore', None)]},
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points="""
    [paste.app_factory]
    main = porick.config.middleware:make_app

    [paste.app_install]
    main = pylons.util:PylonsInstaller
    """,
)
Use sqlalchemy 0.7.7 instead of 0.6
Use sqlalchemy 0.7.7 instead of 0.6
Python
apache-2.0
kopf/porick,kopf/porick,kopf/porick
try:
    from setuptools import setup, find_packages
except ImportError:
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages

setup(
    name='porick',
    version='0.1',
    description='',
    author='',
    author_email='',
    url='',
    install_requires=[
        "Pylons>=1.0.1rc1",
        "SQLAlchemy==0.7.7",
    ],
    setup_requires=["PasteScript>=1.6.3"],
    packages=find_packages(exclude=['ez_setup']),
    include_package_data=True,
    test_suite='nose.collector',
    package_data={'porick': ['i18n/*/LC_MESSAGES/*.mo']},
    #message_extractors={'porick': [
    #    ('**.py', 'python', None),
    #    ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
    #    ('public/**', 'ignore', None)]},
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points="""
    [paste.app_factory]
    main = porick.config.middleware:make_app

    [paste.app_install]
    main = pylons.util:PylonsInstaller
    """,
)
Use sqlalchemy 0.7.7 instead of 0.6
try:
    from setuptools import setup, find_packages
except ImportError:
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages

setup(
    name='porick',
    version='0.1',
    description='',
    author='',
    author_email='',
    url='',
    install_requires=[
        "Pylons>=1.0.1rc1",
        "SQLAlchemy==0.6.8",
    ],
    setup_requires=["PasteScript>=1.6.3"],
    packages=find_packages(exclude=['ez_setup']),
    include_package_data=True,
    test_suite='nose.collector',
    package_data={'porick': ['i18n/*/LC_MESSAGES/*.mo']},
    #message_extractors={'porick': [
    #    ('**.py', 'python', None),
    #    ('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
    #    ('public/**', 'ignore', None)]},
    zip_safe=False,
    paster_plugins=['PasteScript', 'Pylons'],
    entry_points="""
    [paste.app_factory]
    main = porick.config.middleware:make_app

    [paste.app_install]
    main = pylons.util:PylonsInstaller
    """,
)
4ba1ea670b208a734aa3f36c694b20e4bb5d6dda
alembic/versions/14ef1fe33bd_add_checkpoint_evaluation_table.py
alembic/versions/14ef1fe33bd_add_checkpoint_evaluation_table.py
"""Add checkpoint_evaluation table Revision ID: 14ef1fe33bd Revises: 3d0a468b38f Create Date: 2015-07-10 02:35:06.655075 """ # revision identifiers, used by Alembic. revision = '14ef1fe33bd' down_revision = '3d0a468b38f' branch_labels = None depends_on = None from alembic import op from bnd.models import CheckpointEvaluation def upgrade(): # op.create_table(CheckpointEvaluation.__table__) # FIXME: Temporary workaround from bnd import create_app from bnd.models import db app = create_app(__name__) with app.app_context(): db.create_all() def downgrade(): op.drop_table(CheckpointEvaluation.__tablename__)
Write an Alembic migration script
Write an Alembic migration script
Python
mit
suminb/bnd,suminb/bnd,suminb/bnd
"""Add checkpoint_evaluation table Revision ID: 14ef1fe33bd Revises: 3d0a468b38f Create Date: 2015-07-10 02:35:06.655075 """ # revision identifiers, used by Alembic. revision = '14ef1fe33bd' down_revision = '3d0a468b38f' branch_labels = None depends_on = None from alembic import op from bnd.models import CheckpointEvaluation def upgrade(): # op.create_table(CheckpointEvaluation.__table__) # FIXME: Temporary workaround from bnd import create_app from bnd.models import db app = create_app(__name__) with app.app_context(): db.create_all() def downgrade(): op.drop_table(CheckpointEvaluation.__tablename__)
Write an Alembic migration script
02363de7bdd7a069243da09248816f3caf38b2e6
scripts/get-month.py
scripts/get-month.py
#!/usr/bin/env python
import pandas as pd
import pdfplumber
import requests
import datetime
import re
from io import BytesIO

def parse_date(pdf):
    text = pdf.pages[0].extract_text(x_tolerance=5)
    date_pat = r"UPDATED:\s+As of (.+)\n"
    updated_date = re.search(date_pat, text).group(1)
    d = datetime.datetime.strptime(updated_date, "%B %d, %Y")
    return d

if __name__ == "__main__":
    URL = "https://www.fbi.gov/about-us/cjis/nics/reports/active_records_in_the_nics-index.pdf"
    raw = requests.get(URL).content
    pdf = pdfplumber.load(BytesIO(raw))
    d = parse_date(pdf)
    print(d.strftime("%Y-%m"))
#!/usr/bin/env python
import pandas as pd
import pdfplumber
import requests
import datetime
import re
from io import BytesIO

def parse_date(pdf):
    text = pdf.pages[0].extract_text(x_tolerance=5)
    date_pat = r"UPDATED:\s+As of (.+)\n"
    updated_date = re.search(date_pat, text).group(1)
    d = datetime.datetime.strptime(updated_date, "%B %d, %Y")
    return d

if __name__ == "__main__":
    URL = "https://www.fbi.gov/file-repository/active_records_in_the_nics-index.pdf"
    raw = requests.get(URL).content
    pdf = pdfplumber.load(BytesIO(raw))
    d = parse_date(pdf)
    print(d.strftime("%Y-%m"))
Update "Active Records" PDF URL
Update "Active Records" PDF URL
Python
mit
BuzzFeedNews/nics-firearm-background-checks
#!/usr/bin/env python
import pandas as pd
import pdfplumber
import requests
import datetime
import re
from io import BytesIO

def parse_date(pdf):
    text = pdf.pages[0].extract_text(x_tolerance=5)
    date_pat = r"UPDATED:\s+As of (.+)\n"
    updated_date = re.search(date_pat, text).group(1)
    d = datetime.datetime.strptime(updated_date, "%B %d, %Y")
    return d

if __name__ == "__main__":
    URL = "https://www.fbi.gov/file-repository/active_records_in_the_nics-index.pdf"
    raw = requests.get(URL).content
    pdf = pdfplumber.load(BytesIO(raw))
    d = parse_date(pdf)
    print(d.strftime("%Y-%m"))
Update "Active Records" PDF URL #!/usr/bin/env python import pandas as pd import pdfplumber import requests import datetime import re from io import BytesIO def parse_date(pdf): text = pdf.pages[0].extract_text(x_tolerance=5) date_pat = r"UPDATED:\s+As of (.+)\n" updated_date = re.search(date_pat, text).group(1) d = datetime.datetime.strptime(updated_date, "%B %d, %Y") return d if __name__ == "__main__": URL = "https://www.fbi.gov/about-us/cjis/nics/reports/active_records_in_the_nics-index.pdf" raw = requests.get(URL).content pdf = pdfplumber.load(BytesIO(raw)) d = parse_date(pdf) print(d.strftime("%Y-%m"))
d30c3b9c574566d9c69fc1322b6a2dfec3a6eb67
opps/core/admin/article.py
opps/core/admin/article.py
# -*- coding: utf-8 -*-
from django.contrib import admin
from opps.core.models import Post

class PostAdmin(admin.ModelAdmin):
    prepopulated_fields = {"slug": ("title",)}

    def save_model(self, request, obj, form, change):
        if not obj.user:
            obj.user = request.user
        obj.save()

admin.site.register(Post, PostAdmin)
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post
from redactor.widgets import RedactorEditor

class PostAdminForm(forms.ModelForm):
    class Meta:
        model = Post
        widgets = {'content': RedactorEditor(),}

class PostAdmin(admin.ModelAdmin):
    form = PostAdminForm
    prepopulated_fields = {"slug": ("title",)}

    def save_model(self, request, obj, form, change):
        if not obj.user:
            obj.user = request.user
        obj.save()

admin.site.register(Post, PostAdmin)
Create post admin form, custom content field add texteditor
Create post admin form, custom content field add texteditor
Python
mit
YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post
from redactor.widgets import RedactorEditor

class PostAdminForm(forms.ModelForm):
    class Meta:
        model = Post
        widgets = {'content': RedactorEditor(),}

class PostAdmin(admin.ModelAdmin):
    form = PostAdminForm
    prepopulated_fields = {"slug": ("title",)}

    def save_model(self, request, obj, form, change):
        if not obj.user:
            obj.user = request.user
        obj.save()

admin.site.register(Post, PostAdmin)
Create post admin form, custom content field add texteditor
# -*- coding: utf-8 -*-
from django.contrib import admin
from opps.core.models import Post

class PostAdmin(admin.ModelAdmin):
    prepopulated_fields = {"slug": ("title",)}

    def save_model(self, request, obj, form, change):
        if not obj.user:
            obj.user = request.user
        obj.save()

admin.site.register(Post, PostAdmin)
fd77e3211e2298457b9778f409c56c70a36bf3db
farmers_api/farmers/views.py
farmers_api/farmers/views.py
from rest_framework import viewsets

from .models import Farmer
from .serializers import FarmerSerializer

class FarmerViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = Farmer.objects.all()
    serializer_class = FarmerSerializer
    filter_fields = ('town',)
from rest_framework import viewsets, permissions

from .models import Farmer
from .serializers import FarmerSerializer

class FarmerViewSet(viewsets.ModelViewSet):
    queryset = Farmer.objects.all()
    serializer_class = FarmerSerializer
    filter_fields = ('town',)
    permissions = permissions.DjangoModelPermissionsOrAnonReadOnly
Add permission settings on FarmerViewSet
Add permission settings on FarmerViewSet
Python
bsd-2-clause
tm-kn/farmers-api
from rest_framework import viewsets, permissions

from .models import Farmer
from .serializers import FarmerSerializer

class FarmerViewSet(viewsets.ModelViewSet):
    queryset = Farmer.objects.all()
    serializer_class = FarmerSerializer
    filter_fields = ('town',)
    permissions = permissions.DjangoModelPermissionsOrAnonReadOnly
Add permission settings on FarmerViewSet
from rest_framework import viewsets

from .models import Farmer
from .serializers import FarmerSerializer

class FarmerViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = Farmer.objects.all()
    serializer_class = FarmerSerializer
    filter_fields = ('town',)
2f4483440a98f34b650ea09a75f6dc941548f8b2
zeus/vcs/db.py
zeus/vcs/db.py
import asyncpg

class Database:
    def __init__(self, host: str, port: int, user: str, password: str, database: str):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self._conn = None

    async def connect(self):
        self._conn = await asyncpg.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            database=self.database,
        )
        return self._conn

    async def close(self):
        if self._conn:
            await self._conn.close()
            self._conn = None

    async def fetch(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.fetch(*args, **kwargs)

    async def execute(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.execute(*args, **kwargs)

    async def transaction(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return conn.transaction(*args, **kwargs)
import asyncpg

class Database:
    def __init__(self, host: str, port: int, user: str, password: str, database: str):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self._conn = None

    async def connect(self):
        self._conn = await asyncpg.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            database=self.database,
            # https://github.com/MagicStack/asyncpg/issues/76
            # we want to rely on pgbouncer
            max_cached_statement_lifetime=0,
        )
        return self._conn

    async def close(self):
        if self._conn:
            await self._conn.close()
            self._conn = None

    async def fetch(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.fetch(*args, **kwargs)

    async def execute(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.execute(*args, **kwargs)

    async def transaction(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return conn.transaction(*args, **kwargs)
Disable asyncpg prepared statement cache
Disable asyncpg prepared statement cache
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
import asyncpg

class Database:
    def __init__(self, host: str, port: int, user: str, password: str, database: str):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self._conn = None

    async def connect(self):
        self._conn = await asyncpg.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            database=self.database,
            # https://github.com/MagicStack/asyncpg/issues/76
            # we want to rely on pgbouncer
            max_cached_statement_lifetime=0,
        )
        return self._conn

    async def close(self):
        if self._conn:
            await self._conn.close()
            self._conn = None

    async def fetch(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.fetch(*args, **kwargs)

    async def execute(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.execute(*args, **kwargs)

    async def transaction(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return conn.transaction(*args, **kwargs)
Disable asyncpg prepared statement cache
import asyncpg

class Database:
    def __init__(self, host: str, port: int, user: str, password: str, database: str):
        self.host = host
        self.port = port
        self.user = user
        self.password = password
        self.database = database
        self._conn = None

    async def connect(self):
        self._conn = await asyncpg.connect(
            host=self.host,
            port=self.port,
            user=self.user,
            password=self.password,
            database=self.database,
        )
        return self._conn

    async def close(self):
        if self._conn:
            await self._conn.close()
            self._conn = None

    async def fetch(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.fetch(*args, **kwargs)

    async def execute(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return await conn.execute(*args, **kwargs)

    async def transaction(self, *args, **kwargs):
        if not self._conn:
            conn = await self.connect()
        else:
            conn = self._conn
        return conn.transaction(*args, **kwargs)
bae805afa67bbced9a968b12c38e1b22e05f8f61
Findminn.py
Findminn.py
__author__ = "Claytonbat" from random import randrange import time def findMin(alist): overallmin = alist[0] for i in alist: issmallest = True for j in alist : if i > j: issmallest = False if issmallest: overallmin = i return overallmin def findMin1(alist): temp_min = alist[0] for i in alist: if temp_min > i: temp_min = i return temp_min #print(findMin([5,4,3,2,1,0])) for listSize in range(1000,10001,1000): alist = [randrange(100000) for x in range(listSize)] start = time.time() print (findMin(alist)) end = time.time() start1 = time.time() print (findMin1(alist)) end1 = time.time() print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list.
Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list. Add author and email.
Python
mit
mcsoo/Exercises
__author__ = "Claytonbat" from random import randrange import time def findMin(alist): overallmin = alist[0] for i in alist: issmallest = True for j in alist : if i > j: issmallest = False if issmallest: overallmin = i return overallmin def findMin1(alist): temp_min = alist[0] for i in alist: if temp_min > i: temp_min = i return temp_min #print(findMin([5,4,3,2,1,0])) for listSize in range(1000,10001,1000): alist = [randrange(100000) for x in range(listSize)] start = time.time() print (findMin(alist)) end = time.time() start1 = time.time() print (findMin1(alist)) end1 = time.time() print ("Size: %d time O(n): %f, time O(n2): %f" %(listSize, (end1 - start1), (end - start)))
Add FindMinn.py file, which contains O(n) and O(n**2) methods in finding the minimum number in a random list. Add author and email.
6bec22cd51288c94dff40cf0c973b975538040d5
tests/integration/minion/test_timeout.py
tests/integration/minion/test_timeout.py
# -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''

# Import Python libs
from __future__ import absolute_import
import os
import sys

import salt.utils.platform

# Import Salt Testing libs
from tests.support.case import ShellCase

class MinionTimeoutTestCase(ShellCase):
    '''
    Test minion timing functions
    '''
    def test_long_running_job(self):
        '''
        Test that we will wait longer than the job timeout for a minion
        to return.
        '''
        # Launch the command
        sleep_length = 30
        if salt.utils.platform.is_windows():
            popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
        else:
            popen_kwargs = None
        ret = self.run_salt(
            'minion test.sleep {0}'.format(sleep_length),
            timeout=45,
            catch_stderr=True,
            popen_kwargs=popen_kwargs,
        )
        self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
                ' may have returned error: {0}'.format(ret))
        self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
        self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
                '{0} seconds. ret={1}'.format(sleep_length, ret))
# -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''

# Import Python libs
from __future__ import absolute_import
import os
import sys

import salt.utils.platform

# Import Salt Testing libs
from tests.support.case import ShellCase

class MinionTimeoutTestCase(ShellCase):
    '''
    Test minion timing functions
    '''
    def test_long_running_job(self):
        '''
        Test that we will wait longer than the job timeout for a minion
        to return.
        '''
        # Launch the command
        sleep_length = 30
        if salt.utils.platform.is_windows():
            popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
        else:
            popen_kwargs = None
        ret = self.run_salt(
            'minion test.sleep {0}'.format(sleep_length),
            timeout=90,
            catch_stderr=True,
            popen_kwargs=popen_kwargs,
        )
        self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
                ' may have returned error: {0}'.format(ret))
        self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
        self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
                '{0} seconds. ret={1}'.format(sleep_length, ret))
Increase timeout for test_long_running_job test
Increase timeout for test_long_running_job test
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
# -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''

# Import Python libs
from __future__ import absolute_import
import os
import sys

import salt.utils.platform

# Import Salt Testing libs
from tests.support.case import ShellCase

class MinionTimeoutTestCase(ShellCase):
    '''
    Test minion timing functions
    '''
    def test_long_running_job(self):
        '''
        Test that we will wait longer than the job timeout for a minion
        to return.
        '''
        # Launch the command
        sleep_length = 30
        if salt.utils.platform.is_windows():
            popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
        else:
            popen_kwargs = None
        ret = self.run_salt(
            'minion test.sleep {0}'.format(sleep_length),
            timeout=90,
            catch_stderr=True,
            popen_kwargs=popen_kwargs,
        )
        self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
                ' may have returned error: {0}'.format(ret))
        self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
        self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
                '{0} seconds. ret={1}'.format(sleep_length, ret))
Increase timeout for test_long_running_job test
# -*- coding: utf-8 -*-
'''
Tests for various minion timeouts
'''

# Import Python libs
from __future__ import absolute_import
import os
import sys

import salt.utils.platform

# Import Salt Testing libs
from tests.support.case import ShellCase

class MinionTimeoutTestCase(ShellCase):
    '''
    Test minion timing functions
    '''
    def test_long_running_job(self):
        '''
        Test that we will wait longer than the job timeout for a minion
        to return.
        '''
        # Launch the command
        sleep_length = 30
        if salt.utils.platform.is_windows():
            popen_kwargs = {'env': dict(os.environ, PYTHONPATH=';'.join(sys.path))}
        else:
            popen_kwargs = None
        ret = self.run_salt(
            'minion test.sleep {0}'.format(sleep_length),
            timeout=45,
            catch_stderr=True,
            popen_kwargs=popen_kwargs,
        )
        self.assertTrue(isinstance(ret[0], list), 'Return is not a list. Minion'
                ' may have returned error: {0}'.format(ret))
        self.assertEqual(len(ret[0]), 2, 'Standard out wrong length {}'.format(ret))
        self.assertTrue('True' in ret[0][1], 'Minion did not return True after '
                '{0} seconds. ret={1}'.format(sleep_length, ret))
fac97130396057802f1ebf21928667a971395ba9
examples/ex_tabler.py
examples/ex_tabler.py
from tabler import Tabler

table = """<table>
  <thead>
    <tr>
      <th>Number</th>
      <th>First Name</th>
      <th>Last Name</th>
      <th>Phone Number</th>
    </tr>
    <tr>
      <td>1</td>
      <td>Bob</td>
      <td>Evans</td>
      <td>(847) 332-0461</td>
    </tr>
    <tr>
      <td>2</td>
      <td>Mary</td>
      <td>Newell</td>
      <td>(414) 617-9516</td>
    </tr>
  </thead>
</table>"""

parser = Tabler(table)
print "There are", len(parser.rows), "rows."
print "First names:"
for row in parser.rows:
    print row["first_name"]
Add a basic example of the Tabler API.
Add a basic example of the Tabler API.
Python
bsd-3-clause
bschmeck/tabler
from tabler import Tabler

table = """<table>
  <thead>
    <tr>
      <th>Number</th>
      <th>First Name</th>
      <th>Last Name</th>
      <th>Phone Number</th>
    </tr>
    <tr>
      <td>1</td>
      <td>Bob</td>
      <td>Evans</td>
      <td>(847) 332-0461</td>
    </tr>
    <tr>
      <td>2</td>
      <td>Mary</td>
      <td>Newell</td>
      <td>(414) 617-9516</td>
    </tr>
  </thead>
</table>"""

parser = Tabler(table)
print "There are", len(parser.rows), "rows."
print "First names:"
for row in parser.rows:
    print row["first_name"]
Add a basic example of the Tabler API.
5d0c7b7d209b0487c0a12e995a11efa9d695a50e
pq/management/commands/pqcreate.py
pq/management/commands/pqcreate.py
from django.core.management.base import BaseCommand
from optparse import make_option

from django.conf import settings

from pq.queue import PQ_DEFAULT_JOB_TIMEOUT

class Command(BaseCommand):
    help = "Create a queue"
    args = "<queue queue ...>"
    option_list = BaseCommand.option_list + (
        make_option('--queue', '-q', dest='queue',
            default='', help='Specify the queue [default]'),
        make_option('--conn', '-c', dest='conn',
            default='default', help='Specify a connection [default]'),
        make_option('--scheduled', action="store_true", default=False,
            dest="scheduled", help="Schedule jobs in the future"),
        make_option('--timeout', '-t', type="int", dest='timeout',
            help="Default timeout in seconds"),
        make_option('--serial', action="store_true", default=False,
            dest='serial', help="A timeout in seconds"),
    )

    def handle(self, *args, **options):
        """
        The actual logic of the command. Subclasses must implement
        this method.
        """
        from pq.queue import Queue, SerialQueue
        verbosity = int(options.get('verbosity', 1))
        timeout = options.get('timeout')
        for queue in args:
            if options['serial']:
                q = SerialQueue.create(queue)
            else:
                q = Queue.create(queue)
            q.connection = options.get('conn')
            q.scheduled = options.get('scheduled')
            if timeout:
                q.default_timeout = timeout
            q.save()
Add a command to pre-create queues.
Add a command to pre-create queues.
Python
bsd-2-clause
bretth/django-pq
from django.core.management.base import BaseCommand
from optparse import make_option

from django.conf import settings

from pq.queue import PQ_DEFAULT_JOB_TIMEOUT

class Command(BaseCommand):
    help = "Create a queue"
    args = "<queue queue ...>"
    option_list = BaseCommand.option_list + (
        make_option('--queue', '-q', dest='queue',
            default='', help='Specify the queue [default]'),
        make_option('--conn', '-c', dest='conn',
            default='default', help='Specify a connection [default]'),
        make_option('--scheduled', action="store_true", default=False,
            dest="scheduled", help="Schedule jobs in the future"),
        make_option('--timeout', '-t', type="int", dest='timeout',
            help="Default timeout in seconds"),
        make_option('--serial', action="store_true", default=False,
            dest='serial', help="A timeout in seconds"),
    )

    def handle(self, *args, **options):
        """
        The actual logic of the command. Subclasses must implement
        this method.
        """
        from pq.queue import Queue, SerialQueue
        verbosity = int(options.get('verbosity', 1))
        timeout = options.get('timeout')
        for queue in args:
            if options['serial']:
                q = SerialQueue.create(queue)
            else:
                q = Queue.create(queue)
            q.connection = options.get('conn')
            q.scheduled = options.get('scheduled')
            if timeout:
                q.default_timeout = timeout
            q.save()
Add a command to pre-create queues.
601d0183674b555c231954dbf92955e8d8918d0a
contrib_bots/bots/wikipedia/test_wikipedia.py
contrib_bots/bots/wikipedia/test_wikipedia.py
#!/usr/bin/env python

from __future__ import absolute_import
from __future__ import print_function

import os
import sys

our_dir = os.path.dirname(os.path.abspath(__file__))

# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
    sys.path.insert(0, '..')

from bots_test_lib import BotTestCase

class TestWikipediaBot(BotTestCase):
    bot_name = "wikipedia"

    def test_bot(self):
        self.assert_bot_output(
            {'content': "foo", 'type': "private", 'sender_email': "foo"},
            'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
        )
        self.assert_bot_output(
            {'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
            'Please enter your message after @mention-bot'
        )
        self.assert_bot_output(
            {'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
            'I am sorry. The search term you provided is not found :slightly_frowning_face:'
        )
        self.assert_bot_output(
            {'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
            'For search term "123", https://en.wikipedia.org/wiki/123'
        )
Add tests for wikipedia bot in contrib_bots.
testsuite: Add tests for wikipedia bot in contrib_bots. Add test file 'Test_wikipedia.py'. Since wikipedia links for the same query may differ according to relevance, this test will also be written by mocking HTTP traffic. But this can work for now.
Python
apache-2.0
jackrzhang/zulip,verma-varsha/zulip,brockwhittaker/zulip,rishig/zulip,vaidap/zulip,andersk/zulip,dhcrzf/zulip,zulip/zulip,shubhamdhama/zulip,showell/zulip,jrowan/zulip,Galexrt/zulip,rht/zulip,timabbott/zulip,rht/zulip,rishig/zulip,shubhamdhama/zulip,hackerkid/zulip,vabs22/zulip,zulip/zulip,andersk/zulip,kou/zulip,rishig/zulip,verma-varsha/zulip,jrowan/zulip,Galexrt/zulip,rht/zulip,vaidap/zulip,kou/zulip,Galexrt/zulip,vaidap/zulip,synicalsyntax/zulip,jackrzhang/zulip,brainwane/zulip,synicalsyntax/zulip,punchagan/zulip,vaidap/zulip,zulip/zulip,rishig/zulip,synicalsyntax/zulip,shubhamdhama/zulip,Galexrt/zulip,tommyip/zulip,zulip/zulip,synicalsyntax/zulip,rishig/zulip,tommyip/zulip,verma-varsha/zulip,andersk/zulip,eeshangarg/zulip,kou/zulip,dhcrzf/zulip,brockwhittaker/zulip,amanharitsh123/zulip,jrowan/zulip,andersk/zulip,tommyip/zulip,brainwane/zulip,eeshangarg/zulip,andersk/zulip,vabs22/zulip,brainwane/zulip,andersk/zulip,showell/zulip,jackrzhang/zulip,punchagan/zulip,Galexrt/zulip,jackrzhang/zulip,mahim97/zulip,showell/zulip,dhcrzf/zulip,timabbott/zulip,mahim97/zulip,timabbott/zulip,showell/zulip,kou/zulip,eeshangarg/zulip,timabbott/zulip,timabbott/zulip,shubhamdhama/zulip,kou/zulip,dhcrzf/zulip,eeshangarg/zulip,Galexrt/zulip,vabs22/zulip,amanharitsh123/zulip,rht/zulip,hackerkid/zulip,zulip/zulip,brockwhittaker/zulip,synicalsyntax/zulip,amanharitsh123/zulip,showell/zulip,eeshangarg/zulip,shubhamdhama/zulip,showell/zulip,verma-varsha/zulip,hackerkid/zulip,vaidap/zulip,jrowan/zulip,jackrzhang/zulip,punchagan/zulip,rishig/zulip,kou/zulip,dhcrzf/zulip,jackrzhang/zulip,eeshangarg/zulip,punchagan/zulip,brockwhittaker/zulip,eeshangarg/zulip,jackrzhang/zulip,vabs22/zulip,zulip/zulip,punchagan/zulip,showell/zulip,rht/zulip,vabs22/zulip,tommyip/zulip,hackerkid/zulip,verma-varsha/zulip,synicalsyntax/zulip,amanharitsh123/zulip,kou/zulip,shubhamdhama/zulip,brainwane/zulip,vabs22/zulip,amanharitsh123/zulip,rht/zulip,punchagan/zulip,tommyip/zulip,synicalsyntax/zulip,jrowan/zulip,vaidap/zulip,dhcrzf/zulip,verma-varsha/zulip,timabbott/zulip,brainwane/zulip,shubhamdhama/zulip,jrowan/zulip,brainwane/zulip,rishig/zulip,rht/zulip,tommyip/zulip,hackerkid/zulip,punchagan/zulip,Galexrt/zulip,mahim97/zulip,timabbott/zulip,hackerkid/zulip,brainwane/zulip,brockwhittaker/zulip,brockwhittaker/zulip,amanharitsh123/zulip,dhcrzf/zulip,mahim97/zulip,tommyip/zulip,mahim97/zulip,hackerkid/zulip,zulip/zulip,mahim97/zulip,andersk/zulip
#!/usr/bin/env python

from __future__ import absolute_import
from __future__ import print_function

import os
import sys

our_dir = os.path.dirname(os.path.abspath(__file__))

# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
    sys.path.insert(0, '..')

from bots_test_lib import BotTestCase

class TestWikipediaBot(BotTestCase):
    bot_name = "wikipedia"

    def test_bot(self):
        self.assert_bot_output(
            {'content': "foo", 'type': "private", 'sender_email': "foo"},
            'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
        )
        self.assert_bot_output(
            {'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
            'Please enter your message after @mention-bot'
        )
        self.assert_bot_output(
            {'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
            'I am sorry. The search term you provided is not found :slightly_frowning_face:'
        )
        self.assert_bot_output(
            {'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
            'For search term "123", https://en.wikipedia.org/wiki/123'
        )
testsuite: Add tests for wikipedia bot in contrib_bots. Add test file 'Test_wikipedia.py'. Since wikipedia links for the same query may differ according to relevance, this test will also be written by mocking HTTP traffic. But this can work for now.
815c246f1ef185e24991efc4075b2358c7955c6c
onadata/libs/utils/storage.py
onadata/libs/utils/storage.py
# coding: utf-8
import os
import shutil

from django.core.files.storage import get_storage_class

def delete_user_storage(username):
    storage = get_storage_class()()

    def _recursive_delete(path):
        directories, files = storage.listdir(path)
        for file_ in files:
            storage.delete(os.path.join(path, file_))
        for directory in directories:
            _recursive_delete(os.path.join(path, directory))

    if storage.__class__.__name__ == 'FileSystemStorage':
        if storage.exists(username):
            shutil.rmtree(storage.path(username))
    else:
        _recursive_delete(username)

def user_storage_exists(username):
    storage = get_storage_class()()
    return storage.exists(username)
# coding: utf-8
import os
import shutil

from django.core.files.storage import FileSystemStorage, get_storage_class

def delete_user_storage(username):
    storage = get_storage_class()()

    def _recursive_delete(path):
        directories, files = storage.listdir(path)
        for file_ in files:
            storage.delete(os.path.join(path, file_))
        for directory in directories:
            _recursive_delete(os.path.join(path, directory))

    if isinstance(storage, FileSystemStorage):
        if storage.exists(username):
            shutil.rmtree(storage.path(username))
    else:
        _recursive_delete(username)

def user_storage_exists(username):
    storage = get_storage_class()()
    return storage.exists(username)
Use `isinstance()` at the cost of an extra import
Use `isinstance()` at the cost of an extra import
Python
bsd-2-clause
kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat
# coding: utf-8
import os
import shutil

from django.core.files.storage import FileSystemStorage, get_storage_class

def delete_user_storage(username):
    storage = get_storage_class()()

    def _recursive_delete(path):
        directories, files = storage.listdir(path)
        for file_ in files:
            storage.delete(os.path.join(path, file_))
        for directory in directories:
            _recursive_delete(os.path.join(path, directory))

    if isinstance(storage, FileSystemStorage):
        if storage.exists(username):
            shutil.rmtree(storage.path(username))
    else:
        _recursive_delete(username)

def user_storage_exists(username):
    storage = get_storage_class()()
    return storage.exists(username)
Use `isinstance()` at the cost of an extra import
# coding: utf-8
import os
import shutil

from django.core.files.storage import get_storage_class

def delete_user_storage(username):
    storage = get_storage_class()()

    def _recursive_delete(path):
        directories, files = storage.listdir(path)
        for file_ in files:
            storage.delete(os.path.join(path, file_))
        for directory in directories:
            _recursive_delete(os.path.join(path, directory))

    if storage.__class__.__name__ == 'FileSystemStorage':
        if storage.exists(username):
            shutil.rmtree(storage.path(username))
    else:
        _recursive_delete(username)

def user_storage_exists(username):
    storage = get_storage_class()()
    return storage.exists(username)
cb4d916a23792f92f0929693d58350e7b045fb3d
plugins/coinflip.py
plugins/coinflip.py
import random

from plugin import CommandPlugin, PluginException

class CoinFlip(CommandPlugin):
    """
    Flip a coin
    """
    max_coin_flips = 1000000

    def __init__(self):
        CommandPlugin.__init__(self)
        self.triggers = ['coin', 'coinflip']
        self.short_help = 'Flip a coin'
        self.help = 'Flip a coin or number of coins'
        self.help_example = ['!coin', '!coinflip 5']

    def on_command(self, bot, event, response):
        args = event['text']
        if not args:
            response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
        else:
            try:
                tosses = int(args)
                if tosses <= 0:
                    raise PluginException('Invalid argument! No coins to flip!')
                # Avoid taking too long to generate coin flips
                if tosses > CoinFlip.max_coin_flips:
                    raise PluginException(
                        'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
                rand_bits = bin(random.getrandbits(tosses))[2:]
                heads = rand_bits.count('0')
                tails = rand_bits.count('1')
                response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
                    tosses, heads, tails)
            except ValueError:
                raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')

        response['mrkdwn_in'] = ['text']
        bot.sc.api_call('chat.postMessage', **response)
Add CoinFlip plugin for some coin flipping fun
Add CoinFlip plugin for some coin flipping fun
Python
mit
Brottweiler/nimbus,itsmartin/nimbus,Plastix/nimbus,bcbwilla/nimbus
import random

from plugin import CommandPlugin, PluginException

class CoinFlip(CommandPlugin):
    """
    Flip a coin
    """
    max_coin_flips = 1000000

    def __init__(self):
        CommandPlugin.__init__(self)
        self.triggers = ['coin', 'coinflip']
        self.short_help = 'Flip a coin'
        self.help = 'Flip a coin or number of coins'
        self.help_example = ['!coin', '!coinflip 5']

    def on_command(self, bot, event, response):
        args = event['text']
        if not args:
            response['text'] = 'A coin is flipped and it is *_%s_*!' % random.choice(['Heads', 'Tails'])
        else:
            try:
                tosses = int(args)
                if tosses <= 0:
                    raise PluginException('Invalid argument! No coins to flip!')
                # Avoid taking too long to generate coin flips
                if tosses > CoinFlip.max_coin_flips:
                    raise PluginException(
                        'Invalid argument! Number of coins to flip is too large! Max flips is `%s`.' % CoinFlip.max_coin_flips)
                rand_bits = bin(random.getrandbits(tosses))[2:]
                heads = rand_bits.count('0')
                tails = rand_bits.count('1')
                response['text'] = '*_%s_* coins are flipped and the result is *_%s Heads_* and *_%s Tails_*!' % (
                    tosses, heads, tails)
            except ValueError:
                raise PluginException('Invalid argument! Specify a *number* of coins to flip. E.g. `!coin 5`')

        response['mrkdwn_in'] = ['text']
        bot.sc.api_call('chat.postMessage', **response)
Add CoinFlip plugin for some coin flipping fun
d20f04d65437138559445bf557be52a87690c7f2
test/checker/test_checker_ipaddress.py
test/checker/test_checker_ipaddress.py
# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""

from __future__ import unicode_literals
from ipaddress import ip_address
import itertools

import pytest
import six
from typepy import (
    Typecode,
    StrictLevel,
)
from typepy.type import IpAddress

nan = float("nan")
inf = float("inf")

class Test_IpAddress_is_type:

    @pytest.mark.parametrize(
        ["value", "strict_level", "expected"],
        list(itertools.product(
            ["", " ", six.MAXSIZE, str(six.MAXSIZE), inf, nan, None],
            [StrictLevel.MIN, StrictLevel.MAX],
            [False]
        )) + list(itertools.product(
            [
                "127.0.0.1",
                "::1",
                ip_address("127.0.0.1"),
                ip_address("::1"),
            ],
            [StrictLevel.MIN, StrictLevel.MAX],
            [True],
        )))
    def test_normal(self, value, strict_level, expected):
        type_checker = IpAddress(value, strict_level=strict_level)
        assert type_checker.is_type() == expected
        assert type_checker.typecode == Typecode.IP_ADDRESS
# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""

from __future__ import unicode_literals
from ipaddress import ip_address
import itertools

import pytest
import six
from typepy import (
    Typecode,
    StrictLevel,
)
from typepy.type import IpAddress

nan = float("nan")
inf = float("inf")

class Test_IpAddress_is_type(object):

    @pytest.mark.parametrize(
        ["value", "strict_level", "expected"],
        list(itertools.product(
            ["", " ", six.MAXSIZE, str(six.MAXSIZE), inf, nan, None],
            [StrictLevel.MIN, StrictLevel.MAX],
            [False]
        )) + list(itertools.product(
            [
                "127.0.0.1",
                "::1",
                ip_address("127.0.0.1"),
                ip_address("::1"),
            ],
            [StrictLevel.MIN, StrictLevel.MAX],
            [True],
        )))
    def test_normal(self, value, strict_level, expected):
        type_checker = IpAddress(value, strict_level=strict_level)
        assert type_checker.is_type() == expected
        assert type_checker.typecode == Typecode.IP_ADDRESS
Change class definitions from old style to new style
Change class definitions from old style to new style
Python
mit
thombashi/typepy
# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""

from __future__ import unicode_literals
from ipaddress import ip_address
import itertools

import pytest
import six
from typepy import (
    Typecode,
    StrictLevel,
)
from typepy.type import IpAddress

nan = float("nan")
inf = float("inf")

class Test_IpAddress_is_type(object):

    @pytest.mark.parametrize(
        ["value", "strict_level", "expected"],
        list(itertools.product(
            ["", " ", six.MAXSIZE, str(six.MAXSIZE), inf, nan, None],
            [StrictLevel.MIN, StrictLevel.MAX],
            [False]
        )) + list(itertools.product(
            [
                "127.0.0.1",
                "::1",
                ip_address("127.0.0.1"),
                ip_address("::1"),
            ],
            [StrictLevel.MIN, StrictLevel.MAX],
            [True],
        )))
    def test_normal(self, value, strict_level, expected):
        type_checker = IpAddress(value, strict_level=strict_level)
        assert type_checker.is_type() == expected
        assert type_checker.typecode == Typecode.IP_ADDRESS
Change class definitions from old style to new style
# encoding: utf-8

"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""

from __future__ import unicode_literals
from ipaddress import ip_address
import itertools

import pytest
import six
from typepy import (
    Typecode,
    StrictLevel,
)
from typepy.type import IpAddress

nan = float("nan")
inf = float("inf")

class Test_IpAddress_is_type:

    @pytest.mark.parametrize(
        ["value", "strict_level", "expected"],
        list(itertools.product(
            ["", " ", six.MAXSIZE, str(six.MAXSIZE), inf, nan, None],
            [StrictLevel.MIN, StrictLevel.MAX],
            [False]
        )) + list(itertools.product(
            [
                "127.0.0.1",
                "::1",
                ip_address("127.0.0.1"),
                ip_address("::1"),
            ],
            [StrictLevel.MIN, StrictLevel.MAX],
            [True],
        )))
    def test_normal(self, value, strict_level, expected):
        type_checker = IpAddress(value, strict_level=strict_level)
        assert type_checker.is_type() == expected
        assert type_checker.typecode == Typecode.IP_ADDRESS
53b9eff3ffc1768d3503021e7248351e24d59af7
tests/httpd.py
tests/httpd.py
import SimpleHTTPServer
import BaseHTTPServer

class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    def do_POST(s):
        s.send_response(200)
        s.end_headers()

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class(('0.0.0.0', 8328), Handler)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.server_close()
import BaseHTTPServer

class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_POST(self):
        content_type = self.headers.getheader('content-type')
        content_length = int(self.headers.getheader('content-length'))
        self.send_response(200)
        self.send_header('Content-Type', content_type)
        self.send_header('Content-Length', str(content_length))
        self.end_headers()
        self.wfile.write(self.rfile.read(content_length))

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class(('0.0.0.0', 8328), Handler)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.server_close()
Fix test http server, change to echo back request body
Fix test http server, change to echo back request body
Python
bsd-2-clause
chop-dbhi/django-webhooks,pombredanne/django-webhooks,pombredanne/django-webhooks,chop-dbhi/django-webhooks
import BaseHTTPServer

class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
    def do_POST(self):
        content_type = self.headers.getheader('content-type')
        content_length = int(self.headers.getheader('content-length'))
        self.send_response(200)
        self.send_header('Content-Type', content_type)
        self.send_header('Content-Length', str(content_length))
        self.end_headers()
        self.wfile.write(self.rfile.read(content_length))

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class(('0.0.0.0', 8328), Handler)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.server_close()
Fix test http server, change to echo back request body
import SimpleHTTPServer
import BaseHTTPServer

class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    def do_POST(s):
        s.send_response(200)
        s.end_headers()

if __name__ == '__main__':
    server_class = BaseHTTPServer.HTTPServer
    httpd = server_class(('0.0.0.0', 8328), Handler)
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        httpd.server_close()
26f1506607a2042d508dc69f5a155ed88668d22a
setup.py
setup.py
import os
from setuptools import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name = "ideone",
    version = "0.0.1",
    author = "Joe Schafer",
    author_email = "[email protected]",
    url = "http://github.com/jschaf",
    description = "A Python binding to the Ideone API.",
    license = "BSD",
    keywords = "API ideone codepad",
    packages = ['ideone'],
    long_description=read('README.rst'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: BSD License",
    ],
    install_requires=['suds',]
)
import os
from setuptools import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name = "ideone",
    version = "0.0.1",
    author = "Joe Schafer",
    author_email = "[email protected]",
    url = "http://github.com/jschaf/ideone-api/",
    description = "A Python binding to the Ideone API.",
    license = "BSD",
    keywords = "API ideone codepad",
    packages = ['ideone'],
    long_description=read('README.rst'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: BSD License",
    ],
    install_requires=['suds',]
)
Fix url to point to repository.
Fix url to point to repository.
Python
bsd-3-clause
jschaf/ideone-api
import os
from setuptools import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name = "ideone",
    version = "0.0.1",
    author = "Joe Schafer",
    author_email = "[email protected]",
    url = "http://github.com/jschaf/ideone-api/",
    description = "A Python binding to the Ideone API.",
    license = "BSD",
    keywords = "API ideone codepad",
    packages = ['ideone'],
    long_description=read('README.rst'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: BSD License",
    ],
    install_requires=['suds',]
)
Fix url to point to repository.
import os
from setuptools import setup

def read(fname):
    return open(os.path.join(os.path.dirname(__file__), fname)).read()

setup(
    name = "ideone",
    version = "0.0.1",
    author = "Joe Schafer",
    author_email = "[email protected]",
    url = "http://github.com/jschaf",
    description = "A Python binding to the Ideone API.",
    license = "BSD",
    keywords = "API ideone codepad",
    packages = ['ideone'],
    long_description=read('README.rst'),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Topic :: Utilities",
        "License :: OSI Approved :: BSD License",
    ],
    install_requires=['suds',]
)
bf36831f062c8262e9d7f8a5f63b5b4a0f413c5f
molecule/default/tests/test_default.py
molecule/default/tests/test_default.py
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')

# check if MongoDB package is installed
def test_mongodb_is_installed(host):
    package = host.package('mongodb-org')
    assert package.is_installed

# check if MongoDB is enabled and running
def test_mongod_is_running(host):
    mongo = host.service('mongod')
    assert mongo.is_running
    assert mongo.is_enabled

# check if configuration file contains the required line
def test_mongod_config_file(File):
    config_file = File('/etc/mongod.conf')
    assert config_file.contains('port: 27017')
    assert config_file.contains('bindIp: 127.0.0.1')
    assert config_file.is_file

# check if mongod process is listening on localhost
def test_mongod_is_listening(host):
    port = host.socket('tcp://127.0.0.1:27017')
    assert port.is_listening
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')

# check if MongoDB package is installed
def test_mongodb_is_installed(host):
    package = host.package('mongodb-org')
    assert package.is_installed
    assert package.version.startswith('3.4.7')

# check if MongoDB is enabled and running
def test_mongod_is_running(host):
    mongo = host.service('mongod')
    assert mongo.is_running
    assert mongo.is_enabled

# check if configuration file contains the required line
def test_mongod_config_file(File):
    config_file = File('/etc/mongod.conf')
    assert config_file.contains('port: 27017')
    assert config_file.contains('bindIp: 127.0.0.1')
    assert config_file.is_file

# check if mongod process is listening on localhost
def test_mongod_is_listening(host):
    port = host.socket('tcp://127.0.0.1:27017')
    assert port.is_listening
Add test of mongodb package version
Add test of mongodb package version
Python
bsd-2-clause
jugatsu-infra/ansible-role-mongodb
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')

# check if MongoDB package is installed
def test_mongodb_is_installed(host):
    package = host.package('mongodb-org')
    assert package.is_installed
    assert package.version.startswith('3.4.7')

# check if MongoDB is enabled and running
def test_mongod_is_running(host):
    mongo = host.service('mongod')
    assert mongo.is_running
    assert mongo.is_enabled

# check if configuration file contains the required line
def test_mongod_config_file(File):
    config_file = File('/etc/mongod.conf')
    assert config_file.contains('port: 27017')
    assert config_file.contains('bindIp: 127.0.0.1')
    assert config_file.is_file

# check if mongod process is listening on localhost
def test_mongod_is_listening(host):
    port = host.socket('tcp://127.0.0.1:27017')
    assert port.is_listening
Add test of mongodb package version
import os

import testinfra.utils.ansible_runner

testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner(
    os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all')

# check if MongoDB package is installed
def test_mongodb_is_installed(host):
    package = host.package('mongodb-org')
    assert package.is_installed

# check if MongoDB is enabled and running
def test_mongod_is_running(host):
    mongo = host.service('mongod')
    assert mongo.is_running
    assert mongo.is_enabled

# check if configuration file contains the required line
def test_mongod_config_file(File):
    config_file = File('/etc/mongod.conf')
    assert config_file.contains('port: 27017')
    assert config_file.contains('bindIp: 127.0.0.1')
    assert config_file.is_file

# check if mongod process is listening on localhost
def test_mongod_is_listening(host):
    port = host.socket('tcp://127.0.0.1:27017')
    assert port.is_listening
c407c067495d76ebed7c36ef005861c80fdcfdce
textx/__init__.py
textx/__init__.py
__version__ = "1.6.dev"
from textx.metamodel import metamodel_from_file, metamodel_from_str  # noqa
from textx.langapi import get_language, iter_languages  # noqa

__version__ = "1.6.dev"
Make metamodel factory methods and lang API available in textx package.
Make metamodel factory methods and lang API available in textx package.
Python
mit
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
from textx.metamodel import metamodel_from_file, metamodel_from_str  # noqa
from textx.langapi import get_language, iter_languages  # noqa

__version__ = "1.6.dev"
Make metamodel factory methods and lang API available in textx package.
__version__ = "1.6.dev"
dad58aa0162290627e9d96a5047a507237a49b76
calculate.py
calculate.py
operators = {'+', '-', '*', '/', '(', ')'}


def parse_formula(text):
    tokens = []
    buffer = ''
    for c in text:
        if '0' <= c <= '9':
            buffer += c
        elif c in operators:
            if buffer:
                tokens.append(int(buffer))
            tokens.append(c)
            buffer = ''
    if buffer:
        tokens.append(int(buffer))
    return tokens


if __name__ == '__main__':
    import sys

    if len(sys.argv) < 2:
        print('Input formula required')
    else:
        formula = sys.argv[1]
        print('tokens={}'.format(parse_formula(formula)))
# List of operators along with their associated precedence
operators = {None: 100, '+': 3, '-': 3, '*': 2, '/': 2, '(': 1, ')': 1}


def operation(v1, v2, operator):
    if item == '+':
        return v1 + v2
    elif item == '-':
        return v1 - v2
    elif item == '*':
        return v1 * v2
    elif item == '/':
        return int(v1 / v2)
    else:
        raise ValueError('Unknown operator specified: {}'.format(item))


def parse_formula(text):
    tokens = []
    buffer = ''
    for c in text:
        if '0' <= c <= '9':
            buffer += c
        elif c in operators:
            if buffer:
                tokens.append(int(buffer))
            tokens.append(c)
            buffer = ''
    if buffer:
        tokens.append(int(buffer))
    return tokens


if __name__ == '__main__':
    import sys

    if len(sys.argv) < 2:
        print('Input formula required')
    else:
        formula = sys.argv[1]
        tokens = parse_formula(formula)

        operator_stack = []
        operand_stack = []

        for item in tokens:
            if type(item) is int:
                operand_stack.append(item)
            elif type(item) is str:
                if operator_stack:
                    peek = operator_stack[-1]
                else:
                    peek = None

                if operators[item] < operators[peek]:
                    operator_stack.append(item)
                else:
                    value2 = operand_stack.pop()
                    value1 = operand_stack.pop()
                    operand_stack.append(operation(value1, value2, item))
            else:
                raise ValueError('Unknown item found in tokens')

        while operator_stack:
            item = operator_stack.pop()
            value2 = operand_stack.pop()
            value1 = operand_stack.pop()
            operand_stack.append(operation(value1, value2, item))

        print('Result = {}'.format(operand_stack.pop()))
Add support for basic binary operations
Add support for basic binary operations
Python
mit
MichaelAquilina/Simple-Calculator
# List of operators along with their associated precedence
operators = {None: 100, '+': 3, '-': 3, '*': 2, '/': 2, '(': 1, ')': 1}


def operation(v1, v2, operator):
    if item == '+':
        return v1 + v2
    elif item == '-':
        return v1 - v2
    elif item == '*':
        return v1 * v2
    elif item == '/':
        return int(v1 / v2)
    else:
        raise ValueError('Unknown operator specified: {}'.format(item))


def parse_formula(text):
    tokens = []
    buffer = ''
    for c in text:
        if '0' <= c <= '9':
            buffer += c
        elif c in operators:
            if buffer:
                tokens.append(int(buffer))
            tokens.append(c)
            buffer = ''
    if buffer:
        tokens.append(int(buffer))
    return tokens


if __name__ == '__main__':
    import sys

    if len(sys.argv) < 2:
        print('Input formula required')
    else:
        formula = sys.argv[1]
        tokens = parse_formula(formula)

        operator_stack = []
        operand_stack = []

        for item in tokens:
            if type(item) is int:
                operand_stack.append(item)
            elif type(item) is str:
                if operator_stack:
                    peek = operator_stack[-1]
                else:
                    peek = None

                if operators[item] < operators[peek]:
                    operator_stack.append(item)
                else:
                    value2 = operand_stack.pop()
                    value1 = operand_stack.pop()
                    operand_stack.append(operation(value1, value2, item))
            else:
                raise ValueError('Unknown item found in tokens')

        while operator_stack:
            item = operator_stack.pop()
            value2 = operand_stack.pop()
            value1 = operand_stack.pop()
            operand_stack.append(operation(value1, value2, item))

        print('Result = {}'.format(operand_stack.pop()))
Add support for basic binary operations
operators = {'+', '-', '*', '/', '(', ')'}


def parse_formula(text):
    tokens = []
    buffer = ''
    for c in text:
        if '0' <= c <= '9':
            buffer += c
        elif c in operators:
            if buffer:
                tokens.append(int(buffer))
            tokens.append(c)
            buffer = ''
    if buffer:
        tokens.append(int(buffer))
    return tokens


if __name__ == '__main__':
    import sys

    if len(sys.argv) < 2:
        print('Input formula required')
    else:
        formula = sys.argv[1]
        print('tokens={}'.format(parse_formula(formula)))
3f39c9d89da004556bcf53fa815f88a3092f600e
syft/frameworks/torch/tensors/native.py
syft/frameworks/torch/tensors/native.py
import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to all
    Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner

        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location.id != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )

        return ptr
import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to all
    Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner

        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location.id != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )

        return ptr
Set local worker as default for SyftTensor owner
Undone: Set local worker as default for SyftTensor owner
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
import random

from syft.frameworks.torch.tensors import PointerTensor


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to all
    Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = None

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner

        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location.id != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )

        return ptr
Undone: Set local worker as default for SyftTensor owner
import random

from syft.frameworks.torch.tensors import PointerTensor
import syft


class TorchTensor:
    """
    This tensor is simply a more convenient way to add custom functions to all
    Torch tensor types.
    """

    def __init__(self):
        self.id = None
        self.owner = syft.local_worker

    def create_pointer(
        self, location=None, id_at_location=None, register=False, owner=None, ptr_id=None
    ):
        if owner is None:
            owner = self.owner

        if location is None:
            location = self.owner.id

        owner = self.owner.get_worker(owner)
        location = self.owner.get_worker(location)

        if id_at_location is None:
            id_at_location = self.id

        if ptr_id is None:
            if location.id != self.owner.id:
                ptr_id = self.id
            else:
                ptr_id = int(10e10 * random.random())

        # previous_pointer = owner.get_pointer_to(location, id_at_location)
        previous_pointer = None

        if previous_pointer is None:
            ptr = PointerTensor(
                parent=self,
                location=location,
                id_at_location=id_at_location,
                register=register,
                owner=owner,
                id=ptr_id,
            )

        return ptr
b8359e6b04d13f550aec308308f2e91e194bc372
uberlogs/handlers/kill_process.py
uberlogs/handlers/kill_process.py
import sys
import os
from logging import Handler as LoggingHandler


class KillProcessHandler(LoggingHandler):

    @profile
    def emit(self, record):
        if record.levelno != self.level:
            return

        # flush text before exiting
        for fd in [sys.stdout, sys.stderr]:
            fd.flush()

        # Twisted writes unhandled errors in different calls
        # If we exit on the first call, we'd lose the actual error
        for log_to_ignore in ["Unhandled error in Deferred"]:
            if log_to_ignore.lower() in record.getMessage().lower():
                return

        os._exit(1)
import sys
import os
from logging import Handler as LoggingHandler


class KillProcessHandler(LoggingHandler):

    @profile
    def emit(self, record):
        if record.levelno != self.level:
            return

        # flush text before exiting
        for fd in [sys.stdout, sys.stderr]:
            fd.flush()

        msg = record.getMessage()

        # Twisted writes unhandled errors in different calls
        # If we exit on the first call, we'd lose the actual error
        for log_to_ignore in ["Unhandled error in Deferred"]:
            if log_to_ignore.lower() in msg.lower():
                return

        os._exit(1)
Remove repetitive getMessage calls in KillProcesshandler
Remove repetitive getMessage calls in KillProcesshandler
Python
mit
odedlaz/uberlogs,odedlaz/uberlogs
import sys
import os
from logging import Handler as LoggingHandler


class KillProcessHandler(LoggingHandler):

    @profile
    def emit(self, record):
        if record.levelno != self.level:
            return

        # flush text before exiting
        for fd in [sys.stdout, sys.stderr]:
            fd.flush()

        msg = record.getMessage()

        # Twisted writes unhandled errors in different calls
        # If we exit on the first call, we'd lose the actual error
        for log_to_ignore in ["Unhandled error in Deferred"]:
            if log_to_ignore.lower() in msg.lower():
                return

        os._exit(1)
Remove repetitive getMessage calls in KillProcesshandler
import sys
import os
from logging import Handler as LoggingHandler


class KillProcessHandler(LoggingHandler):

    @profile
    def emit(self, record):
        if record.levelno != self.level:
            return

        # flush text before exiting
        for fd in [sys.stdout, sys.stderr]:
            fd.flush()

        # Twisted writes unhandled errors in different calls
        # If we exit on the first call, we'd lose the actual error
        for log_to_ignore in ["Unhandled error in Deferred"]:
            if log_to_ignore.lower() in record.getMessage().lower():
                return

        os._exit(1)
cf0193adcf6c58d82b577f09842c265bc09a685a
candidates/csv_helpers.py
candidates/csv_helpers.py
import csv
import StringIO

from .models import CSV_ROW_FIELDS


def encode_row_values(d):
    return {
        k: unicode('' if v is None else v).encode('utf-8')
        for k, v in d.items()
    }


def list_to_csv(candidates_list):
    output = StringIO.StringIO()
    writer = csv.DictWriter(
        output, fieldnames=CSV_ROW_FIELDS, dialect=csv.excel)
    writer.writeheader()
    for row in candidates_list:
        writer.writerow(encode_row_values(row))
    return output.getvalue()
import csv
import StringIO

from .models import CSV_ROW_FIELDS


def encode_row_values(d):
    return {
        k: unicode('' if v is None else v).encode('utf-8')
        for k, v in d.items()
    }


def candidate_sort_key(row):
    return (row['constituency'], row['name'].split()[-1])


def list_to_csv(candidates_list):
    output = StringIO.StringIO()
    writer = csv.DictWriter(
        output, fieldnames=CSV_ROW_FIELDS, dialect=csv.excel)
    writer.writeheader()
    for row in sorted(candidates_list, key=candidate_sort_key):
        writer.writerow(encode_row_values(row))
    return output.getvalue()
Sort the rows in CSV output on (constituency, last name)
Sort the rows in CSV output on (constituency, last name)
Python
agpl-3.0
datamade/yournextmp-popit,openstate/yournextrepresentative,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,openstate/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,openstate/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextmp-popit
import csv
import StringIO

from .models import CSV_ROW_FIELDS


def encode_row_values(d):
    return {
        k: unicode('' if v is None else v).encode('utf-8')
        for k, v in d.items()
    }


def candidate_sort_key(row):
    return (row['constituency'], row['name'].split()[-1])


def list_to_csv(candidates_list):
    output = StringIO.StringIO()
    writer = csv.DictWriter(
        output, fieldnames=CSV_ROW_FIELDS, dialect=csv.excel)
    writer.writeheader()
    for row in sorted(candidates_list, key=candidate_sort_key):
        writer.writerow(encode_row_values(row))
    return output.getvalue()
Sort the rows in CSV output on (constituency, last name)
import csv
import StringIO

from .models import CSV_ROW_FIELDS


def encode_row_values(d):
    return {
        k: unicode('' if v is None else v).encode('utf-8')
        for k, v in d.items()
    }


def list_to_csv(candidates_list):
    output = StringIO.StringIO()
    writer = csv.DictWriter(
        output, fieldnames=CSV_ROW_FIELDS, dialect=csv.excel)
    writer.writeheader()
    for row in candidates_list:
        writer.writerow(encode_row_values(row))
    return output.getvalue()
bc20949f8e5461d6ffa901d24677acb1bae922dd
mangopaysdk/types/payinexecutiondetailsdirect.py
mangopaysdk/types/payinexecutiondetailsdirect.py
from mangopaysdk.types.payinexecutiondetails import PayInExecutionDetails


class PayInExecutionDetailsDirect(PayInExecutionDetails):

    def __init__(self):
        # direct card
        self.CardId = None
        self.SecureModeReturnURL = None
        self.SecureModeRedirectURL = None
        # Mode3DSType { DEFAULT, FORCE }
        self.SecureMode = None
from mangopaysdk.types.payinexecutiondetails import PayInExecutionDetails


class PayInExecutionDetailsDirect(PayInExecutionDetails):

    def __init__(self):
        # direct card
        self.CardId = None
        self.SecureModeReturnURL = None
        self.SecureModeRedirectURL = None
        # Mode3DSType { DEFAULT, FORCE }
        self.SecureMode = None
        self.StatementDescriptor = None
Add StatementDescriptor for card direct payins
Add StatementDescriptor for card direct payins
Python
mit
chocopoche/mangopay2-python-sdk,Mangopay/mangopay2-python-sdk
from mangopaysdk.types.payinexecutiondetails import PayInExecutionDetails


class PayInExecutionDetailsDirect(PayInExecutionDetails):

    def __init__(self):
        # direct card
        self.CardId = None
        self.SecureModeReturnURL = None
        self.SecureModeRedirectURL = None
        # Mode3DSType { DEFAULT, FORCE }
        self.SecureMode = None
        self.StatementDescriptor = None
Add StatementDescriptor for card direct payins
from mangopaysdk.types.payinexecutiondetails import PayInExecutionDetails


class PayInExecutionDetailsDirect(PayInExecutionDetails):

    def __init__(self):
        # direct card
        self.CardId = None
        self.SecureModeReturnURL = None
        self.SecureModeRedirectURL = None
        # Mode3DSType { DEFAULT, FORCE }
        self.SecureMode = None
984dd9d20814e3190ee197b47c756f2b8f4ecb52
django_prometheus/testutils.py
django_prometheus/testutils.py
from prometheus_client import REGISTRY

METRIC_EQUALS_ERR_EXPLANATION = """
%s%s = %s, expected %s.
The values for %s are:
%s"""


class PrometheusTestCaseMixin(object):
    """A collection of utilities that make it easier to write test cases
    that interact with metrics.
    """

    def setUp(self):
        self.clearRegistry()

    def clearRegistry(self):
        """Resets the values of all collectors in the global registry.

        This is so we can test the value of exported metrics in unit
        tests.

        This is quite a hack since it relies on the internal
        representation of the prometheus_client, and it should probably
        be provided as a function there instead.
        """
        with REGISTRY._lock:
            for c in REGISTRY._collectors:
                if hasattr(c, '_metrics'):
                    c._metrics = {}
                if hasattr(c, '_value'):
                    c._value = 0.0
                if hasattr(c, '_count'):
                    c._count = 0.0
                if hasattr(c, '_sum'):
                    c._sum = 0.0
                if hasattr(c, '_buckets'):
                    c._buckets = [0.0] * len(c._buckets)

    def getMetric(self, metric_name, **labels):
        return REGISTRY.get_sample_value(metric_name, labels=labels)

    def getMetricVector(self, metric_name):
        """Returns the values for all labels of a given metric.

        The result is returned as a list of (labels, value) tuples,
        where `labels` is a dict.

        This is quite a hack since it relies on the internal
        representation of the prometheus_client, and it should probably
        be provided as a function there instead.
        """
        all_metrics = REGISTRY.collect()
        output = []
        for metric in all_metrics:
            for n, l, value in metric._samples:
                if n == metric_name:
                    output.append((l, value))
        return output

    def formatLabels(self, labels):
        """Format a set of labels to Prometheus representation.

        In: {'method': 'GET', 'port': '80'}
        Out: '{method="GET",port="80"}'
        """
        return '{%s}' % ','.join([
            '%s="%s"' % (k, v) for k, v in labels.items()])

    def formatVector(self, vector):
        """Formats a list of (labels, value) where labels is a dict into a
        human-readable representation.
        """
        return '\n'.join([
            '%s = %s' % (self.formatLabels(labels), value)
            for labels, value in vector])

    def assertMetricEquals(self, expected_value, metric_name, **labels):
        """Asserts that metric_name{**labels} == expected_value."""
        value = self.getMetric(metric_name, **labels)
        self.assertEqual(
            expected_value, value,
            METRIC_EQUALS_ERR_EXPLANATION % (
                metric_name, self.formatLabels(labels), value,
                expected_value, metric_name,
                self.formatVector(self.getMetricVector(metric_name))))
Add a mixin to test exported metrics.
Add a mixin to test exported metrics. Most of this mixin should be moved to prometheus_client eventually, since it relies heavily on its internals.
Python
apache-2.0
wangwanzhong/django-prometheus,obytes/django-prometheus,obytes/django-prometheus,wangwanzhong/django-prometheus,korfuri/django-prometheus,DingaGa/django-prometheus,DingaGa/django-prometheus,korfuri/django-prometheus
from prometheus_client import REGISTRY

METRIC_EQUALS_ERR_EXPLANATION = """
%s%s = %s, expected %s.
The values for %s are:
%s"""


class PrometheusTestCaseMixin(object):
    """A collection of utilities that make it easier to write test cases
    that interact with metrics.
    """

    def setUp(self):
        self.clearRegistry()

    def clearRegistry(self):
        """Resets the values of all collectors in the global registry.

        This is so we can test the value of exported metrics in unit
        tests.

        This is quite a hack since it relies on the internal
        representation of the prometheus_client, and it should probably
        be provided as a function there instead.
        """
        with REGISTRY._lock:
            for c in REGISTRY._collectors:
                if hasattr(c, '_metrics'):
                    c._metrics = {}
                if hasattr(c, '_value'):
                    c._value = 0.0
                if hasattr(c, '_count'):
                    c._count = 0.0
                if hasattr(c, '_sum'):
                    c._sum = 0.0
                if hasattr(c, '_buckets'):
                    c._buckets = [0.0] * len(c._buckets)

    def getMetric(self, metric_name, **labels):
        return REGISTRY.get_sample_value(metric_name, labels=labels)

    def getMetricVector(self, metric_name):
        """Returns the values for all labels of a given metric.

        The result is returned as a list of (labels, value) tuples,
        where `labels` is a dict.

        This is quite a hack since it relies on the internal
        representation of the prometheus_client, and it should probably
        be provided as a function there instead.
        """
        all_metrics = REGISTRY.collect()
        output = []
        for metric in all_metrics:
            for n, l, value in metric._samples:
                if n == metric_name:
                    output.append((l, value))
        return output

    def formatLabels(self, labels):
        """Format a set of labels to Prometheus representation.

        In: {'method': 'GET', 'port': '80'}
        Out: '{method="GET",port="80"}'
        """
        return '{%s}' % ','.join([
            '%s="%s"' % (k, v) for k, v in labels.items()])

    def formatVector(self, vector):
        """Formats a list of (labels, value) where labels is a dict into a
        human-readable representation.
        """
        return '\n'.join([
            '%s = %s' % (self.formatLabels(labels), value)
            for labels, value in vector])

    def assertMetricEquals(self, expected_value, metric_name, **labels):
        """Asserts that metric_name{**labels} == expected_value."""
        value = self.getMetric(metric_name, **labels)
        self.assertEqual(
            expected_value, value,
            METRIC_EQUALS_ERR_EXPLANATION % (
                metric_name, self.formatLabels(labels), value,
                expected_value, metric_name,
                self.formatVector(self.getMetricVector(metric_name))))
Add a mixin to test exported metrics. Most of this mixin should be moved to prometheus_client eventually, since it relies heavily on its internals.
cb30232b201934622efb2f972cca5087a1373cf7
src/waldur_mastermind/marketplace_remote/extension.py
src/waldur_mastermind/marketplace_remote/extension.py
from waldur_core.core import WaldurExtension


class MarketplaceRemoteExtension(WaldurExtension):
    @staticmethod
    def django_app():
        return 'waldur_mastermind.marketplace_remote'

    @staticmethod
    def is_assembly():
        return True

    @staticmethod
    def django_urls():
        from .urls import urlpatterns

        return urlpatterns

    @staticmethod
    def celery_tasks():
        from datetime import timedelta

        return {
            'waldur-remote-pull-offerings': {
                'task': 'waldur_mastermind.marketplace_remote.pull_offerings',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-pull-order-items': {
                'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-pull-usage': {
                'task': 'waldur_mastermind.marketplace_remote.pull_usage',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-sync-remote-project-permissions': {
                'task': 'waldur_mastermind.marketplace_remote.sync_remote_project_permissions',
                'schedule': timedelta(hours=6),
                'args': (),
            },
            'waldur-remote-pull-invoices': {
                'task': 'waldur_mastermind.marketplace_remote.pull_invoices',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
        }
from waldur_core.core import WaldurExtension


class MarketplaceRemoteExtension(WaldurExtension):
    @staticmethod
    def django_app():
        return 'waldur_mastermind.marketplace_remote'

    @staticmethod
    def is_assembly():
        return True

    @staticmethod
    def django_urls():
        from .urls import urlpatterns

        return urlpatterns

    @staticmethod
    def celery_tasks():
        from datetime import timedelta

        return {
            'waldur-remote-pull-offerings': {
                'task': 'waldur_mastermind.marketplace_remote.pull_offerings',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-pull-order-items': {
                'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
                'schedule': timedelta(minutes=5),
                'args': (),
            },
            'waldur-remote-pull-usage': {
                'task': 'waldur_mastermind.marketplace_remote.pull_usage',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-sync-remote-project-permissions': {
                'task': 'waldur_mastermind.marketplace_remote.sync_remote_project_permissions',
                'schedule': timedelta(hours=6),
                'args': (),
            },
            'waldur-remote-pull-invoices': {
                'task': 'waldur_mastermind.marketplace_remote.pull_invoices',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
        }
Increase frequency of order items pulling.
Increase frequency of order items pulling.
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
from waldur_core.core import WaldurExtension


class MarketplaceRemoteExtension(WaldurExtension):
    @staticmethod
    def django_app():
        return 'waldur_mastermind.marketplace_remote'

    @staticmethod
    def is_assembly():
        return True

    @staticmethod
    def django_urls():
        from .urls import urlpatterns

        return urlpatterns

    @staticmethod
    def celery_tasks():
        from datetime import timedelta

        return {
            'waldur-remote-pull-offerings': {
                'task': 'waldur_mastermind.marketplace_remote.pull_offerings',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-pull-order-items': {
                'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
                'schedule': timedelta(minutes=5),
                'args': (),
            },
            'waldur-remote-pull-usage': {
                'task': 'waldur_mastermind.marketplace_remote.pull_usage',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-sync-remote-project-permissions': {
                'task': 'waldur_mastermind.marketplace_remote.sync_remote_project_permissions',
                'schedule': timedelta(hours=6),
                'args': (),
            },
            'waldur-remote-pull-invoices': {
                'task': 'waldur_mastermind.marketplace_remote.pull_invoices',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
        }
Increase frequency of order items pulling.
from waldur_core.core import WaldurExtension


class MarketplaceRemoteExtension(WaldurExtension):
    @staticmethod
    def django_app():
        return 'waldur_mastermind.marketplace_remote'

    @staticmethod
    def is_assembly():
        return True

    @staticmethod
    def django_urls():
        from .urls import urlpatterns

        return urlpatterns

    @staticmethod
    def celery_tasks():
        from datetime import timedelta

        return {
            'waldur-remote-pull-offerings': {
                'task': 'waldur_mastermind.marketplace_remote.pull_offerings',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-pull-order-items': {
                'task': 'waldur_mastermind.marketplace_remote.pull_order_items',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-pull-usage': {
                'task': 'waldur_mastermind.marketplace_remote.pull_usage',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
            'waldur-remote-sync-remote-project-permissions': {
                'task': 'waldur_mastermind.marketplace_remote.sync_remote_project_permissions',
                'schedule': timedelta(hours=6),
                'args': (),
            },
            'waldur-remote-pull-invoices': {
                'task': 'waldur_mastermind.marketplace_remote.pull_invoices',
                'schedule': timedelta(minutes=60),
                'args': (),
            },
        }
0261a0f9a1dde9f9f6167e3630561219e3dca124
statsmodels/datasets/__init__.py
statsmodels/datasets/__init__.py
""" Datasets module """ #__all__ = filter(lambda s:not s.startswith('_'),dir()) import anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, \ macrodata, nile, randhie, scotland, spector, stackloss, star98, \ strikes, sunspots, fair, heart, statecrime
""" Datasets module """ #__all__ = filter(lambda s:not s.startswith('_'),dir()) from . import (anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, macrodata, nile, randhie, scotland, spector, stackloss, star98, strikes, sunspots, fair, heart, statecrime)
Switch to relative imports and fix pep-8
STY: Switch to relative imports and fix pep-8
Python
bsd-3-clause
bsipocz/statsmodels,bsipocz/statsmodels,bsipocz/statsmodels,hlin117/statsmodels,bashtage/statsmodels,nguyentu1602/statsmodels,hlin117/statsmodels,musically-ut/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,wwf5067/statsmodels,bert9bert/statsmodels,nvoron23/statsmodels,bert9bert/statsmodels,astocko/statsmodels,jseabold/statsmodels,YihaoLu/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,bert9bert/statsmodels,kiyoto/statsmodels,astocko/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,alekz112/statsmodels,waynenilsen/statsmodels,phobson/statsmodels,rgommers/statsmodels,nguyentu1602/statsmodels,josef-pkt/statsmodels,gef756/statsmodels,Averroes/statsmodels,phobson/statsmodels,statsmodels/statsmodels,yl565/statsmodels,statsmodels/statsmodels,adammenges/statsmodels,wdurhamh/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,adammenges/statsmodels,jstoxrocky/statsmodels,alekz112/statsmodels,rgommers/statsmodels,wwf5067/statsmodels,waynenilsen/statsmodels,huongttlan/statsmodels,bavardage/statsmodels,yarikoptic/pystatsmodels,jstoxrocky/statsmodels,musically-ut/statsmodels,ChadFulton/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,huongttlan/statsmodels,hainm/statsmodels,bashtage/statsmodels,bzero/statsmodels,wzbozon/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,musically-ut/statsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,gef756/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,saketkc/statsmodels,josef-pkt/statsmodels,detrout/debian-statsmodels,astocko/statsmodels,wzbozon/statsmodels,yl565/statsmodels,adammenges/statsmodels,hlin117/statsmodels,detrout/debian-statsmodels,bzero/statsmodels,kiyoto/statsmodels,yl565/statsmodels,alekz112/statsmodels,bavardage/statsmodels,nvoron23/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,hainm/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,bsipocz/statsmodels,ChadFulton/statsmodels,wkfwkf/statsmodels,astocko/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,saketkc/statsmodels,musically-ut/statsmodels,jseabold/statsmodels,bavardage/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,bzero/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,wwf5067/statsmodels,wdurhamh/statsmodels,nvoron23/statsmodels,detrout/debian-statsmodels,edhuckle/statsmodels,jseabold/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,kiyoto/statsmodels,adammenges/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,DonBeo/statsmodels,bert9bert/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,hlin117/statsmodels,YihaoLu/statsmodels,bzero/statsmodels,phobson/statsmodels,nvoron23/statsmodels,Averroes/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,gef756/statsmodels,bzero/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,detrout/debian-statsmodels,wzbozon/statsmodels,phobson/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,gef756/statsmodels,wwf5067/statsmodels,wkfwkf/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,wzbozon/statsmodels,bavardage/statsmodels,waynenilsen/statsmodels,cbmoore/statsmodels,cbmoore/statsmodels,jstoxrocky/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,kiyoto/statsmodels,saketkc/statsmodels,yariko
ptic/pystatsmodels,wdurhamh/statsmodels,gef756/statsmodels,rgommers/statsmodels,yarikoptic/pystatsmodels
""" Datasets module """ #__all__ = filter(lambda s:not s.startswith('_'),dir()) from . import (anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, macrodata, nile, randhie, scotland, spector, stackloss, star98, strikes, sunspots, fair, heart, statecrime)
STY: Switch to relative imports and fix pep-8
"""
Datasets module
"""
#__all__ = filter(lambda s:not s.startswith('_'),dir())
import anes96, cancer, committee, ccard, copper, cpunish, elnino, grunfeld, longley, \
        macrodata, nile, randhie, scotland, spector, stackloss, star98, \
        strikes, sunspots, fair, heart, statecrime
421dbe962dae44cad7aa734a397cb16fe9b1632f
reactive/datanode.py
reactive/datanode.py
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils


@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
    utils.install_ssh_key('hdfs', namenode.ssh_key())
    utils.update_kv_hosts(namenode.hosts_map())
    utils.manage_etc_hosts()
    hdfs.start_datanode()
    hadoop.open_ports('datanode')
    set_state('datanode.started')


@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.stop_datanode()
    hadoop.close_ports('datanode')
    remove_state('datanode.started')
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils


@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
    utils.install_ssh_key('hdfs', namenode.ssh_key())
    utils.update_kv_hosts(namenode.hosts_map())
    utils.manage_etc_hosts()
    hdfs.start_datanode()
    hadoop.open_ports('datanode')
    set_state('datanode.started')


@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.stop_datanode()
    hadoop.close_ports('datanode')
    remove_state('datanode.started')
Update charms.hadoop reference to follow convention
Update charms.hadoop reference to follow convention
Python
apache-2.0
johnsca/layer-apache-hadoop-datanode,juju-solutions/layer-apache-hadoop-datanode
from charms.reactive import when, when_not, set_state, remove_state
from charms.layer.hadoop_base import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils


@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
    utils.install_ssh_key('hdfs', namenode.ssh_key())
    utils.update_kv_hosts(namenode.hosts_map())
    utils.manage_etc_hosts()
    hdfs.start_datanode()
    hadoop.open_ports('datanode')
    set_state('datanode.started')


@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.stop_datanode()
    hadoop.close_ports('datanode')
    remove_state('datanode.started')
Update charms.hadoop reference to follow convention
from charms.reactive import when, when_not, set_state, remove_state
from charms.hadoop import get_hadoop_base
from jujubigdata.handlers import HDFS
from jujubigdata import utils


@when('namenode.ready')
@when_not('datanode.started')
def start_datanode(namenode):
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.configure_datanode(namenode.namenodes()[0], namenode.port())
    utils.install_ssh_key('hdfs', namenode.ssh_key())
    utils.update_kv_hosts(namenode.hosts_map())
    utils.manage_etc_hosts()
    hdfs.start_datanode()
    hadoop.open_ports('datanode')
    set_state('datanode.started')


@when('datanode.started')
@when_not('namenode.ready')
def stop_datanode():
    hadoop = get_hadoop_base()
    hdfs = HDFS(hadoop)
    hdfs.stop_datanode()
    hadoop.close_ports('datanode')
    remove_state('datanode.started')
17f4e610e272d24c9178e43caa79a6f7c17a568b
tests/test_orderbook.py
tests/test_orderbook.py
from src import orderbook as ob


def test_create_msg_incrementing_message_id():
    first_message = ob.create_msg()
    second_message = ob.create_msg()
    assert first_message['message-id'] == 0, 'Expected 0, got {}'.format(first_message['message-id'])
    assert second_message['message-id'] == 1, 'Expected 1, got {}'.format(second_message['message-id'])


def test_create_msg():
    message = ob.create_msg()
    assert type(message) == dict


def test_create_msg_passing_options():
    options = {
        'hello': 'world',
    }
    message = ob.create_msg(options=options)
    assert 'hello' in message
    assert message['hello'] == 'world'


def test_create_msg_passing_options_overriding_default():
    options = {
        'id': 1234,
    }
    message = ob.create_msg(options=options)
    assert 'id' in message
    assert message['id'] == 1234


def test_create_ask():
    import datetime
    ask = ob.create_ask(1, 1, datetime.datetime.now())
    assert ask['type'] == 'ask'
    assert ask['price'] == 1
    assert ask['quantity'] == 1
    assert len(ob.offers) == 1
Add some basic testing for orderbook.create_msg.
Add some basic testing for orderbook.create_msg.
Python
mit
Tribler/decentral-market
from src import orderbook as ob


def test_create_msg_incrementing_message_id():
    first_message = ob.create_msg()
    second_message = ob.create_msg()
    assert first_message['message-id'] == 0, 'Expected 0, got {}'.format(first_message['message-id'])
    assert second_message['message-id'] == 1, 'Expected 1, got {}'.format(second_message['message-id'])


def test_create_msg():
    message = ob.create_msg()
    assert type(message) == dict


def test_create_msg_passing_options():
    options = {
        'hello': 'world',
    }
    message = ob.create_msg(options=options)
    assert 'hello' in message
    assert message['hello'] == 'world'


def test_create_msg_passing_options_overriding_default():
    options = {
        'id': 1234,
    }
    message = ob.create_msg(options=options)
    assert 'id' in message
    assert message['id'] == 1234


def test_create_ask():
    import datetime
    ask = ob.create_ask(1, 1, datetime.datetime.now())
    assert ask['type'] == 'ask'
    assert ask['price'] == 1
    assert ask['quantity'] == 1
    assert len(ob.offers) == 1
Add some basic testing for orderbook.create_msg.
c8a41bbf11538dbc17de12e32ba5af5e93fd0b2c
src/utils/plugins.py
src/utils/plugins.py
from utils import models


class Plugin:
    plugin_name = None
    display_name = None
    description = None
    author = None
    short_name = None
    stage = None
    manager_url = None
    version = None
    janeway_version = None
    is_workflow_plugin = False
    jump_url = None
    handshake_url = None
    article_pk_in_handshake_url = False
    press_wide = False
    kanban_card = '{plugin_name}/kanban_card.html'.format(
        plugin_name=plugin_name,
    )

    @classmethod
    def install(cls):
        plugin, created = cls.get_or_create_plugin_object()

        if not created and plugin.version != cls.version:
            plugin.version = cls.version
            plugin.save()

        return plugin, created

    @classmethod
    def hook_registry(cls):
        pass

    @classmethod
    def get_or_create_plugin_object(cls):
        plugin, created = models.Plugin.objects.get_or_create(
            name=cls.short_name,
            display_name=cls.display_name,
            press_wide=cls.press_wide,
            defaults={'version': cls.version, 'enabled': True},
        )

        return plugin, created
from utils import models


class Plugin:
    plugin_name = None
    display_name = None
    description = None
    author = None
    short_name = None
    stage = None
    manager_url = None
    version = None
    janeway_version = None
    is_workflow_plugin = False
    jump_url = None
    handshake_url = None
    article_pk_in_handshake_url = False
    press_wide = False
    kanban_card = '{plugin_name}/kanban_card.html'.format(
        plugin_name=plugin_name,
    )

    @classmethod
    def install(cls):
        plugin, created = cls.get_or_create_plugin_object()

        if not created and plugin.version != cls.version:
            print('Plugin updated: {0} -> {1}'.format(cls.version, plugin.version))
            plugin.version = cls.version
            plugin.save()

        return plugin, created

    @classmethod
    def hook_registry(cls):
        pass

    @classmethod
    def get_or_create_plugin_object(cls):
        plugin, created = models.Plugin.objects.get_or_create(
            name=cls.short_name,
            defaults={
                'display_name': cls.display_name,
                'version': cls.version,
                'enabled': True,
                'press_wide': cls.press_wide,
            },
        )

        return plugin, created

    @classmethod
    def get_self(cls):
        try:
            plugin = models.Plugin.objects.get(
                name=cls.short_name,
            )
        except models.Plugin.MultipleObjectsReturned:
            plugin = models.Plugin.objects.filter(
                name=cls.short_name,
            ).order_by(
                '-version'
            ).first()
        except models.Plugin.DoesNotExist:
            return None

        return plugin
Add get_self and change get_or_create to avoid mis-creation.
Add get_self and change get_or_create to avoid mis-creation.
Python
agpl-3.0
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
from utils import models


class Plugin:
    plugin_name = None
    display_name = None
    description = None
    author = None
    short_name = None
    stage = None
    manager_url = None
    version = None
    janeway_version = None
    is_workflow_plugin = False
    jump_url = None
    handshake_url = None
    article_pk_in_handshake_url = False
    press_wide = False
    kanban_card = '{plugin_name}/kanban_card.html'.format(
        plugin_name=plugin_name,
    )

    @classmethod
    def install(cls):
        plugin, created = cls.get_or_create_plugin_object()

        if not created and plugin.version != cls.version:
            print('Plugin updated: {0} -> {1}'.format(cls.version, plugin.version))
            plugin.version = cls.version
            plugin.save()

        return plugin, created

    @classmethod
    def hook_registry(cls):
        pass

    @classmethod
    def get_or_create_plugin_object(cls):
        plugin, created = models.Plugin.objects.get_or_create(
            name=cls.short_name,
            defaults={
                'display_name': cls.display_name,
                'version': cls.version,
                'enabled': True,
                'press_wide': cls.press_wide,
            },
        )

        return plugin, created

    @classmethod
    def get_self(cls):
        try:
            plugin = models.Plugin.objects.get(
                name=cls.short_name,
            )
        except models.Plugin.MultipleObjectsReturned:
            plugin = models.Plugin.objects.filter(
                name=cls.short_name,
            ).order_by(
                '-version'
            ).first()
        except models.Plugin.DoesNotExist:
            return None

        return plugin
Add get_self and change get_or_create to avoid mis-creation.
from utils import models


class Plugin:
    plugin_name = None
    display_name = None
    description = None
    author = None
    short_name = None
    stage = None
    manager_url = None
    version = None
    janeway_version = None
    is_workflow_plugin = False
    jump_url = None
    handshake_url = None
    article_pk_in_handshake_url = False
    press_wide = False
    kanban_card = '{plugin_name}/kanban_card.html'.format(
        plugin_name=plugin_name,
    )

    @classmethod
    def install(cls):
        plugin, created = cls.get_or_create_plugin_object()

        if not created and plugin.version != cls.version:
            plugin.version = cls.version
            plugin.save()

        return plugin, created

    @classmethod
    def hook_registry(cls):
        pass

    @classmethod
    def get_or_create_plugin_object(cls):
        plugin, created = models.Plugin.objects.get_or_create(
            name=cls.short_name,
            display_name=cls.display_name,
            press_wide=cls.press_wide,
            defaults={'version': cls.version, 'enabled': True},
        )

        return plugin, created
424aa401806ddf536b9bc75efb1493561a5c2a5b
product/views.py
product/views.py
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.shortcuts import render
from django.contrib.messages.views import SuccessMessageMixin
from product.models import ProductCategory

# Create your views here.


class ProductCategoryList(ListView):
    model = ProductCategory
    context_object_name = 'product_categories'


class ProductCategoryDetail(DetailView):
    model = ProductCategory
    context_object_name = 'product_category'


class ProductCategoryCreate(CreateView, SuccessMessageMixin):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s created"


class ProductCategoryUpdate(UpdateView, SuccessMessageMixin):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s updated"


class ProductCategoryDelete(DeleteView, SuccessMessageMixin):
    model = ProductCategory
    context_object_name = 'product_category'
    success_url = reverse_lazy('product-category-list')
    success_message = "Category %(name)s removed"
from django.http import HttpResponseRedirect
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.shortcuts import render
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from product.models import ProductCategory

# Create your views here.


class ProductCategoryList(ListView):
    model = ProductCategory
    context_object_name = 'product_categories'


class ProductCategoryDetail(DetailView):
    model = ProductCategory
    context_object_name = 'product_category'


class ProductCategoryCreate(SuccessMessageMixin, CreateView):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s created"


class ProductCategoryUpdate(SuccessMessageMixin, UpdateView):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s updated"


class ProductCategoryDelete(DeleteView):
    model = ProductCategory
    context_object_name = 'product_category'
    success_url = reverse_lazy('product-category-list')
    success_message = "Category removed"
    cancel_message = "Removal cancelled"

    def post(self, request, *args, **kwargs):
        if "cancel" in request.POST:
            url = self.success_url
            messages.warning(self.request, self.cancel_message)
            return HttpResponseRedirect(url)
        else:
            messages.success(self.request, self.success_message)
            return super(ProductCategoryDelete, self).delete(request, *args, **kwargs)
Make messages show in delete view.
Make messages show in delete view.
Python
mit
borderitsolutions/amadaa,borderitsolutions/amadaa,borderitsolutions/amadaa
from django.http import HttpResponseRedirect
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.shortcuts import render
from django.contrib import messages
from django.contrib.messages.views import SuccessMessageMixin
from product.models import ProductCategory

# Create your views here.


class ProductCategoryList(ListView):
    model = ProductCategory
    context_object_name = 'product_categories'


class ProductCategoryDetail(DetailView):
    model = ProductCategory
    context_object_name = 'product_category'


class ProductCategoryCreate(SuccessMessageMixin, CreateView):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s created"


class ProductCategoryUpdate(SuccessMessageMixin, UpdateView):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s updated"


class ProductCategoryDelete(DeleteView):
    model = ProductCategory
    context_object_name = 'product_category'
    success_url = reverse_lazy('product-category-list')
    success_message = "Category removed"
    cancel_message = "Removal cancelled"

    def post(self, request, *args, **kwargs):
        if "cancel" in request.POST:
            url = self.success_url
            messages.warning(self.request, self.cancel_message)
            return HttpResponseRedirect(url)
        else:
            messages.success(self.request, self.success_message)
            return super(ProductCategoryDelete, self).delete(request, *args, **kwargs)
Make messages show in delete view.
from django.views.generic import DetailView, ListView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.shortcuts import render
from django.contrib.messages.views import SuccessMessageMixin
from product.models import ProductCategory

# Create your views here.


class ProductCategoryList(ListView):
    model = ProductCategory
    context_object_name = 'product_categories'


class ProductCategoryDetail(DetailView):
    model = ProductCategory
    context_object_name = 'product_category'


class ProductCategoryCreate(CreateView, SuccessMessageMixin):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s created"


class ProductCategoryUpdate(UpdateView, SuccessMessageMixin):
    model = ProductCategory
    fields = ['name']
    success_message = "Category %(name)s updated"


class ProductCategoryDelete(DeleteView, SuccessMessageMixin):
    model = ProductCategory
    context_object_name = 'product_category'
    success_url = reverse_lazy('product-category-list')
    success_message = "Category %(name)s removed"
2b8869bb508f4fb67867385f3058372bde664ca5
CheckProxy/CheckProxy.py
CheckProxy/CheckProxy.py
import discord
import requests
from discord.ext import commands


class checkproxy:
    """Cog for proxy checking"""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def checkproxy(self, ctx, proxy):
        """Checks the provided proxy."""
        p = proxy
        pr = {
            'http': p,
            'https': p
        }
        try:
            r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
            if r.status_code == 200:
                await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
            if r.status_code == 403:
                await self.bot.say(':x: 403 Forbidden, proxy is banned.')
        except requests.exceptions.RequestException as e:
            await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
        if not ctx.message.channel.is_private:
            await self.bot.delete_message(ctx.message)


def setup(bot):
    bot.add_cog(checkproxy(bot))
import discord
import requests
from discord.ext import commands


class checkproxy:
    """Cog for proxy checking"""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def checkproxy(self, ctx, proxy):
        """Checks the provided proxy."""
        p = proxy
        pr = {
            'http': p,
            'https': p
        }
        try:
            r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
            if r.status_code == 200:
                await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
            if r.status_code == 403:
                await self.bot.say(':x: 403 Forbidden, proxy is banned.')
        except requests.exceptions.timeout:
            await self.bot.say(':x: Timed out checking proxy.')
        except requests.exceptions.RequestException as e:
            await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
        if not ctx.message.channel.is_private:
            await self.bot.delete_message(ctx.message)


def setup(bot):
    bot.add_cog(checkproxy(bot))
Add 5s timeout to checkproxy (in an effort to prevent bot hanging
Add 5s timeout to checkproxy (in an effort to prevent bot hanging
Python
agpl-3.0
FrostTheFox/RocketMap-cogs
import discord
import requests
from discord.ext import commands


class checkproxy:
    """Cog for proxy checking"""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def checkproxy(self, ctx, proxy):
        """Checks the provided proxy."""
        p = proxy
        pr = {
            'http': p,
            'https': p
        }
        try:
            r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
            if r.status_code == 200:
                await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
            if r.status_code == 403:
                await self.bot.say(':x: 403 Forbidden, proxy is banned.')
        except requests.exceptions.timeout:
            await self.bot.say(':x: Timed out checking proxy.')
        except requests.exceptions.RequestException as e:
            await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
        if not ctx.message.channel.is_private:
            await self.bot.delete_message(ctx.message)


def setup(bot):
    bot.add_cog(checkproxy(bot))
Add 5s timeout to checkproxy (in an effort to prevent bot hanging
import discord
import requests
from discord.ext import commands


class checkproxy:
    """Cog for proxy checking"""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def checkproxy(self, ctx, proxy):
        """Checks the provided proxy."""
        p = proxy
        pr = {
            'http': p,
            'https': p
        }
        try:
            r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
            if r.status_code == 200:
                await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
            if r.status_code == 403:
                await self.bot.say(':x: 403 Forbidden, proxy is banned.')
        except requests.exceptions.RequestException as e:
            await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
        if not ctx.message.channel.is_private:
            await self.bot.delete_message(ctx.message)


def setup(bot):
    bot.add_cog(checkproxy(bot))
6448691ed77be2fd74761e056eeb5f16a881fd54
test_settings.py
test_settings.py
from foundry.settings import *

# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'competition',
        'USER': 'test',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

SOUTH_TESTS_MIGRATE = False
from foundry.settings import *

DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'competition',
        'USER': 'test',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

# Need this last line until django-setuptest is improved.
Adjust test settings to be in line with jmbo-skeleton
Adjust test settings to be in line with jmbo-skeleton
Python
bsd-3-clause
praekelt/jmbo-competition,praekelt/jmbo-competition
from foundry.settings import *

DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'competition',
        'USER': 'test',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

# Need this last line until django-setuptest is improved.
Adjust test settings to be in line with jmbo-skeleton
from foundry.settings import *

# We cannot use ssqlite or spatialite because it cannot handle the 'distinct'
# in admin.py.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'competition',
        'USER': 'test',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

SOUTH_TESTS_MIGRATE = False
f9343573e2d1b3f3960c99eb90b5df7aad1573ca
python/problem4.py
python/problem4.py
for i in range(10, 100):
    for j in range(i, 100):
        n = i * j
        if str(n) == str(n)[::-1]:
            print "{} x {} = {}".format(i, j, n)

for i in range(100, 1000):
    for j in range(i, 1000):
        n = i * j
        if str(n) == str(n)[::-1]:
            print "{} x {} = {}".format(i, j, n)
Solve problem 4 with python.
Solve problem 4 with python.
Python
mit
a-suenami/challenge-project-euler,a-suenami/challenge-project-euler
for i in range(10, 100):
    for j in range(i, 100):
        n = i * j
        if str(n) == str(n)[::-1]:
            print "{} x {} = {}".format(i, j, n)

for i in range(100, 1000):
    for j in range(i, 1000):
        n = i * j
        if str(n) == str(n)[::-1]:
            print "{} x {} = {}".format(i, j, n)
Solve problem 4 with python.
202fba50c287d3df99b22a4f30a96a3d8d9c8141
tests/test_pypi.py
tests/test_pypi.py
from unittest import TestCase

from semantic_release.pypi import upload_to_pypi

from . import mock


class PypiTests(TestCase):
    @mock.patch('semantic_release.pypi.run')
    def test_upload_without_arguments(self, mock_run):
        upload_to_pypi(username='username', password='password')
        self.assertEqual(
            mock_run.call_args_list,
            [
                mock.call('python setup.py sdist bdist_wheel'),
                mock.call('twine upload -u username -p password dist/*'),
                mock.call('rm -rf build dist')
            ]
        )
from unittest import TestCase

from semantic_release.pypi import upload_to_pypi

from . import mock


class PypiTests(TestCase):
    @mock.patch('semantic_release.pypi.run')
    def test_upload_without_arguments(self, mock_run):
        upload_to_pypi(username='username', password='password')
        self.assertEqual(
            mock_run.call_args_list,
            [
                mock.call('rm -rf build dist'),
                mock.call('python setup.py sdist bdist_wheel'),
                mock.call('twine upload -u username -p password dist/*'),
                mock.call('rm -rf build dist')
            ]
        )
Update test after adding cleaning of dist
test: Update test after adding cleaning of dist
Python
mit
relekang/python-semantic-release,relekang/python-semantic-release
from unittest import TestCase

from semantic_release.pypi import upload_to_pypi

from . import mock


class PypiTests(TestCase):
    @mock.patch('semantic_release.pypi.run')
    def test_upload_without_arguments(self, mock_run):
        upload_to_pypi(username='username', password='password')
        self.assertEqual(
            mock_run.call_args_list,
            [
                mock.call('rm -rf build dist'),
                mock.call('python setup.py sdist bdist_wheel'),
                mock.call('twine upload -u username -p password dist/*'),
                mock.call('rm -rf build dist')
            ]
        )
test: Update test after adding cleaning of dist
from unittest import TestCase

from semantic_release.pypi import upload_to_pypi

from . import mock


class PypiTests(TestCase):
    @mock.patch('semantic_release.pypi.run')
    def test_upload_without_arguments(self, mock_run):
        upload_to_pypi(username='username', password='password')
        self.assertEqual(
            mock_run.call_args_list,
            [
                mock.call('python setup.py sdist bdist_wheel'),
                mock.call('twine upload -u username -p password dist/*'),
                mock.call('rm -rf build dist')
            ]
        )
925864a916e5c06b58cac1caa3f2bac5907bbbd3
grader/grader/grade/__init__.py
grader/grader/grade/__init__.py
'''TODO: Grade package docs
'''
from grader.grade.main import grade
from docker import Client

help = "Grade assignments"


def setup_parser(parser):
    parser.add_argument('folder', metavar='folder',
                        help='Folder of tarballs or assignment folders.')
    parser.add_argument('--image', default='5201',
                        help='Docker image for assignments.')
    #NOTE: This could be done with volumes. Is that better..?
    parser.add_argument('--extra', default=None,
                        help='Extra files to copy into container (tarball).')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Force removal of conflicting containers '
                             'even if their image doesn\'t match.')
    parser.set_defaults(run=run)


def run(args):
    # Connect up with docker
    cli = Client(base_url='unix://var/run/docker.sock')
    grade(args, cli)
'''TODO: Grade package docs
'''
from grader.grade.main import grade
from docker import Client

help = "Grade assignments"


def setup_parser(parser):
    parser.add_argument('folder', metavar='folder',
                        help='Folder of tarballs or assignment folders.')
    parser.add_argument('--image', default='5201',
                        help='Docker image for assignments.')
    # NOTE: This could be done with volumes. Is that better..?
    parser.add_argument('--extra', default=None,
                        help='Extra files to copy into container (tarball).')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Force removal of conflicting containers '
                             'even if their image doesn\'t match.')
    parser.set_defaults(run=run)


def run(args):
    # Connect up with docker
    cli = Client(base_url='unix://var/run/docker.sock')
    grade(args, cli)
Fix a flake style issue
Fix a flake style issue
Python
mit
redkyn/grader,grade-it/grader,redkyn/grader
'''TODO: Grade package docs
'''
from grader.grade.main import grade
from docker import Client

help = "Grade assignments"


def setup_parser(parser):
    parser.add_argument('folder', metavar='folder',
                        help='Folder of tarballs or assignment folders.')
    parser.add_argument('--image', default='5201',
                        help='Docker image for assignments.')
    # NOTE: This could be done with volumes. Is that better..?
    parser.add_argument('--extra', default=None,
                        help='Extra files to copy into container (tarball).')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Force removal of conflicting containers '
                             'even if their image doesn\'t match.')
    parser.set_defaults(run=run)


def run(args):
    # Connect up with docker
    cli = Client(base_url='unix://var/run/docker.sock')
    grade(args, cli)
Fix a flake style issue

'''TODO: Grade package docs
'''
from grader.grade.main import grade
from docker import Client

help = "Grade assignments"


def setup_parser(parser):
    parser.add_argument('folder', metavar='folder',
                        help='Folder of tarballs or assignment folders.')
    parser.add_argument('--image', default='5201',
                        help='Docker image for assignments.')
    #NOTE: This could be done with volumes. Is that better..?
    parser.add_argument('--extra', default=None,
                        help='Extra files to copy into container (tarball).')
    parser.add_argument('--force', action='store_true', default=False,
                        help='Force removal of conflicting containers '
                             'even if their image doesn\'t match.')
    parser.set_defaults(run=run)


def run(args):
    # Connect up with docker
    cli = Client(base_url='unix://var/run/docker.sock')
    grade(args, cli)
2c7b64ea22ebe7e35945550f5726a72a721213b4
minitests/litex/src.yosys/missing_bit_report.py
minitests/litex/src.yosys/missing_bit_report.py
""" Generates a missing feature/bit report for LiteX design. This script is fairly fragile, because it depends on the specific observation that all of the remaining bits appear to either belong to HCLK_IOI or IOI3 tiles. A more general version of this script could be created, but that was not the point of this script. """ from fasm import parse_fasm_filename def main(): fasm_file = 'top.fasm' fasm_model = list(parse_fasm_filename(fasm_file)) unknown_bits = { 'HCLK_IOI': {}, 'IOI3': {}, } total_unknown = 0 for l in fasm_model: if l.annotations is None: continue annotations = {} for annotation in l.annotations: annotations[annotation.name] = annotation.value if 'unknown_bit' not in annotations: continue total_unknown += 1 frame, word, bit = annotations['unknown_bit'].split('_') frame = int(frame, 16) word = int(word) bit = int(bit) frame_offset = frame % 0x80 base_frame = frame - frame_offset # All remaining LiteX bits appear to be in this one IO bank, so limit # the tool this this one IO bank. assert base_frame == 0x00401580, hex(frame) SIZE = 4 INITIAL_OFFSET = -2 if word == 50: group = 'HCLK_IOI' offset = 50 elif word < 50: group = 'IOI3' offset = ((word - INITIAL_OFFSET) // SIZE) * SIZE + INITIAL_OFFSET else: group = 'IOI3' word -= 1 offset = ((word - INITIAL_OFFSET) // SIZE) * SIZE + INITIAL_OFFSET offset += 1 word += 1 bit = '{}_{:02d}'.format( frame_offset, (word - offset) * 32 + bit, ) if bit not in unknown_bits[group]: unknown_bits[group][bit] = 0 unknown_bits[group][bit] += 1 print('Total unknown bits: {}'.format(total_unknown)) for group in unknown_bits: print('Group {} (count = {}):'.format(group, len(unknown_bits[group]))) for bit in sorted(unknown_bits[group]): print(' {} (count = {})'.format(bit, unknown_bits[group][bit])) if __name__ == "__main__": main()
Create script for generating remaining bit report.
Create script for generating remaining bit report.

This report is fairly fragile, but works well enough for the remaining LiteX bits.

Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
Python
isc
SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray
""" Generates a missing feature/bit report for LiteX design. This script is fairly fragile, because it depends on the specific observation that all of the remaining bits appear to either belong to HCLK_IOI or IOI3 tiles. A more general version of this script could be created, but that was not the point of this script. """ from fasm import parse_fasm_filename def main(): fasm_file = 'top.fasm' fasm_model = list(parse_fasm_filename(fasm_file)) unknown_bits = { 'HCLK_IOI': {}, 'IOI3': {}, } total_unknown = 0 for l in fasm_model: if l.annotations is None: continue annotations = {} for annotation in l.annotations: annotations[annotation.name] = annotation.value if 'unknown_bit' not in annotations: continue total_unknown += 1 frame, word, bit = annotations['unknown_bit'].split('_') frame = int(frame, 16) word = int(word) bit = int(bit) frame_offset = frame % 0x80 base_frame = frame - frame_offset # All remaining LiteX bits appear to be in this one IO bank, so limit # the tool this this one IO bank. assert base_frame == 0x00401580, hex(frame) SIZE = 4 INITIAL_OFFSET = -2 if word == 50: group = 'HCLK_IOI' offset = 50 elif word < 50: group = 'IOI3' offset = ((word - INITIAL_OFFSET) // SIZE) * SIZE + INITIAL_OFFSET else: group = 'IOI3' word -= 1 offset = ((word - INITIAL_OFFSET) // SIZE) * SIZE + INITIAL_OFFSET offset += 1 word += 1 bit = '{}_{:02d}'.format( frame_offset, (word - offset) * 32 + bit, ) if bit not in unknown_bits[group]: unknown_bits[group][bit] = 0 unknown_bits[group][bit] += 1 print('Total unknown bits: {}'.format(total_unknown)) for group in unknown_bits: print('Group {} (count = {}):'.format(group, len(unknown_bits[group]))) for bit in sorted(unknown_bits[group]): print(' {} (count = {})'.format(bit, unknown_bits[group][bit])) if __name__ == "__main__": main()
Create script for generating remaining bit report.

This report is fairly fragile, but works well enough for the remaining LiteX bits.

Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
429bd22a98895252dfb993d770c9b3060fef0fe3
tests/runalldoctests.py
tests/runalldoctests.py
import doctest
import glob

import pkg_resources

try:
    pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
    pass

testfiles = glob.glob('*.txt')

for file in testfiles:
    doctest.testfile(file)
import doctest
import getopt
import glob
import sys

import pkg_resources

try:
    pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
    pass


def run(pattern):
    if pattern is None:
        testfiles = glob.glob('*.txt')
    else:
        testfiles = glob.glob(pattern)
    for file in testfiles:
        doctest.testfile(file)


if __name__ == "__main__":
    try:
        opts, args = getopt.getopt(sys.argv[1:], "t:v")
    except getopt.GetoptError:
        print "Usage: python runalldoctests.py [-t GLOB_PATTERN]"
        sys.exit(2)
    pattern = None
    for o, a in opts:
        if o == '-t':
            pattern = a
    run(pattern)
Add option to pick single test file from the runner
Add option to pick single test file from the runner
Python
bsd-3-clause
datagovuk/OWSLib,kwilcox/OWSLib,QuLogic/OWSLib,KeyproOy/OWSLib,tomkralidis/OWSLib,menegon/OWSLib,datagovuk/OWSLib,datagovuk/OWSLib,dblodgett-usgs/OWSLib,ocefpaf/OWSLib,mbertrand/OWSLib,gfusca/OWSLib,jaygoldfinch/OWSLib,daf/OWSLib,JuergenWeichand/OWSLib,bird-house/OWSLib,geographika/OWSLib,kalxas/OWSLib,Jenselme/OWSLib,robmcmullen/OWSLib,geopython/OWSLib,jachym/OWSLib,daf/OWSLib,daf/OWSLib,b-cube/OWSLib,jaygoldfinch/OWSLib
import doctest
import getopt
import glob
import sys

import pkg_resources

try:
    pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
    pass


def run(pattern):
    if pattern is None:
        testfiles = glob.glob('*.txt')
    else:
        testfiles = glob.glob(pattern)
    for file in testfiles:
        doctest.testfile(file)


if __name__ == "__main__":
    try:
        opts, args = getopt.getopt(sys.argv[1:], "t:v")
    except getopt.GetoptError:
        print "Usage: python runalldoctests.py [-t GLOB_PATTERN]"
        sys.exit(2)
    pattern = None
    for o, a in opts:
        if o == '-t':
            pattern = a
    run(pattern)
Add option to pick single test file from the runner

import doctest
import glob

import pkg_resources

try:
    pkg_resources.require('OWSLib')
except (ImportError, pkg_resources.DistributionNotFound):
    pass

testfiles = glob.glob('*.txt')

for file in testfiles:
    doctest.testfile(file)
affad020348ca8aa6a7b9431811d707ab8f6d99a
pyramid/__init__.py
pyramid/__init__.py
# -*- coding: utf-8 -*-
#
# Author: Taylor Smith <[email protected]>
#
# The pyramid module

__version__ = "0.7.0-dev"

try:
    # this var is injected in the setup build to enable
    # the retrieval of the version number without actually
    # importing the un-built submodules.
    __PYRAMID_SETUP__
except NameError:
    __PYRAMID_SETUP__ = False

if __PYRAMID_SETUP__:
    import sys
    import os
    sys.stderr.write('Partial import of pyramid during the build process.' + os.linesep)
else:
    # check that the build completed properly. This prints an informative
    # message in the case that any of the C code was not properly compiled.
    from . import __check_build

    __all__ = [
        'arima',
        'compat',
        'datasets',
        'utils'
    ]

    def setup_module(module):
        import numpy as np
        import random

        _random_seed = int(np.random.uniform() * (2 ** 31 - 1))
        np.random.seed(_random_seed)
        random.seed(_random_seed)
# -*- coding: utf-8 -*-
#
# Author: Taylor Smith <[email protected]>
#
# The pyramid module

__version__ = "0.7.0"

try:
    # this var is injected in the setup build to enable
    # the retrieval of the version number without actually
    # importing the un-built submodules.
    __PYRAMID_SETUP__
except NameError:
    __PYRAMID_SETUP__ = False

if __PYRAMID_SETUP__:
    import sys
    import os
    sys.stderr.write('Partial import of pyramid during the build process.' + os.linesep)
else:
    # check that the build completed properly. This prints an informative
    # message in the case that any of the C code was not properly compiled.
    from . import __check_build

    __all__ = [
        'arima',
        'compat',
        'datasets',
        'utils'
    ]

    def setup_module(module):
        import numpy as np
        import random

        _random_seed = int(np.random.uniform() * (2 ** 31 - 1))
        np.random.seed(_random_seed)
        random.seed(_random_seed)
Bump version for v0.7.0 release
Bump version for v0.7.0 release
Python
mit
tgsmith61591/pyramid,alkaline-ml/pmdarima,tgsmith61591/pyramid,tgsmith61591/pyramid,alkaline-ml/pmdarima,alkaline-ml/pmdarima
# -*- coding: utf-8 -*-
#
# Author: Taylor Smith <[email protected]>
#
# The pyramid module

__version__ = "0.7.0"

try:
    # this var is injected in the setup build to enable
    # the retrieval of the version number without actually
    # importing the un-built submodules.
    __PYRAMID_SETUP__
except NameError:
    __PYRAMID_SETUP__ = False

if __PYRAMID_SETUP__:
    import sys
    import os
    sys.stderr.write('Partial import of pyramid during the build process.' + os.linesep)
else:
    # check that the build completed properly. This prints an informative
    # message in the case that any of the C code was not properly compiled.
    from . import __check_build

    __all__ = [
        'arima',
        'compat',
        'datasets',
        'utils'
    ]

    def setup_module(module):
        import numpy as np
        import random

        _random_seed = int(np.random.uniform() * (2 ** 31 - 1))
        np.random.seed(_random_seed)
        random.seed(_random_seed)
Bump version for v0.7.0 release

# -*- coding: utf-8 -*-
#
# Author: Taylor Smith <[email protected]>
#
# The pyramid module

__version__ = "0.7.0-dev"

try:
    # this var is injected in the setup build to enable
    # the retrieval of the version number without actually
    # importing the un-built submodules.
    __PYRAMID_SETUP__
except NameError:
    __PYRAMID_SETUP__ = False

if __PYRAMID_SETUP__:
    import sys
    import os
    sys.stderr.write('Partial import of pyramid during the build process.' + os.linesep)
else:
    # check that the build completed properly. This prints an informative
    # message in the case that any of the C code was not properly compiled.
    from . import __check_build

    __all__ = [
        'arima',
        'compat',
        'datasets',
        'utils'
    ]

    def setup_module(module):
        import numpy as np
        import random

        _random_seed = int(np.random.uniform() * (2 ** 31 - 1))
        np.random.seed(_random_seed)
        random.seed(_random_seed)
33524fe8cad5f8bf4448c7dd7426d1e1452bb324
example_of_usage.py
example_of_usage.py
# -----------------------------------------------------------------------------
# Created: 04.03.2014
# Copyright: (c) Josua Schmid 2014
# Licence: GPLv3
#
# Sample script for parsing HTML tables
# -----------------------------------------------------------------------------

import urllib.request
from pprint import pprint

from html_table_parser import HTMLTableParser


def url_get_contents(url):
    """ Opens a website and read its binary contents (HTTP Response Body) """
    req = urllib.request.Request(url=url)
    f = urllib.request.urlopen(req)
    return f.read()


def main():
    url = 'http://www.twitter.com'
    xhtml = url_get_contents(url).decode('utf-8')

    p = HTMLTableParser()
    p.feed(xhtml)

    pprint(p.tables)


if __name__ == '__main__':
    main()
# -----------------------------------------------------------------------------
# Created: 04.03.2014
# Copyright: (c) Josua Schmid 2014
# Licence: AGPLv3
#
# Sample script for parsing HTML tables
# -----------------------------------------------------------------------------

import urllib.request
from pprint import pprint

from html_table_parser import HTMLTableParser


def url_get_contents(url):
    """ Opens a website and read its binary contents (HTTP Response Body) """
    req = urllib.request.Request(url=url)
    f = urllib.request.urlopen(req)
    return f.read()


def main():
    url = 'http://www.twitter.com'
    xhtml = url_get_contents(url).decode('utf-8')

    p = HTMLTableParser()
    p.feed(xhtml)

    pprint(p.tables)


if __name__ == '__main__':
    main()
Change license according to project license
Change license according to project license
Python
agpl-3.0
schmijos/html-table-parser-python3,schmijos/html-table-parser-python3
# -----------------------------------------------------------------------------
# Created: 04.03.2014
# Copyright: (c) Josua Schmid 2014
# Licence: AGPLv3
#
# Sample script for parsing HTML tables
# -----------------------------------------------------------------------------

import urllib.request
from pprint import pprint

from html_table_parser import HTMLTableParser


def url_get_contents(url):
    """ Opens a website and read its binary contents (HTTP Response Body) """
    req = urllib.request.Request(url=url)
    f = urllib.request.urlopen(req)
    return f.read()


def main():
    url = 'http://www.twitter.com'
    xhtml = url_get_contents(url).decode('utf-8')

    p = HTMLTableParser()
    p.feed(xhtml)

    pprint(p.tables)


if __name__ == '__main__':
    main()
Change license according to project license

# -----------------------------------------------------------------------------
# Created: 04.03.2014
# Copyright: (c) Josua Schmid 2014
# Licence: GPLv3
#
# Sample script for parsing HTML tables
# -----------------------------------------------------------------------------

import urllib.request
from pprint import pprint

from html_table_parser import HTMLTableParser


def url_get_contents(url):
    """ Opens a website and read its binary contents (HTTP Response Body) """
    req = urllib.request.Request(url=url)
    f = urllib.request.urlopen(req)
    return f.read()


def main():
    url = 'http://www.twitter.com'
    xhtml = url_get_contents(url).decode('utf-8')

    p = HTMLTableParser()
    p.feed(xhtml)

    pprint(p.tables)


if __name__ == '__main__':
    main()
8fc4713375c4eadd83ec376c3e839d921c39b5dc
src/encoded/predicates.py
src/encoded/predicates.py
from pyramid.security import has_permission


def includeme(config):
    config.add_view_predicate('subpath_segments', SubpathSegmentsPredicate)
    config.add_view_predicate('additional_permission', AdditionalPermissionPredicate)


class SubpathSegmentsPredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'subpath_segments = %r' % self.val

    phash = text

    def __call__(self, context, request):
        return len(request.subpath) == self.val


class AdditionalPermissionPredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'additional_permission = %r' % self.val

    phash = text

    def __call__(self, context, request):
        return has_permission(self.val, context, request)
from pyramid.security import has_permission


def includeme(config):
    config.add_view_predicate('subpath_segments', SubpathSegmentsPredicate)
    config.add_view_predicate('additional_permission', AdditionalPermissionPredicate)


class SubpathSegmentsPredicate(object):
    def __init__(self, val, config):
        if isinstance(val, int):
            val = (val,)
        self.val = frozenset(val)

    def text(self):
        return 'subpath_segments in %r' % sorted(self.val)

    phash = text

    def __call__(self, context, request):
        return len(request.subpath) in self.val


class AdditionalPermissionPredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'additional_permission = %r' % self.val

    phash = text

    def __call__(self, context, request):
        return has_permission(self.val, context, request)
Allow specification of multiple subpath_segments
Allow specification of multiple subpath_segments
Python
mit
4dn-dcic/fourfront,ClinGen/clincoded,kidaa/encoded,T2DREAM/t2dream-portal,philiptzou/clincoded,hms-dbmi/fourfront,philiptzou/clincoded,4dn-dcic/fourfront,4dn-dcic/fourfront,ENCODE-DCC/encoded,ENCODE-DCC/encoded,ClinGen/clincoded,T2DREAM/t2dream-portal,kidaa/encoded,ENCODE-DCC/snovault,ENCODE-DCC/snovault,ClinGen/clincoded,ENCODE-DCC/encoded,kidaa/encoded,T2DREAM/t2dream-portal,kidaa/encoded,4dn-dcic/fourfront,philiptzou/clincoded,ENCODE-DCC/snovault,ClinGen/clincoded,hms-dbmi/fourfront,hms-dbmi/fourfront,kidaa/encoded,hms-dbmi/fourfront,philiptzou/clincoded,philiptzou/clincoded,hms-dbmi/fourfront,ENCODE-DCC/snovault,ClinGen/clincoded,T2DREAM/t2dream-portal,ENCODE-DCC/snovault,ENCODE-DCC/encoded
from pyramid.security import has_permission


def includeme(config):
    config.add_view_predicate('subpath_segments', SubpathSegmentsPredicate)
    config.add_view_predicate('additional_permission', AdditionalPermissionPredicate)


class SubpathSegmentsPredicate(object):
    def __init__(self, val, config):
        if isinstance(val, int):
            val = (val,)
        self.val = frozenset(val)

    def text(self):
        return 'subpath_segments in %r' % sorted(self.val)

    phash = text

    def __call__(self, context, request):
        return len(request.subpath) in self.val


class AdditionalPermissionPredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'additional_permission = %r' % self.val

    phash = text

    def __call__(self, context, request):
        return has_permission(self.val, context, request)
Allow specification of multiple subpath_segments

from pyramid.security import has_permission


def includeme(config):
    config.add_view_predicate('subpath_segments', SubpathSegmentsPredicate)
    config.add_view_predicate('additional_permission', AdditionalPermissionPredicate)


class SubpathSegmentsPredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'subpath_segments = %r' % self.val

    phash = text

    def __call__(self, context, request):
        return len(request.subpath) == self.val


class AdditionalPermissionPredicate(object):
    def __init__(self, val, config):
        self.val = val

    def text(self):
        return 'additional_permission = %r' % self.val

    phash = text

    def __call__(self, context, request):
        return has_permission(self.val, context, request)
7b5de280562f5984b04c63432de8f28e03b57cbd
firecares/firestation/migrations/0020_update_greeley_headquarters_location.py
firecares/firestation/migrations/0020_update_greeley_headquarters_location.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.contrib.gis.geos import Point
from django.db import models, migrations
from genericm2m.utils import monkey_patch


class Migration(migrations.Migration):

    dependencies = [
        ('firestation', '0019_assign-station-number-2'),
        ('usgs', '0003_auto_20151105_2156')
    ]

    def update_greeley_headquarters_location(apps, schema_editor):
        FD = apps.get_model("firestation", "firedepartment")
        IP = apps.get_model("usgs", "IncorporatedPlace")

        # Have to patch this in since RelatedObjectsDescriptor won't be attached
        monkey_patch(FD, 'government_unit')

        greeley = IP.objects.filter(place_name='Greeley', state_name='Colorado').first()
        fd = FD.objects.filter(id=97668).first()

        if fd:
            fd.headquarters_address.geom = Point(-104.694001, 40.426638)
            fd.headquarters_address.save()
            fd.geom = greeley.geom
            fd.government_unit.connect(greeley)
            fd.population = greeley.population
            fd.save()

    operations = [
        migrations.RunPython(update_greeley_headquarters_location)
    ]
Move Union Colony Fire rescue authority to correct location
Move Union Colony Fire rescue authority to correct location
Python
mit
HunterConnelly/firecares,HunterConnelly/firecares,HunterConnelly/firecares,meilinger/firecares,FireCARES/firecares,FireCARES/firecares,HunterConnelly/firecares,FireCARES/firecares,FireCARES/firecares,meilinger/firecares,meilinger/firecares,meilinger/firecares,FireCARES/firecares
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.contrib.gis.geos import Point
from django.db import models, migrations
from genericm2m.utils import monkey_patch


class Migration(migrations.Migration):

    dependencies = [
        ('firestation', '0019_assign-station-number-2'),
        ('usgs', '0003_auto_20151105_2156')
    ]

    def update_greeley_headquarters_location(apps, schema_editor):
        FD = apps.get_model("firestation", "firedepartment")
        IP = apps.get_model("usgs", "IncorporatedPlace")

        # Have to patch this in since RelatedObjectsDescriptor won't be attached
        monkey_patch(FD, 'government_unit')

        greeley = IP.objects.filter(place_name='Greeley', state_name='Colorado').first()
        fd = FD.objects.filter(id=97668).first()

        if fd:
            fd.headquarters_address.geom = Point(-104.694001, 40.426638)
            fd.headquarters_address.save()
            fd.geom = greeley.geom
            fd.government_unit.connect(greeley)
            fd.population = greeley.population
            fd.save()

    operations = [
        migrations.RunPython(update_greeley_headquarters_location)
    ]
Move Union Colony Fire rescue authority to correct location
95e1d4c2ec42f09fddf48c5a32f0fe409132380b
lab/monitors/nova_service_list.py
lab/monitors/nova_service_list.py
def start(lab, log, args):
    import time
    from fabric.context_managers import shell_env

    grep_host = args.get('grep_host', 'overcloud-')
    duration = args['duration']
    period = args['period']
    statuses = {'up': 1, 'down': 0}
    server = lab.director()

    start_time = time.time()
    while start_time + duration > time.time():
        with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
            res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
            results = [line.split() for line in res.split('\n')]
            msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
            log.info('{1}'.format(grep_host, msg))
        time.sleep(period)
def start(lab, log, args):
    from fabric.context_managers import shell_env

    grep_host = args.get('grep_host', 'overcloud-')
    statuses = {'up': 1, 'down': 0}
    server = lab.director()

    with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
        res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
        results = [line.split() for line in res.split('\n')]
        msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
        log.info('{1}'.format(grep_host, msg))
Verify services status if FI is rebooted
Verify services status if FI is rebooted

Change-Id: Ia02ef16d53fbb7b55a8de884ff16a4bef345a1f2
Python
apache-2.0
CiscoSystems/os-sqe,CiscoSystems/os-sqe,CiscoSystems/os-sqe
def start(lab, log, args):
    from fabric.context_managers import shell_env

    grep_host = args.get('grep_host', 'overcloud-')
    statuses = {'up': 1, 'down': 0}
    server = lab.director()

    with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
        res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
        results = [line.split() for line in res.split('\n')]
        msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
        log.info('{1}'.format(grep_host, msg))
Verify services status if FI is rebooted

Change-Id: Ia02ef16d53fbb7b55a8de884ff16a4bef345a1f2

def start(lab, log, args):
    import time
    from fabric.context_managers import shell_env

    grep_host = args.get('grep_host', 'overcloud-')
    duration = args['duration']
    period = args['period']
    statuses = {'up': 1, 'down': 0}
    server = lab.director()

    start_time = time.time()
    while start_time + duration > time.time():
        with shell_env(OS_AUTH_URL=lab.cloud.end_point, OS_USERNAME=lab.cloud.user, OS_PASSWORD=lab.cloud.password, OS_TENANT_NAME=lab.cloud.tenant):
            res = server.run("nova service-list | grep {0} | awk '{{print $4 \" \" $6 \" \" $12}}'".format(grep_host), warn_only=True)
            results = [line.split() for line in res.split('\n')]
            msg = ' '.join(['{1}:{0}={2}'.format(r[0], r[1], statuses[r[2]]) for r in results])
            log.info('{1}'.format(grep_host, msg))
        time.sleep(period)
eb33d70bfda4857fbd76616cf3bf7fb7d7feec71
spoj/00005/palin.py
spoj/00005/palin.py
#!/usr/bin/env python3

def next_palindrome(k):
    palin = list(k)
    n = len(k)
    mid = n // 2

    # case 1: forward right
    just_copy = False
    for i in range(mid, n):
        mirrored = n - 1 - i
        if k[i] < k[mirrored]:
            just_copy = True
        if just_copy:
            palin[i] = palin[mirrored]

    # case 2: backward left
    if not just_copy:
        i = (n - 1) // 2
        while i >= 0 and k[i] == '9':
            i -= 1
        if i >= 0:
            palin[i] = str(int(k[i]) + 1)
            for j in range(i + 1, mid):
                palin[j] = '0'
            for j in range(mid, n):
                mirrored = n - 1 - j
                palin[j] = palin[mirrored]
        else:
            # case 3: "99...9" -> "100..01"
            palin = ['0'] * (n + 1)
            palin[0] = palin[-1] = '1'

    return ''.join(palin)


if __name__ == '__main__':
    t = int(input())
    for _ in range(t):
        k = input()
        print(next_palindrome(k))
#!/usr/bin/env python3

def next_palindrome(k):
    palin = list(k)
    n = len(k)
    mid = n // 2

    # case 1: forward right
    just_copy = False
    for i in range(mid, n):
        mirrored = n - 1 - i
        if k[i] < k[mirrored]:
            just_copy = True
        if just_copy:
            palin[i] = palin[mirrored]

    # case 2: backward left
    if not just_copy:
        i = (n - 1) // 2
        while i >= 0 and k[i] == '9':
            i -= 1
        if i >= 0:
            palin[i] = str(int(k[i]) + 1)
            for j in range(i + 1, (n + 1) // 2):
                palin[j] = '0'
            for j in range((n + 1) // 2, n):
                mirrored = n - 1 - j
                palin[j] = palin[mirrored]
        else:
            # case 3: "99...9" -> "100..01"
            palin = ['0'] * (n + 1)
            palin[0] = palin[-1] = '1'

    return ''.join(palin)


if __name__ == '__main__':
    t = int(input())
    for _ in range(t):
        k = input()
        print(next_palindrome(k))
Fix bug in ranges (to middle)
Fix bug in ranges (to middle)

- in SPOJ palin

Signed-off-by: Karel Ha <[email protected]>
Python
mit
mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming,mathemage/CompetitiveProgramming
#!/usr/bin/env python3

def next_palindrome(k):
    palin = list(k)
    n = len(k)
    mid = n // 2

    # case 1: forward right
    just_copy = False
    for i in range(mid, n):
        mirrored = n - 1 - i
        if k[i] < k[mirrored]:
            just_copy = True
        if just_copy:
            palin[i] = palin[mirrored]

    # case 2: backward left
    if not just_copy:
        i = (n - 1) // 2
        while i >= 0 and k[i] == '9':
            i -= 1
        if i >= 0:
            palin[i] = str(int(k[i]) + 1)
            for j in range(i + 1, (n + 1) // 2):
                palin[j] = '0'
            for j in range((n + 1) // 2, n):
                mirrored = n - 1 - j
                palin[j] = palin[mirrored]
        else:
            # case 3: "99...9" -> "100..01"
            palin = ['0'] * (n + 1)
            palin[0] = palin[-1] = '1'

    return ''.join(palin)


if __name__ == '__main__':
    t = int(input())
    for _ in range(t):
        k = input()
        print(next_palindrome(k))
Fix bug in ranges (to middle)

- in SPOJ palin

Signed-off-by: Karel Ha <[email protected]>

#!/usr/bin/env python3

def next_palindrome(k):
    palin = list(k)
    n = len(k)
    mid = n // 2

    # case 1: forward right
    just_copy = False
    for i in range(mid, n):
        mirrored = n - 1 - i
        if k[i] < k[mirrored]:
            just_copy = True
        if just_copy:
            palin[i] = palin[mirrored]

    # case 2: backward left
    if not just_copy:
        i = (n - 1) // 2
        while i >= 0 and k[i] == '9':
            i -= 1
        if i >= 0:
            palin[i] = str(int(k[i]) + 1)
            for j in range(i + 1, mid):
                palin[j] = '0'
            for j in range(mid, n):
                mirrored = n - 1 - j
                palin[j] = palin[mirrored]
        else:
            # case 3: "99...9" -> "100..01"
            palin = ['0'] * (n + 1)
            palin[0] = palin[-1] = '1'

    return ''.join(palin)


if __name__ == '__main__':
    t = int(input())
    for _ in range(t):
        k = input()
        print(next_palindrome(k))
4bc31e675659af54ee26fe5df16a0ee3ebeb5947
firefed/__main__.py
firefed/__main__.py
import argparse
import os
import re

from firefed import Firefed
from feature import feature_map, Summary


def feature_type(val):
    try:
        return feature_map()[val]
    except KeyError as key:
        raise argparse.ArgumentTypeError(
            'Feature %s not found. Choose from: {%s}' %
            (key, ', '.join(feature_map())))


def profile_dir(dirname):
    if dirname is None:
        dirname = 'default'
    if os.path.isdir(dirname):
        return dirname
    if re.match('^[\\w-]+$', dirname):
        home = os.path.expanduser('~/.mozilla/firefox')
        profile_names = os.listdir(home)
        for name in profile_names:
            if name.endswith('.%s' % dirname):
                return os.path.join(home, name)
    raise argparse.ArgumentTypeError('Profile %s not found.' % dirname)


def main():
    parser = argparse.ArgumentParser(
        'firefed',
        description=
        'Firefed is a Firefox profile analyzer focusing on privacy and security.',
    )
    parser.add_argument(
        '-p',
        '--profile',
        help='profile name or directory',
        type=profile_dir,
        required=True)
    parser.add_argument(
        '-f',
        '--feature',
        type=feature_type,
        default=Summary,
        help='{%s}' % ', '.join(feature_map()))
    parser.add_argument(
        '-s', '--summarize', action='store_true', help='summarize results')
    args = parser.parse_args()
    Firefed(args)


if __name__ == '__main__':
    main()
import argparse
import os
import re

from firefed import Firefed
from feature import feature_map, Summary


def feature_type(val):
    try:
        return feature_map()[val]
    except KeyError as key:
        raise argparse.ArgumentTypeError(
            'Feature %s not found. Choose from: {%s}' %
            (key, ', '.join(feature_map())))


def profile_dir(dirname):
    if dirname is None:
        dirname = 'default'
    if os.path.isdir(dirname):
        return dirname
    if re.match('^[\\w-]+$', dirname):
        home = os.path.expanduser('~/.mozilla/firefox')
        profile_names = os.listdir(home)
        for name in profile_names:
            if name.endswith('.%s' % dirname):
                return os.path.join(home, name)
    raise argparse.ArgumentTypeError('Profile %s not found.' % dirname)


def main():
    parser = argparse.ArgumentParser(
        'firefed',
        description=
        'Firefed is a Firefox profile analyzer focusing on privacy and security.',
    )
    parser.add_argument(
        '-p',
        '--profile',
        help='profile name or directory',
        type=profile_dir,
        default='default')
    parser.add_argument(
        '-f',
        '--feature',
        type=feature_type,
        default=Summary,
        help='{%s}' % ', '.join(feature_map()))
    parser.add_argument(
        '-s', '--summarize', action='store_true', help='summarize results')
    args = parser.parse_args()
    Firefed(args)


if __name__ == '__main__':
    main()
Add default argument for profile
Add default argument for profile
Python
mit
numirias/firefed
import argparse
import os
import re

from firefed import Firefed
from feature import feature_map, Summary


def feature_type(val):
    try:
        return feature_map()[val]
    except KeyError as key:
        raise argparse.ArgumentTypeError(
            'Feature %s not found. Choose from: {%s}' %
            (key, ', '.join(feature_map())))


def profile_dir(dirname):
    if dirname is None:
        dirname = 'default'
    if os.path.isdir(dirname):
        return dirname
    if re.match('^[\\w-]+$', dirname):
        home = os.path.expanduser('~/.mozilla/firefox')
        profile_names = os.listdir(home)
        for name in profile_names:
            if name.endswith('.%s' % dirname):
                return os.path.join(home, name)
    raise argparse.ArgumentTypeError('Profile %s not found.' % dirname)


def main():
    parser = argparse.ArgumentParser(
        'firefed',
        description=
        'Firefed is a Firefox profile analyzer focusing on privacy and security.',
    )
    parser.add_argument(
        '-p',
        '--profile',
        help='profile name or directory',
        type=profile_dir,
        default='default')
    parser.add_argument(
        '-f',
        '--feature',
        type=feature_type,
        default=Summary,
        help='{%s}' % ', '.join(feature_map()))
    parser.add_argument(
        '-s', '--summarize', action='store_true', help='summarize results')
    args = parser.parse_args()
    Firefed(args)


if __name__ == '__main__':
    main()
Add default argument for profile

import argparse
import os
import re

from firefed import Firefed
from feature import feature_map, Summary


def feature_type(val):
    try:
        return feature_map()[val]
    except KeyError as key:
        raise argparse.ArgumentTypeError(
            'Feature %s not found. Choose from: {%s}' %
            (key, ', '.join(feature_map())))


def profile_dir(dirname):
    if dirname is None:
        dirname = 'default'
    if os.path.isdir(dirname):
        return dirname
    if re.match('^[\\w-]+$', dirname):
        home = os.path.expanduser('~/.mozilla/firefox')
        profile_names = os.listdir(home)
        for name in profile_names:
            if name.endswith('.%s' % dirname):
                return os.path.join(home, name)
    raise argparse.ArgumentTypeError('Profile %s not found.' % dirname)


def main():
    parser = argparse.ArgumentParser(
        'firefed',
        description=
        'Firefed is a Firefox profile analyzer focusing on privacy and security.',
    )
    parser.add_argument(
        '-p',
        '--profile',
        help='profile name or directory',
        type=profile_dir,
        required=True)
    parser.add_argument(
        '-f',
        '--feature',
        type=feature_type,
        default=Summary,
        help='{%s}' % ', '.join(feature_map()))
    parser.add_argument(
        '-s', '--summarize', action='store_true', help='summarize results')
    args = parser.parse_args()
    Firefed(args)


if __name__ == '__main__':
    main()
faae5df8648afbfa5921bd67a7f3e082ba626a95
poyo/__init__.py
poyo/__init__.py
# -*- coding: utf-8 -*-

__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'

from .parser import parse_string

__all__ = ['parse_string']
# -*- coding: utf-8 -*-

from .parser import parse_string

__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'

__all__ = ['parse_string']
Move module level import to top of file
Move module level import to top of file
Python
mit
hackebrot/poyo
# -*- coding: utf-8 -*-

from .parser import parse_string

__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'

__all__ = ['parse_string']
Move module level import to top of file

# -*- coding: utf-8 -*-

__author__ = 'Raphael Pierzina'
__email__ = '[email protected]'
__version__ = '0.1.0'

from .parser import parse_string

__all__ = ['parse_string']
4604cf73a45e8bcecf38238366cfdac37cdb7897
pyfr/readers/base.py
pyfr/readers/base.py
# -*- coding: utf-8 -*-

import re
import uuid
import itertools as it

from abc import ABCMeta, abstractmethod

import numpy as np


class BaseReader(object):
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self):
        pass

    @abstractmethod
    def _to_raw_pyfrm(self):
        pass

    def _optimize(self, mesh):
        # Sort interior interfaces
        for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
            mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]

    def to_pyfrm(self):
        mesh = self._to_raw_pyfrm()

        # Perform some simple optimizations on the mesh
        self._optimize(mesh)

        # Add metadata
        mesh['mesh_uuid'] = str(uuid.uuid4())

        return mesh
# -*- coding: utf-8 -*-

import re
import uuid

from abc import ABCMeta, abstractmethod

import numpy as np


class BaseReader(object):
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self):
        pass

    @abstractmethod
    def _to_raw_pyfrm(self):
        pass

    def _optimize(self, mesh):
        # Sort interior interfaces
        for f in filter(lambda f: re.match(r'^con_p\d+$', f), mesh):
            mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]

    def to_pyfrm(self):
        mesh = self._to_raw_pyfrm()

        # Perform some simple optimizations on the mesh
        self._optimize(mesh)

        # Add metadata
        mesh['mesh_uuid'] = str(uuid.uuid4())

        return mesh
Fix a bug in the mesh optimizer.
Fix a bug in the mesh optimizer.
Python
bsd-3-clause
iyer-arvind/PyFR,tjcorona/PyFR,tjcorona/PyFR,BrianVermeire/PyFR,Aerojspark/PyFR,tjcorona/PyFR
# -*- coding: utf-8 -*-

import re
import uuid

from abc import ABCMeta, abstractmethod

import numpy as np


class BaseReader(object):
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self):
        pass

    @abstractmethod
    def _to_raw_pyfrm(self):
        pass

    def _optimize(self, mesh):
        # Sort interior interfaces
        for f in filter(lambda f: re.match(r'^con_p\d+$', f), mesh):
            mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]

    def to_pyfrm(self):
        mesh = self._to_raw_pyfrm()

        # Perform some simple optimizations on the mesh
        self._optimize(mesh)

        # Add metadata
        mesh['mesh_uuid'] = str(uuid.uuid4())

        return mesh
Fix a bug in the mesh optimizer.

# -*- coding: utf-8 -*-

import re
import uuid
import itertools as it

from abc import ABCMeta, abstractmethod

import numpy as np


class BaseReader(object):
    __metaclass__ = ABCMeta

    @abstractmethod
    def __init__(self):
        pass

    @abstractmethod
    def _to_raw_pyfrm(self):
        pass

    def _optimize(self, mesh):
        # Sort interior interfaces
        for f in it.ifilter(lambda f: re.match('con_p\d+', f), mesh):
            mesh[f] = mesh[f][:,np.argsort(mesh[f][0])]

    def to_pyfrm(self):
        mesh = self._to_raw_pyfrm()

        # Perform some simple optimizations on the mesh
        self._optimize(mesh)

        # Add metadata
        mesh['mesh_uuid'] = str(uuid.uuid4())

        return mesh
e1240aa33b286ba52507128458fc6d6b3b68dfb3
statsmodels/stats/multicomp.py
statsmodels/stats/multicomp.py
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012

Author: Josef Perktold
"""

from statsmodels.sandbox.stats.multicomp import MultiComparison


def pairwise_tukeyhsd(endog, groups, alpha=0.05):
    '''calculate all pairwise comparisons with TukeyHSD confidence intervals

    this is just a wrapper around tukeyhsd method of MultiComparison

    Parameters
    ----------
    endog : ndarray, float, 1d
        response variable
    groups : ndarray, 1d
        array with groups, can be string or integers
    alpha : float
        significance level for the test

    Returns
    -------
    results : TukeyHSDResults instance
        A results class containing relevant data and some post-hoc
        calculations

    See Also
    --------
    MultiComparison
    tukeyhsd
    statsmodels.sandbox.stats.multicomp.TukeyHSDResults
    '''

    return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012

Author: Josef Perktold
"""

from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison


def pairwise_tukeyhsd(endog, groups, alpha=0.05):
    '''calculate all pairwise comparisons with TukeyHSD confidence intervals

    this is just a wrapper around tukeyhsd method of MultiComparison

    Parameters
    ----------
    endog : ndarray, float, 1d
        response variable
    groups : ndarray, 1d
        array with groups, can be string or integers
    alpha : float
        significance level for the test

    Returns
    -------
    results : TukeyHSDResults instance
        A results class containing relevant data and some post-hoc
        calculations

    See Also
    --------
    MultiComparison
    tukeyhsd
    statsmodels.sandbox.stats.multicomp.TukeyHSDResults
    '''

    return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
Put back an import that my IDE incorrectly flagged as unused
Put back an import that my IDE incorrectly flagged as unused
Python
bsd-3-clause
gef756/statsmodels,detrout/debian-statsmodels,detrout/debian-statsmodels,bzero/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,musically-ut/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,rgommers/statsmodels,hlin117/statsmodels,ChadFulton/statsmodels,edhuckle/statsmodels,hainm/statsmodels,musically-ut/statsmodels,gef756/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,jseabold/statsmodels,jstoxrocky/statsmodels,adammenges/statsmodels,waynenilsen/statsmodels,bzero/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,huongttlan/statsmodels,alekz112/statsmodels,adammenges/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,yl565/statsmodels,phobson/statsmodels,alekz112/statsmodels,wzbozon/statsmodels,huongttlan/statsmodels,saketkc/statsmodels,hainm/statsmodels,hlin117/statsmodels,kiyoto/statsmodels,YihaoLu/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,wzbozon/statsmodels,jseabold/statsmodels,gef756/statsmodels,phobson/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,rgommers/statsmodels,astocko/statsmodels,bsipocz/statsmodels,edhuckle/statsmodels,bzero/statsmodels,jseabold/statsmodels,bert9bert/statsmodels,hlin117/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,wkfwkf/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,wdurhamh/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,nguyentu1602/statsmodels,kiyoto/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,wkfwkf/statsmodels,ChadFulton/statsmodels,detrout/debian-statsmodels,hainm/statsmodels,wwf5067/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,bert9bert/statsmodels,alekz112/statsmodels,nguyentu1602/statsmodels,hainm/statsmodels,bert9bert/statsmodels,yl565/statsmodels,nguyentu1602/statsmodels,Averroes/statsmodels,bzero/statsmodels,kiyoto/statsmodels,wwf5067/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,Averroes/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,nvoron23/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,Averroes/statsmodels,astocko/statsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,bzero/statsmodels,wzbozon/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,hlin117/statsmodels,alekz112/statsmodels,astocko/statsmodels,musically-ut/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,adammenges/statsmodels,yl565/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,rgommers/statsmodels,astocko/statsmodels,bashtage/statsmodels,phobson/statsmodels,Averroes/statsmodels,huongttlan/statsmodels,yl565/statsmodels,jstoxrocky/statsmodels,wkfwkf/statsmodels,bsipocz/statsmodels,cbmoore/statsmodels,gef756/statsmodels,wwf5067/statsmodels,jstoxrocky/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,bsipocz/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,detrout/debian-statsmodels,yl565/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012

Author: Josef Perktold
"""

from statsmodels.sandbox.stats.multicomp import tukeyhsd, MultiComparison


def pairwise_tukeyhsd(endog, groups, alpha=0.05):
    '''calculate all pairwise comparisons with TukeyHSD confidence intervals

    this is just a wrapper around tukeyhsd method of MultiComparison

    Parameters
    ----------
    endog : ndarray, float, 1d
        response variable
    groups : ndarray, 1d
        array with groups, can be string or integers
    alpha : float
        significance level for the test

    Returns
    -------
    results : TukeyHSDResults instance
        A results class containing relevant data and some post-hoc
        calculations

    See Also
    --------
    MultiComparison
    tukeyhsd
    statsmodels.sandbox.stats.multicomp.TukeyHSDResults
    '''

    return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
Put back an import that my IDE incorrectly flagged as unused

# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 18:27:25 2012

Author: Josef Perktold
"""

from statsmodels.sandbox.stats.multicomp import MultiComparison


def pairwise_tukeyhsd(endog, groups, alpha=0.05):
    '''calculate all pairwise comparisons with TukeyHSD confidence intervals

    this is just a wrapper around tukeyhsd method of MultiComparison

    Parameters
    ----------
    endog : ndarray, float, 1d
        response variable
    groups : ndarray, 1d
        array with groups, can be string or integers
    alpha : float
        significance level for the test

    Returns
    -------
    results : TukeyHSDResults instance
        A results class containing relevant data and some post-hoc
        calculations

    See Also
    --------
    MultiComparison
    tukeyhsd
    statsmodels.sandbox.stats.multicomp.TukeyHSDResults
    '''

    return MultiComparison(endog, groups).tukeyhsd(alpha=alpha)
c1785e0713a5af6b849baaa1b314a13ac777f3f5
tests/test_str_py3.py
tests/test_str_py3.py
from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits

import fastavro
from fastavro.compat import BytesIO

letters = ascii_uppercase + digits
id_size = 100

seed('str_py3')  # Repeatable results


def gen_id():
    return ''.join(choice(letters) for _ in range(id_size))


keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]

schema = {
    "fields": [{'name': key, 'type': 'string'} for key in keys],
    "namespace": "namespace",
    "name": "zerobyte",
    "type": "record"
}


def test_str_py3():
    buf = BytesIO()
    fastavro.writer(buf, schema, testdata)
    buf.seek(0, SEEK_SET)
    for i, rec in enumerate(fastavro.iter_avro(buf), 1):
        pass

    size = len(testdata)
    assert i == size, 'bad number of records'
    assert rec == testdata[-1], 'bad last record'


if __name__ == '__main__':
    test_str_py3()
# -*- coding: utf-8 -*-
"""Python3 string tests for fastavro"""

from __future__ import absolute_import

from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits

try:
    from cStringIO import StringIO as BytesIO
except ImportError:
    from io import BytesIO

import fastavro

letters = ascii_uppercase + digits
id_size = 100

seed('str_py3')  # Repeatable results


def gen_id():
    return ''.join(choice(letters) for _ in range(id_size))


keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]

schema = {
    "fields": [{'name': key, 'type': 'string'} for key in keys],
    "namespace": "namespace",
    "name": "zerobyte",
    "type": "record"
}


def test_str_py3():
    buf = BytesIO()
    fastavro.writer(buf, schema, testdata)
    buf.seek(0, SEEK_SET)
    for i, rec in enumerate(fastavro.iter_avro(buf), 1):
        pass

    size = len(testdata)
    assert i == size, 'bad number of records'
    assert rec == testdata[-1], 'bad last record'


if __name__ == '__main__':
    test_str_py3()
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.
Python
mit
e-heller/fastavro,e-heller/fastavro
# -*- coding: utf-8 -*-
"""Python3 string tests for fastavro"""

from __future__ import absolute_import

from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits

try:
    from cStringIO import StringIO as BytesIO
except ImportError:
    from io import BytesIO

import fastavro

letters = ascii_uppercase + digits
id_size = 100

seed('str_py3')  # Repeatable results


def gen_id():
    return ''.join(choice(letters) for _ in range(id_size))


keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]

schema = {
    "fields": [{'name': key, 'type': 'string'} for key in keys],
    "namespace": "namespace",
    "name": "zerobyte",
    "type": "record"
}


def test_str_py3():
    buf = BytesIO()
    fastavro.writer(buf, schema, testdata)
    buf.seek(0, SEEK_SET)
    for i, rec in enumerate(fastavro.iter_avro(buf), 1):
        pass

    size = len(testdata)
    assert i == size, 'bad number of records'
    assert rec == testdata[-1], 'bad last record'


if __name__ == '__main__':
    test_str_py3()
Test files shouldn't import 'fastavro.compat'. Just import BytesIO manually.

from os import SEEK_SET
from random import choice, seed
from string import ascii_uppercase, digits

import fastavro
from fastavro.compat import BytesIO

letters = ascii_uppercase + digits
id_size = 100

seed('str_py3')  # Repeatable results


def gen_id():
    return ''.join(choice(letters) for _ in range(id_size))


keys = ['first', 'second', 'third', 'fourth']
testdata = [dict((key, gen_id()) for key in keys) for _ in range(50)]

schema = {
    "fields": [{'name': key, 'type': 'string'} for key in keys],
    "namespace": "namespace",
    "name": "zerobyte",
    "type": "record"
}


def test_str_py3():
    buf = BytesIO()
    fastavro.writer(buf, schema, testdata)
    buf.seek(0, SEEK_SET)
    for i, rec in enumerate(fastavro.iter_avro(buf), 1):
        pass

    size = len(testdata)
    assert i == size, 'bad number of records'
    assert rec == testdata[-1], 'bad last record'


if __name__ == '__main__':
    test_str_py3()
a08604f85b82300a4a3b4f2e70f91c3ee129859b
openelex/tests/test_fetch.py
openelex/tests/test_fetch.py
from unittest import TestCase

from openelex.base.fetch import ErrorHandlingURLopener, HTTPError


class TestErrorHandlingURLopener(TestCase):
    def setUp(self):
        self.opener = ErrorHandlingURLopener()

    def test_404(self):
        url = "http://example.com/test.csv"
        self.assertRaises(HTTPError, self.opener.retrieve, url)
Add missing test for ErrorHandlingURLopener
Add missing test for ErrorHandlingURLopener

I forgot to add this with commit df9cfda52e952bb4d69cc0ee724e713cd6f468d5
Python
mit
cathydeng/openelections-core,datamade/openelections-core,openelections/openelections-core,datamade/openelections-core,openelections/openelections-core,cathydeng/openelections-core
from unittest import TestCase

from openelex.base.fetch import ErrorHandlingURLopener, HTTPError


class TestErrorHandlingURLopener(TestCase):
    def setUp(self):
        self.opener = ErrorHandlingURLopener()

    def test_404(self):
        url = "http://example.com/test.csv"
        self.assertRaises(HTTPError, self.opener.retrieve, url)
Add missing test for ErrorHandlingURLopener

I forgot to add this with commit df9cfda52e952bb4d69cc0ee724e713cd6f468d5
ff4c49b9d89d4f92804ce1d827015072b6b60b7b
addons/sale_margin/__init__.py
addons/sale_margin/__init__.py
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from functools import partial

import openerp
from openerp import api, SUPERUSER_ID

from . import models  # noqa
from . import report  # noqa


def uninstall_hook(cr, registry):
    def recreate_view(dbname):
        db_registry = openerp.modules.registry.Registry.new(dbname)
        with api.Environment.manage(), db_registry.cursor() as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            if 'sale.report' in env:
                env['sale.report'].init()
    cr.after("commit", partial(recreate_view, cr.dbname))
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from functools import partial

import odoo
from odoo import api, SUPERUSER_ID

from . import models  # noqa
from . import report  # noqa


def uninstall_hook(cr, registry):
    def recreate_view(dbname):
        db_registry = odoo.modules.registry.Registry.new(dbname)
        with api.Environment.manage(), db_registry.cursor() as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            if 'sale.report' in env:
                env['sale.report'].init()
    cr.after("commit", partial(recreate_view, cr.dbname))
Use odoo instead of openerp
[IMP] sale_margin: Use odoo instead of openerp

Closes #23451
Python
agpl-3.0
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from functools import partial

import odoo
from odoo import api, SUPERUSER_ID

from . import models  # noqa
from . import report  # noqa


def uninstall_hook(cr, registry):
    def recreate_view(dbname):
        db_registry = odoo.modules.registry.Registry.new(dbname)
        with api.Environment.manage(), db_registry.cursor() as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            if 'sale.report' in env:
                env['sale.report'].init()
    cr.after("commit", partial(recreate_view, cr.dbname))
[IMP] sale_margin: Use odoo instead of openerp

Closes #23451

# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

from functools import partial

import openerp
from openerp import api, SUPERUSER_ID

from . import models  # noqa
from . import report  # noqa


def uninstall_hook(cr, registry):
    def recreate_view(dbname):
        db_registry = openerp.modules.registry.Registry.new(dbname)
        with api.Environment.manage(), db_registry.cursor() as cr:
            env = api.Environment(cr, SUPERUSER_ID, {})
            if 'sale.report' in env:
                env['sale.report'].init()
    cr.after("commit", partial(recreate_view, cr.dbname))
10a0d12f39760d2c2d57f66bc445f0cb87cde69f
django_website/aggregator/management/commands/mark_defunct_feeds.py
django_website/aggregator/management/commands/mark_defunct_feeds.py
import urllib2

from django.core.management.base import BaseCommand

from django_website.apps.aggregator.models import Feed


class Command(BaseCommand):
    """
    Mark people with 404'ing feeds as defunct.
    """
    def handle(self, *args, **kwargs):
        verbose = kwargs.get('verbosity')
        for f in Feed.objects.all():
            try:
                socket.setdefaulttimeout(15)
                r = urllib2.urlopen(f.feed_url)
            except urllib2.HTTPError, e:
                if e.code == 404 or e.code == 500:
                    if verbose:
                        print "%s on %s; marking defunct" % (e.code, f)
                    f.is_defunct = True
                    f.save()
                else:
                    raise
import urllib2

from django.core.management.base import BaseCommand

from django_website.apps.aggregator.models import Feed


class Command(BaseCommand):
    """
    Mark people with 404'ing feeds as defunct.
    """
    def handle(self, *args, **kwargs):
        verbose = kwargs.get('verbosity')
        for f in Feed.objects.all():
            try:
                r = urllib2.urlopen(f.feed_url, timeout=15)
            except urllib2.HTTPError, e:
                if e.code == 404 or e.code == 500:
                    if verbose:
                        print "%s on %s; marking defunct" % (e.code, f)
                    f.is_defunct = True
                    f.save()
                else:
                    raise
Set feed update timeouts in a more modern way.
Set feed update timeouts in a more modern way.
Python
bsd-3-clause
vxvinh1511/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,nanuxbe/django,django/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,gnarf/djangoproject.com,gnarf/djangoproject.com,rmoorman/djangoproject.com,gnarf/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,alawnchen/djangoproject.com,nanuxbe/django,khkaminska/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,xavierdutreilh/djangoproject.com,rmoorman/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,hassanabidpk/djangoproject.com
import urllib2

from django.core.management.base import BaseCommand

from django_website.apps.aggregator.models import Feed


class Command(BaseCommand):
    """
    Mark people with 404'ing feeds as defunct.
    """
    def handle(self, *args, **kwargs):
        verbose = kwargs.get('verbosity')
        for f in Feed.objects.all():
            try:
                r = urllib2.urlopen(f.feed_url, timeout=15)
            except urllib2.HTTPError, e:
                if e.code == 404 or e.code == 500:
                    if verbose:
                        print "%s on %s; marking defunct" % (e.code, f)
                    f.is_defunct = True
                    f.save()
                else:
                    raise
Set feed update timeouts in a more modern way.

import urllib2

from django.core.management.base import BaseCommand

from django_website.apps.aggregator.models import Feed


class Command(BaseCommand):
    """
    Mark people with 404'ing feeds as defunct.
    """
    def handle(self, *args, **kwargs):
        verbose = kwargs.get('verbosity')
        for f in Feed.objects.all():
            try:
                socket.setdefaulttimeout(15)
                r = urllib2.urlopen(f.feed_url)
            except urllib2.HTTPError, e:
                if e.code == 404 or e.code == 500:
                    if verbose:
                        print "%s on %s; marking defunct" % (e.code, f)
                    f.is_defunct = True
                    f.save()
                else:
                    raise
6dfc5a3d7845633570b83aac06c47756292cf8ac
st2common/tests/unit/test_db_model_uids.py
st2common/tests/unit/test_db_model_uids.py
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest2

from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB

__all__ = [
    'DBModelUIDFieldTestCase'
]


class DBModelUIDFieldTestCase(unittest2.TestCase):
    def test_get_uid(self):
        pack_db = PackDB(ref='ma_pack')
        self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')

        sensor_type_db = SensorTypeDB(name='sname', pack='spack')
        self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')

        action_db = ActionDB(name='aname', pack='apack', runner_info={})
        self.assertEqual(action_db.get_uid(), 'action:apack:aname')

        rule_db = RuleDB(name='rname', pack='rpack')
        self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')

        trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
        self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')

        trigger_db = TriggerDB(name='tname', pack='tpack')
        self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
Add tests for get_uid() method for common DB models.
Add tests for get_uid() method for common DB models.
Python
apache-2.0
dennybaa/st2,StackStorm/st2,pixelrebel/st2,Itxaka/st2,Plexxi/st2,pixelrebel/st2,nzlosh/st2,punalpatel/st2,nzlosh/st2,Itxaka/st2,emedvedev/st2,dennybaa/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,peak6/st2,dennybaa/st2,StackStorm/st2,tonybaloney/st2,peak6/st2,StackStorm/st2,StackStorm/st2,armab/st2,alfasin/st2,nzlosh/st2,emedvedev/st2,Itxaka/st2,punalpatel/st2,armab/st2,pixelrebel/st2,alfasin/st2,lakshmi-kannan/st2,tonybaloney/st2,lakshmi-kannan/st2,emedvedev/st2,armab/st2,peak6/st2,nzlosh/st2,Plexxi/st2,lakshmi-kannan/st2,alfasin/st2,Plexxi/st2
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest2

from st2common.models.db.pack import PackDB
from st2common.models.db.sensor import SensorTypeDB
from st2common.models.db.action import ActionDB
from st2common.models.db.rule import RuleDB
from st2common.models.db.trigger import TriggerTypeDB
from st2common.models.db.trigger import TriggerDB

__all__ = [
    'DBModelUIDFieldTestCase'
]


class DBModelUIDFieldTestCase(unittest2.TestCase):
    def test_get_uid(self):
        pack_db = PackDB(ref='ma_pack')
        self.assertEqual(pack_db.get_uid(), 'pack:ma_pack')

        sensor_type_db = SensorTypeDB(name='sname', pack='spack')
        self.assertEqual(sensor_type_db.get_uid(), 'sensor_type:spack:sname')

        action_db = ActionDB(name='aname', pack='apack', runner_info={})
        self.assertEqual(action_db.get_uid(), 'action:apack:aname')

        rule_db = RuleDB(name='rname', pack='rpack')
        self.assertEqual(rule_db.get_uid(), 'rule:rpack:rname')

        trigger_type_db = TriggerTypeDB(name='ttname', pack='ttpack')
        self.assertEqual(trigger_type_db.get_uid(), 'trigger_type:ttpack:ttname')

        trigger_db = TriggerDB(name='tname', pack='tpack')
        self.assertTrue(trigger_db.get_uid().startswith('trigger:tpack:tname:'))
Add tests for get_uid() method for common DB models.
c1dc3c503d09e95321fc6f3fe3d7ab114ff58fc9
patty/segmentation/pointCloudMeasurer.py
patty/segmentation/pointCloudMeasurer.py
import numpy as np
from sklearn.decomposition import PCA


def measureLength(pointCloud):
    """Returns the length of a point cloud in its longest direction."""
    if len(pointCloud) == 0:
        return 0

    pca = PCA(n_components = 1)
    pca.fit(np.asarray(pointCloud))
    primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
    return np.max(primary_axis) - np.min(primary_axis)
import numpy as np
from sklearn.decomposition import PCA


def measureLength(pointCloud):
    """Returns the length of a point cloud in its longest direction."""
    if len(pointCloud) == 0:
        return 0

    pca = PCA(n_components = 1)
    pc_array = np.asarray(pointCloud)
    pca.fit(pc_array)
    primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
    return np.max(primary_axis) - np.min(primary_axis)
Make sure np.array is used for PCA in measureLength
Make sure np.array is used for PCA in measureLength
Python
apache-2.0
NLeSC/PattyAnalytics
import numpy as np
from sklearn.decomposition import PCA


def measureLength(pointCloud):
    """Returns the length of a point cloud in its longest direction."""
    if len(pointCloud) == 0:
        return 0

    pca = PCA(n_components = 1)
    pc_array = np.asarray(pointCloud)
    pca.fit(pc_array)
    primary_axis = np.dot(pc_array, np.transpose(pca.components_))[:,0]
    return np.max(primary_axis) - np.min(primary_axis)
Make sure np.array is used for PCA in measureLength
import numpy as np
from sklearn.decomposition import PCA


def measureLength(pointCloud):
    """Returns the length of a point cloud in its longest direction."""
    if len(pointCloud) == 0:
        return 0

    pca = PCA(n_components = 1)
    pca.fit(np.asarray(pointCloud))
    primary_axis = np.dot(pointCloud, np.transpose(pca.components_))[:,0]
    return np.max(primary_axis) - np.min(primary_axis)
44e5d35b6d43a22a480000b39a4e85335a27904b
corehq/apps/es/management/commands/wipe_es.py
corehq/apps/es/management/commands/wipe_es.py
from django.core.management import BaseCommand

from corehq.apps.cleanup.utils import confirm_destructive_operation
from corehq.elastic import get_es_new


class Command(BaseCommand):
    """
    Wipe all data from BlobDB.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, *args, **options):
        confirm_destructive_operation()

        data = wipe_es(options['commit'])
        if data:
            print(data)

        if not options['commit']:
            print("You need to run with --commit for the deletion to happen.")


def wipe_es(commit=False):
    """
    The equivalent of calling ::

        $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all"

    """
    es = get_es_new()
    if commit:
        return es.transport.perform_request('DELETE', '_all')
from django.core.management import BaseCommand

from corehq.apps.cleanup.utils import confirm_destructive_operation
from corehq.elastic import get_es_new
from corehq.util.es.elasticsearch import IndicesClient


class Command(BaseCommand):
    """
    Wipe all data from BlobDB.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, *args, **options):
        confirm_destructive_operation()

        data = wipe_es(options['commit'])
        if data:
            print(data)

        if not options['commit']:
            print("You need to run with --commit for the deletion to happen.")


def wipe_es(commit=False):
    """
    The equivalent of calling ::

        $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all"

    """
    es = get_es_new()
    client = IndicesClient(es)
    if commit:
        client.delete('_all')
Use IndicesClient to get full URL
Use IndicesClient to get full URL
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from django.core.management import BaseCommand

from corehq.apps.cleanup.utils import confirm_destructive_operation
from corehq.elastic import get_es_new
from corehq.util.es.elasticsearch import IndicesClient


class Command(BaseCommand):
    """
    Wipe all data from BlobDB.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, *args, **options):
        confirm_destructive_operation()

        data = wipe_es(options['commit'])
        if data:
            print(data)

        if not options['commit']:
            print("You need to run with --commit for the deletion to happen.")


def wipe_es(commit=False):
    """
    The equivalent of calling ::

        $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all"

    """
    es = get_es_new()
    client = IndicesClient(es)
    if commit:
        client.delete('_all')
Use IndicesClient to get full URL
from django.core.management import BaseCommand

from corehq.apps.cleanup.utils import confirm_destructive_operation
from corehq.elastic import get_es_new


class Command(BaseCommand):
    """
    Wipe all data from BlobDB.
    """

    def add_arguments(self, parser):
        parser.add_argument(
            '--commit',
            action='store_true',
            dest='commit',
            default=False,
        )

    def handle(self, *args, **options):
        confirm_destructive_operation()

        data = wipe_es(options['commit'])
        if data:
            print(data)

        if not options['commit']:
            print("You need to run with --commit for the deletion to happen.")


def wipe_es(commit=False):
    """
    The equivalent of calling ::

        $ curl -X DELETE "$PROTO://$HOSTNAME:$PORT/_all"

    """
    es = get_es_new()
    if commit:
        return es.transport.perform_request('DELETE', '_all')
2f1bcd83bf9069e5fc599aa20e1ed533bebd5e67
Detect_Face_Sides.py
Detect_Face_Sides.py
import numpy as np


def get_leftside_average(self):
    """Return Array of Left Most Points."""
    width = self.size[0]
    height = self.size[1]
    left_most_points = []
    for row in range(height):
        for column in range(width):
            if image.getpixel(row, column) > 200:
                left_most_points.append(column)
                break
    return np.median(left_most_points)
import numpy as np


def get_leftside_average(self):
    """Return the value of the Average of the left_most_points."""
    width = self.size[0]
    height = self.size[1]
    left_most_points = []
    for row in range(height):
        for column in range(width):
            if image.getpixel(row, column) > 200:
                left_most_points.append(row)
                break
    return np.median(left_most_points)


def get_rightside_average(self):
    """Return the value of the average of the right_most_points."""
    width = self.size[0]
    height = self.size[1]
    right_most_points = []
    for row in range(height):
        for column in range(width, -1, -1):  # Indices moving right to left
            if image.getpixel(row, column) > 200:
                right_most_points.append(row)
                break
    return np.median(right_most_points)
Add get_rightside_face and Fix bug
Add get_rightside_face and Fix bug
Python
mit
anassinator/codejam-2014,anassinator/codejam
import numpy as np


def get_leftside_average(self):
    """Return the value of the Average of the left_most_points."""
    width = self.size[0]
    height = self.size[1]
    left_most_points = []
    for row in range(height):
        for column in range(width):
            if image.getpixel(row, column) > 200:
                left_most_points.append(row)
                break
    return np.median(left_most_points)


def get_rightside_average(self):
    """Return the value of the average of the right_most_points."""
    width = self.size[0]
    height = self.size[1]
    right_most_points = []
    for row in range(height):
        for column in range(width, -1, -1):  # Indices moving right to left
            if image.getpixel(row, column) > 200:
                right_most_points.append(row)
                break
    return np.median(right_most_points)
Add get_rightside_face and Fix bug
import numpy as np


def get_leftside_average(self):
    """Return Array of Left Most Points."""
    width = self.size[0]
    height = self.size[1]
    left_most_points = []
    for row in range(height):
        for column in range(width):
            if image.getpixel(row, column) > 200:
                left_most_points.append(column)
                break
    return np.median(left_most_points)
2c2c51d5fa0594aa2d160d28c15895ece358cafe
setup.py
setup.py
#!/usr/bin/env python3

from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

setup(
    name = "VapourSynth",
    description = "A frameserver for the 21st century",
    url = "http://www.vapoursynth.com/",
    download_url = "http://code.google.com/p/vapoursynth/",
    author = "Fredrik Mellbin",
    author_email = "[email protected]",
    license = "LGPL 2.1 or later",
    version = "1.0.0",
    long_description = "A portable replacement for Avisynth",
    platforms = "All",
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
                             libraries = ["vapoursynth"],
                             library_dirs = [curdir, "build"],
                             include_dirs = [curdir, join("src", "cython")])]
)
#!/usr/bin/env python3

from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext

setup(
    name = "VapourSynth",
    description = "A frameserver for the 21st century",
    url = "http://www.vapoursynth.com/",
    download_url = "http://code.google.com/p/vapoursynth/",
    author = "Fredrik Mellbin",
    author_email = "[email protected]",
    license = "LGPL 2.1 or later",
    version = "1.0.0",
    long_description = "A portable replacement for Avisynth",
    platforms = "All",
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
                             libraries = ["vapoursynth"],
                             library_dirs = [curdir, "build"],
                             include_dirs = [curdir, join("src", "cython")],
                             cython_c_in_temp = 1)]
)
Use the Cython Extension class so we can place generated C files in the build dir.
Use the Cython Extension class so we can place generated C files in the build dir. git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f
Python
lgpl-2.1
Kamekameha/vapoursynth,Kamekameha/vapoursynth,Kamekameha/vapoursynth,vapoursynth/vapoursynth,vapoursynth/vapoursynth,Kamekameha/vapoursynth,vapoursynth/vapoursynth,vapoursynth/vapoursynth
#!/usr/bin/env python3

from os import curdir, pardir
from os.path import join
from distutils.core import setup
from Cython.Distutils import Extension, build_ext

setup(
    name = "VapourSynth",
    description = "A frameserver for the 21st century",
    url = "http://www.vapoursynth.com/",
    download_url = "http://code.google.com/p/vapoursynth/",
    author = "Fredrik Mellbin",
    author_email = "[email protected]",
    license = "LGPL 2.1 or later",
    version = "1.0.0",
    long_description = "A portable replacement for Avisynth",
    platforms = "All",
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
                             libraries = ["vapoursynth"],
                             library_dirs = [curdir, "build"],
                             include_dirs = [curdir, join("src", "cython")],
                             cython_c_in_temp = 1)]
)
Use the Cython Extension class so we can place generated C files in the build dir.

git-svn-id: ac1113e4705722bd5ee69cef058b32c421e857b8@491 f9120d27-2007-6f97-8312-0f4ebfa7710f
#!/usr/bin/env python3

from os import curdir, pardir
from os.path import join
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

setup(
    name = "VapourSynth",
    description = "A frameserver for the 21st century",
    url = "http://www.vapoursynth.com/",
    download_url = "http://code.google.com/p/vapoursynth/",
    author = "Fredrik Mellbin",
    author_email = "[email protected]",
    license = "LGPL 2.1 or later",
    version = "1.0.0",
    long_description = "A portable replacement for Avisynth",
    platforms = "All",
    cmdclass = {'build_ext': build_ext},
    ext_modules = [Extension("vapoursynth", [join("src", "cython", "vapoursynth.pyx")],
                             libraries = ["vapoursynth"],
                             library_dirs = [curdir, "build"],
                             include_dirs = [curdir, join("src", "cython")])]
)
dbb9becd09bbb9808060272b74e664afc354dfa8
modeltranslation/tests/settings.py
modeltranslation/tests/settings.py
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings

INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
    'modeltranslation.tests',
)

LANGUAGES = (('de', 'Deutsch'), ('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'

USE_I18N = True
USE_TZ = False

MIDDLEWARE_CLASSES = ()

MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()

ROOT_URLCONF = 'modeltranslation.tests.urls'

MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings

INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
    'modeltranslation.tests',
)

LANGUAGES = (('de', 'Deutsch'), ('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'

USE_I18N = True
USE_TZ = False

MIDDLEWARE_CLASSES = ()

MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()

ROOT_URLCONF = 'modeltranslation.tests.urls'

if django.VERSION < (1, 11):
    # TODO: Check what this was about
    MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
    MIGRATION_MODULES = {}
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.
Python
bsd-3-clause
deschler/django-modeltranslation,deschler/django-modeltranslation
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
import django
from django.conf import settings

INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
    'modeltranslation.tests',
)

LANGUAGES = (('de', 'Deutsch'), ('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'

USE_I18N = True
USE_TZ = False

MIDDLEWARE_CLASSES = ()

MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()

ROOT_URLCONF = 'modeltranslation.tests.urls'

if django.VERSION < (1, 11):
    # TODO: Check what this was about
    MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
else:
    MIGRATION_MODULES = {}
Set empty MIGRATION_MODULES setting for Django 1.11. Should make the tests run again.
# -*- coding: utf-8 -*-
"""
Settings overrided for test time
"""
from django.conf import settings

INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
    'modeltranslation.tests',
)

LANGUAGES = (('de', 'Deutsch'), ('en', 'English'))
LANGUAGE_CODE = 'de'
MODELTRANSLATION_DEFAULT_LANGUAGE = 'de'

USE_I18N = True
USE_TZ = False

MIDDLEWARE_CLASSES = ()

MODELTRANSLATION_AUTO_POPULATE = False
MODELTRANSLATION_FALLBACK_LANGUAGES = ()

ROOT_URLCONF = 'modeltranslation.tests.urls'

MIGRATION_MODULES = {'auth': 'modeltranslation.tests.auth_migrations'}
4bf26b6d976171b5a388134ad9716af639f15a3b
setup.py
setup.py
import os

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()

requires = [
    'oauth2client',
]

tests_require = []

testing_requires = tests_require + [
    'nose',
    'coverage',
]

develop_requires = [
]

setup(name='acctwatch',
      version='0.0',
      description='acctwatch',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
      ],
      author='Bert JW Regeer',
      author_email='[email protected]',
      url='',
      keywords='',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='acctwatch.tests',
      install_requires=requires,
      tests_require=tests_require,
      extras_require = {
          'develop': develop_requires,
          'testing': testing_requires,
      },
      entry_points="""\
      [console_scripts]
      acctwatch = acctwatch.acctwatch:main
      """,
      )
import os

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()

requires = [
    'oauth2client',
    'google-api-python-client',
]

tests_require = []

testing_requires = tests_require + [
    'nose',
    'coverage',
]

develop_requires = [
]

setup(name='acctwatch',
      version='0.0',
      description='acctwatch',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
      ],
      author='Bert JW Regeer',
      author_email='[email protected]',
      url='',
      keywords='',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='acctwatch.tests',
      install_requires=requires,
      tests_require=tests_require,
      extras_require = {
          'develop': develop_requires,
          'testing': testing_requires,
      },
      entry_points="""\
      [console_scripts]
      acctwatch = acctwatch.acctwatch:main
      """,
      )
Add the Google API client as a requirement
Add the Google API client as a requirement
Python
isc
GuardedRisk/Google-Apps-Auditing
import os

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()

requires = [
    'oauth2client',
    'google-api-python-client',
]

tests_require = []

testing_requires = tests_require + [
    'nose',
    'coverage',
]

develop_requires = [
]

setup(name='acctwatch',
      version='0.0',
      description='acctwatch',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
      ],
      author='Bert JW Regeer',
      author_email='[email protected]',
      url='',
      keywords='',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='acctwatch.tests',
      install_requires=requires,
      tests_require=tests_require,
      extras_require = {
          'develop': develop_requires,
          'testing': testing_requires,
      },
      entry_points="""\
      [console_scripts]
      acctwatch = acctwatch.acctwatch:main
      """,
      )
Add the Google API client as a requirement
import os

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README')).read()
CHANGES = open(os.path.join(here, 'CHANGES')).read()

requires = [
    'oauth2client',
]

tests_require = []

testing_requires = tests_require + [
    'nose',
    'coverage',
]

develop_requires = [
]

setup(name='acctwatch',
      version='0.0',
      description='acctwatch',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
      ],
      author='Bert JW Regeer',
      author_email='[email protected]',
      url='',
      keywords='',
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      test_suite='acctwatch.tests',
      install_requires=requires,
      tests_require=tests_require,
      extras_require = {
          'develop': develop_requires,
          'testing': testing_requires,
      },
      entry_points="""\
      [console_scripts]
      acctwatch = acctwatch.acctwatch:main
      """,
      )
72fb6ca12b685809bd5de0c5df9f051eef1163c4
test/TestBaseUtils.py
test/TestBaseUtils.py
import unittest
import sys
sys.path.append('../src')

import BaseUtils


class TestBaseUtils(unittest.TestCase):

    def test_word_segmenter(self):
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_word_segmenter_ignores_whitespace(self):
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_word_segmenter_ignores_special_chars(self):
        segments = BaseUtils.get_words('this is $$%%a random --00sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])


if __name__ == '__main__':
    unittest.main()
'''
Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')

import BaseUtils


class TestBaseUtils(unittest.TestCase):
    '''
    Main test class for the BaseUtils
    '''

    def test_word_segmenter_with_empty(self):
        '''
        For an empty string, the segmenter returns just an empty list
        '''
        segments = BaseUtils.get_words('')
        self.assertEqual(segments, [])

    def test_word_segmenter(self):
        '''
        The word segmenter returns the expected array of strings
        '''
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_ignoring_whitespace(self):
        '''
        Whitespace in the input string is ignored in the input string
        '''
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_ignoring_special_chars(self):
        '''
        If there are special characters in the input, they are ignored as well
        '''
        segments = BaseUtils.get_words('this is $$%%a random --00sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])


if __name__ == '__main__':
    unittest.main()
Add test for empty string; cleanup
Add test for empty string; cleanup
Python
bsd-2-clause
ambidextrousTx/RNLTK
'''
Tests for BaseUtils
'''
import unittest
import sys
sys.path.append('../src')

import BaseUtils


class TestBaseUtils(unittest.TestCase):
    '''
    Main test class for the BaseUtils
    '''

    def test_word_segmenter_with_empty(self):
        '''
        For an empty string, the segmenter returns just an empty list
        '''
        segments = BaseUtils.get_words('')
        self.assertEqual(segments, [])

    def test_word_segmenter(self):
        '''
        The word segmenter returns the expected array of strings
        '''
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_ignoring_whitespace(self):
        '''
        Whitespace in the input string is ignored in the input string
        '''
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_ignoring_special_chars(self):
        '''
        If there are special characters in the input, they are ignored as well
        '''
        segments = BaseUtils.get_words('this is $$%%a random --00sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])


if __name__ == '__main__':
    unittest.main()
Add test for empty string; cleanup
import unittest
import sys
sys.path.append('../src')

import BaseUtils


class TestBaseUtils(unittest.TestCase):

    def test_word_segmenter(self):
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_word_segmenter_ignores_whitespace(self):
        segments = BaseUtils.get_words('this is a random sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])

    def test_word_segmenter_ignores_special_chars(self):
        segments = BaseUtils.get_words('this is $$%%a random --00sentence')
        self.assertEqual(segments, ['this', 'is', 'a', 'random', 'sentence'])


if __name__ == '__main__':
    unittest.main()
5d448435477ce94273051b8351275d8c18838b8b
icekit/utils/fluent_contents.py
icekit/utils/fluent_contents.py
from django.contrib.contenttypes.models import ContentType

# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################

# Fluent Contents Helper Functions #################################################################
def create_content_instance(content_plugin_class, page, placeholder_name='main', **kwargs):
    """
    Creates a content instance from a content plugin class.

    :param content_plugin_class: The class of the content plugin.
    :param page: The fluent_page instance to create the content instance one.
    :param placeholder_name: The placeholder name defined in the template. [DEFAULT: main]
    :param kwargs: Additional keyword arguments to be used in the content instance creation.
    :return: The content instance created.
    """
    # Get the placeholders that are currently available for the slot.
    placeholders = page.get_placeholder_by_slot(placeholder_name)

    # If a placeholder exists for the placeholder_name use the first one provided otherwise create
    # a new placeholder instance.
    if placeholders.exists():
        placeholder = placeholders[0]
    else:
        placeholder = page.create_placeholder(placeholder_name)

    # Obtain the content type for the page instance class.
    ct = ContentType.objects.get_for_model(type(page))

    # Create the actual plugin instance.
    content_instance = content_plugin_class.objects.create(
        parent_type=ct,
        parent_id=page.id,
        placeholder=placeholder,
        **kwargs
    )

    return content_instance
# END Fluent Contents Helper Functions #############################################################
from django.contrib.contenttypes.models import ContentType

# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################

# Fluent Contents Helper Functions #################################################################
def create_content_instance(content_plugin_class, test_page, placeholder_name='main', **kwargs):
    """
    Creates a content instance from a content plugin class.

    :param content_plugin_class: The class of the content plugin.
    :param test_page: The fluent_page instance to create the content instance one.
    :param placeholder_name: The placeholder name defined in the template. [DEFAULT: main]
    :param kwargs: Additional keyword arguments to be used in the content instance creation.
    :return: The content instance created.
    """
    # Get the placeholders that are currently available for the slot.
    placeholders = test_page.get_placeholder_by_slot(placeholder_name)

    # If a placeholder exists for the placeholder_name use the first one provided otherwise create
    # a new placeholder instance.
    if placeholders.exists():
        placeholder = placeholders[0]
    else:
        placeholder = test_page.create_placeholder(placeholder_name)

    # Obtain the content type for the page instance class.
    ct = ContentType.objects.get_for_model(type(test_page))

    # Create the actual plugin instance.
    content_instance = content_plugin_class.objects.create(
        parent_type=ct,
        parent_id=test_page.id,
        placeholder=placeholder,
        **kwargs
    )

    return content_instance
# END Fluent Contents Helper Functions #############################################################
Change argument name to stop probable name clash.
Change argument name to stop probable name clash.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
from django.contrib.contenttypes.models import ContentType

# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################

# Fluent Contents Helper Functions #################################################################
def create_content_instance(content_plugin_class, test_page, placeholder_name='main', **kwargs):
    """
    Creates a content instance from a content plugin class.

    :param content_plugin_class: The class of the content plugin.
    :param test_page: The fluent_page instance to create the content instance one.
    :param placeholder_name: The placeholder name defined in the template. [DEFAULT: main]
    :param kwargs: Additional keyword arguments to be used in the content instance creation.
    :return: The content instance created.
    """
    # Get the placeholders that are currently available for the slot.
    placeholders = test_page.get_placeholder_by_slot(placeholder_name)

    # If a placeholder exists for the placeholder_name use the first one provided otherwise create
    # a new placeholder instance.
    if placeholders.exists():
        placeholder = placeholders[0]
    else:
        placeholder = test_page.create_placeholder(placeholder_name)

    # Obtain the content type for the page instance class.
    ct = ContentType.objects.get_for_model(type(test_page))

    # Create the actual plugin instance.
    content_instance = content_plugin_class.objects.create(
        parent_type=ct,
        parent_id=test_page.id,
        placeholder=placeholder,
        **kwargs
    )

    return content_instance
# END Fluent Contents Helper Functions #############################################################
Change argument name to stop probable name clash.
from django.contrib.contenttypes.models import ContentType

# USEFUL FUNCTIONS FOR FLUENT CONTENTS #############################################################

# Fluent Contents Helper Functions #################################################################
def create_content_instance(content_plugin_class, page, placeholder_name='main', **kwargs):
    """
    Creates a content instance from a content plugin class.

    :param content_plugin_class: The class of the content plugin.
    :param page: The fluent_page instance to create the content instance one.
    :param placeholder_name: The placeholder name defined in the template. [DEFAULT: main]
    :param kwargs: Additional keyword arguments to be used in the content instance creation.
    :return: The content instance created.
    """
    # Get the placeholders that are currently available for the slot.
    placeholders = page.get_placeholder_by_slot(placeholder_name)

    # If a placeholder exists for the placeholder_name use the first one provided otherwise create
    # a new placeholder instance.
    if placeholders.exists():
        placeholder = placeholders[0]
    else:
        placeholder = page.create_placeholder(placeholder_name)

    # Obtain the content type for the page instance class.
    ct = ContentType.objects.get_for_model(type(page))

    # Create the actual plugin instance.
    content_instance = content_plugin_class.objects.create(
        parent_type=ct,
        parent_id=page.id,
        placeholder=placeholder,
        **kwargs
    )

    return content_instance
# END Fluent Contents Helper Functions #############################################################
922c6350fda965068927611348bdd9127ee405d9
scaffolder/commands/vcs.py
scaffolder/commands/vcs.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS


class VcsCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-u",
            "--url",
            dest="url",
            help='Clone URL, it can be SSH or HTTPS. Git only for now.',
            metavar="REPO_URL"
        ),
        make_option(
            "-t",
            "--target",
            dest="target",
            default='.',
            help="Target directory where the repo will be cloned.",
            metavar="TARGET"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'Command to clone github repos'
        aliases = ('git','hg',)
        #TODO: Move to BaseCommand, create methods and have each subcommand override
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} [OPTIONS] FILE...'.format(name),
            description='',
            epilog=''
        )
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
        # self.update_parser()

    def update_parser(self):
        self.parser.set_usage('%prog [OPTIONS] FILE...')
        # self.parser.prog = '%s %s' % (self.parser.get_prog_name(), self.name)
        self.parser.version = self.get_version()
        self.parser.option_list = sorted(self.get_option_list())

    def run(self, *args, **options):
        url = options.get('url')
        tgt = options.get('target')
        boot = VCS(url)
        boot.clone(target_dir=tgt)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS


class VcsCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-u",
            "--url",
            dest="url",
            help='Clone URL, it can be SSH or HTTPS. Git only for now.',
            metavar="REPO_URL"
        ),
        make_option(
            "-t",
            "--target",
            dest="target",
            default='.',
            help="Target directory where the repo will be cloned.",
            metavar="TARGET"
        ),
    )

    help = 'Command to clone github repos'

    def run(self, *args, **options):
        url = options.get('url')
        tgt = options.get('target')
        boot = VCS(url)
        boot.clone(target_dir=tgt)
Remove __init__ method, not needed.
VcsCommand: Remove __init__ method, not needed.
Python
mit
goliatone/minions
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS


class VcsCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-u",
            "--url",
            dest="url",
            help='Clone URL, it can be SSH or HTTPS. Git only for now.',
            metavar="REPO_URL"
        ),
        make_option(
            "-t",
            "--target",
            dest="target",
            default='.',
            help="Target directory where the repo will be cloned.",
            metavar="TARGET"
        ),
    )

    help = 'Command to clone github repos'

    def run(self, *args, **options):
        url = options.get('url')
        tgt = options.get('target')
        boot = VCS(url)
        boot.clone(target_dir=tgt)
VcsCommand: Remove __init__ method, not needed.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from optparse import make_option
from optparse import OptionParser
from scaffolder.core.commands import BaseCommand
from scaffolder.vcs import VCS


class VcsCommand(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option(
            "-u",
            "--url",
            dest="url",
            help='Clone URL, it can be SSH or HTTPS. Git only for now.',
            metavar="REPO_URL"
        ),
        make_option(
            "-t",
            "--target",
            dest="target",
            default='.',
            help="Target directory where the repo will be cloned.",
            metavar="TARGET"
        ),
    )

    def __init__(self, name, help='', aliases=(), stdout=None, stderr=None):
        help = 'Command to clone github repos'
        aliases = ('git','hg',)
        #TODO: Move to BaseCommand, create methods and have each subcommand override
        parser = OptionParser(
            version=self.get_version(),
            option_list=self.get_option_list(),
            usage='\n %prog {0} [OPTIONS] FILE...'.format(name),
            description='',
            epilog=''
        )
        BaseCommand.__init__(self, name, parser=parser, help=help, aliases=aliases)
        # self.update_parser()

    def update_parser(self):
        self.parser.set_usage('%prog [OPTIONS] FILE...')
        # self.parser.prog = '%s %s' % (self.parser.get_prog_name(), self.name)
        self.parser.version = self.get_version()
        self.parser.option_list = sorted(self.get_option_list())

    def run(self, *args, **options):
        url = options.get('url')
        tgt = options.get('target')
        boot = VCS(url)
        boot.clone(target_dir=tgt)
79358f9eb3b12b45d3e1ebe8840aed9e9d8a7274
setup.py
setup.py
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='django-oscar-fancypages',
    version=":versiontools:fancypages:",
    url='https://github.com/tangentlabs/django-oscar-fancypages',
    author="Sebastian Vetter",
    author_email="[email protected]",
    description="Adding fancy CMS-style pages to Oscar",
    long_description=open('README.rst').read(),
    keywords="django, oscar, e-commerce, cms, pages, flatpages",
    license='BSD',
    platforms=['linux'],
    packages=find_packages(exclude=["sandbox*", "tests*"]),
    include_package_data=True,
    install_requires=[
        'versiontools>=1.9.1',
        'Django>=1.4.1',
        'django-oscar>=0.3',
        'django-model-utils>=1.1.0',
        'django-compressor>=1.2',
    ],
    dependency_links=[
        'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4'
    ],
    # See http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        #'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Programming Language :: Python'
    ]
)
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='django-oscar-fancypages',
    version=":versiontools:fancypages:",
    url='https://github.com/tangentlabs/django-oscar-fancypages',
    author="Sebastian Vetter",
    author_email="[email protected]",
    description="Adding fancy CMS-style pages to Oscar",
    long_description=open('README.rst').read(),
    keywords="django, oscar, e-commerce, cms, pages, flatpages",
    license='BSD',
    platforms=['linux'],
    packages=find_packages(exclude=["sandbox*", "tests*"]),
    include_package_data=True,
    install_requires=[
        'versiontools>=1.9.1',
        'Django>=1.4.2',
        'django-oscar>=0.3',
        'django-model-utils>=1.1.0',
        'django-compressor>=1.2',
    ],
    dependency_links=[
        'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4'
    ],
    # See http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        #'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Programming Language :: Python'
    ]
)
Update django to use latest security release
Update django to use latest security release
Python
bsd-3-clause
tangentlabs/django-oscar-fancypages,tangentlabs/django-oscar-fancypages
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='django-oscar-fancypages',
    version=":versiontools:fancypages:",
    url='https://github.com/tangentlabs/django-oscar-fancypages',
    author="Sebastian Vetter",
    author_email="[email protected]",
    description="Adding fancy CMS-style pages to Oscar",
    long_description=open('README.rst').read(),
    keywords="django, oscar, e-commerce, cms, pages, flatpages",
    license='BSD',
    platforms=['linux'],
    packages=find_packages(exclude=["sandbox*", "tests*"]),
    include_package_data=True,
    install_requires=[
        'versiontools>=1.9.1',
        'Django>=1.4.2',
        'django-oscar>=0.3',
        'django-model-utils>=1.1.0',
        'django-compressor>=1.2',
    ],
    dependency_links=[
        'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4'
    ],
    # See http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        #'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Programming Language :: Python'
    ]
)
Update django to use latest security release
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='django-oscar-fancypages',
    version=":versiontools:fancypages:",
    url='https://github.com/tangentlabs/django-oscar-fancypages',
    author="Sebastian Vetter",
    author_email="[email protected]",
    description="Adding fancy CMS-style pages to Oscar",
    long_description=open('README.rst').read(),
    keywords="django, oscar, e-commerce, cms, pages, flatpages",
    license='BSD',
    platforms=['linux'],
    packages=find_packages(exclude=["sandbox*", "tests*"]),
    include_package_data=True,
    install_requires=[
        'versiontools>=1.9.1',
        'Django>=1.4.1',
        'django-oscar>=0.3',
        'django-model-utils>=1.1.0',
        'django-compressor>=1.2',
    ],
    dependency_links=[
        'http://github.com/tangentlabs/django-oscar/tarball/master#egg=django-oscar-0.4'
    ],
    # See http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        #'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Programming Language :: Python'
    ]
)
d425fa64ece6826d299ca2daadb9a08afa6e87b5
test/test_searchentities.py
test/test_searchentities.py
import unittest

from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F


class QueryResultToDictTest(unittest.TestCase):
    def setUp(self):
        self.entity = E(models.B, [
            F("id", "id"),
            F("c_bar", "c.bar"),
            F("c_bar_trans", "c.bar",
              transformfunc=lambda v: v.union(set(["yay"])))
            ],
            1.1
        )
        self.expected = {
            "id": 1,
            "c_bar": "foo",
            "c_bar_trans": set(["foo", "yay"]),
        }
        c = models.C(id=2, bar="foo")
        self.val = models.B(id=1, c=c)

    def test_fields(self):
        res = self.entity.query_result_to_dict(self.val)
        self.assertDictEqual(self.expected, res)
Add a test for query_result_to_dict
Add a test for query_result_to_dict
Python
mit
jeffweeksio/sir
import unittest

from . import models
from sir.schema.searchentities import SearchEntity as E, SearchField as F


class QueryResultToDictTest(unittest.TestCase):
    def setUp(self):
        self.entity = E(models.B, [
            F("id", "id"),
            F("c_bar", "c.bar"),
            F("c_bar_trans", "c.bar",
              transformfunc=lambda v: v.union(set(["yay"])))
            ],
            1.1
        )
        self.expected = {
            "id": 1,
            "c_bar": "foo",
            "c_bar_trans": set(["foo", "yay"]),
        }
        c = models.C(id=2, bar="foo")
        self.val = models.B(id=1, c=c)

    def test_fields(self):
        res = self.entity.query_result_to_dict(self.val)
        self.assertDictEqual(self.expected, res)
Add a test for query_result_to_dict
0b1a4a57a9e8f9e7038fb4330cbac68596f5960c
tests/plugins/test_property_mod_tracker.py
tests/plugins/test_property_mod_tracker.py
import sqlalchemy as sa

from sqlalchemy_continuum.plugins import PropertyModTrackerPlugin
from tests import TestCase


class TestPropertyModificationsTracking(TestCase):
    plugins = [PropertyModTrackerPlugin]

    def create_models(self):
        class User(self.Model):
            __tablename__ = 'text_item'
            __versioned__ = {
                'base_classes': (self.Model, ),
                'track_property_modifications': True
            }
            id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
            name = sa.Column(sa.Unicode(255))
            age = sa.Column(sa.Integer)

        self.User = User

    def test_each_column_generates_additional_mod_column(self):
        UserHistory = self.User.__versioned__['class']
        assert 'name_mod' in UserHistory.__table__.c
        column = UserHistory.__table__.c['name_mod']
        assert not column.nullable
        assert isinstance(column.type, sa.Boolean)

    def test_primary_keys_not_included(self):
        UserHistory = self.User.__versioned__['class']
        assert 'id_mod' not in UserHistory.__table__.c

    def test_mod_properties_get_updated(self):
        user = self.User(name=u'John')
        self.session.add(user)
        self.session.commit()

        assert user.versions[-1].name_mod
Add tests for PropertyModTracker plugin
Add tests for PropertyModTracker plugin
Python
bsd-3-clause
avilaton/sqlalchemy-continuum,piotr-dobrogost/sqlalchemy-continuum,rmoorman/sqlalchemy-continuum,kvesteri/sqlalchemy-continuum
import sqlalchemy as sa

from sqlalchemy_continuum.plugins import PropertyModTrackerPlugin
from tests import TestCase


class TestPropertyModificationsTracking(TestCase):
    plugins = [PropertyModTrackerPlugin]

    def create_models(self):
        class User(self.Model):
            __tablename__ = 'text_item'
            __versioned__ = {
                'base_classes': (self.Model, ),
                'track_property_modifications': True
            }
            id = sa.Column(sa.Integer, autoincrement=True, primary_key=True)
            name = sa.Column(sa.Unicode(255))
            age = sa.Column(sa.Integer)

        self.User = User

    def test_each_column_generates_additional_mod_column(self):
        UserHistory = self.User.__versioned__['class']
        assert 'name_mod' in UserHistory.__table__.c
        column = UserHistory.__table__.c['name_mod']
        assert not column.nullable
        assert isinstance(column.type, sa.Boolean)

    def test_primary_keys_not_included(self):
        UserHistory = self.User.__versioned__['class']
        assert 'id_mod' not in UserHistory.__table__.c

    def test_mod_properties_get_updated(self):
        user = self.User(name=u'John')
        self.session.add(user)
        self.session.commit()

        assert user.versions[-1].name_mod
Add tests for PropertyModTracker plugin
1c7c0f081711654095023a984978cd812ac94fb6
numpy/tests/typing/reveal/modules.py
numpy/tests/typing/reveal/modules.py
import numpy as np

reveal_type(np)  # E: ModuleType

reveal_type(np.char)  # E: ModuleType
reveal_type(np.compat)  # E: ModuleType
reveal_type(np.core)  # E: ModuleType
reveal_type(np.ctypeslib)  # E: ModuleType
reveal_type(np.emath)  # E: ModuleType
reveal_type(np.fft)  # E: ModuleType
reveal_type(np.lib)  # E: ModuleType
reveal_type(np.linalg)  # E: ModuleType
reveal_type(np.ma)  # E: ModuleType
reveal_type(np.matrixlib)  # E: ModuleType
reveal_type(np.polynomial)  # E: ModuleType
reveal_type(np.random)  # E: ModuleType
reveal_type(np.rec)  # E: ModuleType
reveal_type(np.testing)  # E: ModuleType
reveal_type(np.version)  # E: ModuleType

# TODO: Remove when annotations have been added to `np.testing.assert_equal`
reveal_type(np.testing.assert_equal)  # E: Any
import numpy as np

reveal_type(np)  # E: ModuleType

reveal_type(np.char)  # E: ModuleType
reveal_type(np.ctypeslib)  # E: ModuleType
reveal_type(np.emath)  # E: ModuleType
reveal_type(np.fft)  # E: ModuleType
reveal_type(np.lib)  # E: ModuleType
reveal_type(np.linalg)  # E: ModuleType
reveal_type(np.ma)  # E: ModuleType
reveal_type(np.matrixlib)  # E: ModuleType
reveal_type(np.polynomial)  # E: ModuleType
reveal_type(np.random)  # E: ModuleType
reveal_type(np.rec)  # E: ModuleType
reveal_type(np.testing)  # E: ModuleType
reveal_type(np.version)  # E: ModuleType

# TODO: Remove when annotations have been added to `np.testing.assert_equal`
reveal_type(np.testing.assert_equal)  # E: Any
Remove `compat` and `core` from the tests
TST: Remove `compat` and `core` from the tests
Python
bsd-3-clause
anntzer/numpy,numpy/numpy,rgommers/numpy,mhvk/numpy,anntzer/numpy,seberg/numpy,numpy/numpy,madphysicist/numpy,madphysicist/numpy,madphysicist/numpy,mattip/numpy,mattip/numpy,endolith/numpy,numpy/numpy,jakirkham/numpy,grlee77/numpy,simongibbons/numpy,grlee77/numpy,charris/numpy,anntzer/numpy,jakirkham/numpy,seberg/numpy,simongibbons/numpy,jakirkham/numpy,endolith/numpy,pbrod/numpy,pdebuyl/numpy,pdebuyl/numpy,pdebuyl/numpy,mattip/numpy,mhvk/numpy,anntzer/numpy,simongibbons/numpy,jakirkham/numpy,seberg/numpy,charris/numpy,grlee77/numpy,grlee77/numpy,grlee77/numpy,rgommers/numpy,pbrod/numpy,simongibbons/numpy,seberg/numpy,charris/numpy,rgommers/numpy,mhvk/numpy,pbrod/numpy,pbrod/numpy,pdebuyl/numpy,endolith/numpy,simongibbons/numpy,pbrod/numpy,mattip/numpy,jakirkham/numpy,charris/numpy,madphysicist/numpy,numpy/numpy,madphysicist/numpy,rgommers/numpy,mhvk/numpy,endolith/numpy,mhvk/numpy
import numpy as np

reveal_type(np)  # E: ModuleType

reveal_type(np.char)  # E: ModuleType
reveal_type(np.ctypeslib)  # E: ModuleType
reveal_type(np.emath)  # E: ModuleType
reveal_type(np.fft)  # E: ModuleType
reveal_type(np.lib)  # E: ModuleType
reveal_type(np.linalg)  # E: ModuleType
reveal_type(np.ma)  # E: ModuleType
reveal_type(np.matrixlib)  # E: ModuleType
reveal_type(np.polynomial)  # E: ModuleType
reveal_type(np.random)  # E: ModuleType
reveal_type(np.rec)  # E: ModuleType
reveal_type(np.testing)  # E: ModuleType
reveal_type(np.version)  # E: ModuleType

# TODO: Remove when annotations have been added to `np.testing.assert_equal`
reveal_type(np.testing.assert_equal)  # E: Any
TST: Remove `compat` and `core` from the tests
import numpy as np

reveal_type(np)  # E: ModuleType

reveal_type(np.char)  # E: ModuleType
reveal_type(np.compat)  # E: ModuleType
reveal_type(np.core)  # E: ModuleType
reveal_type(np.ctypeslib)  # E: ModuleType
reveal_type(np.emath)  # E: ModuleType
reveal_type(np.fft)  # E: ModuleType
reveal_type(np.lib)  # E: ModuleType
reveal_type(np.linalg)  # E: ModuleType
reveal_type(np.ma)  # E: ModuleType
reveal_type(np.matrixlib)  # E: ModuleType
reveal_type(np.polynomial)  # E: ModuleType
reveal_type(np.random)  # E: ModuleType
reveal_type(np.rec)  # E: ModuleType
reveal_type(np.testing)  # E: ModuleType
reveal_type(np.version)  # E: ModuleType

# TODO: Remove when annotations have been added to `np.testing.assert_equal`
reveal_type(np.testing.assert_equal)  # E: Any
d7eb2dc9eb5f391a6a6742bea3692c8ab1d8aa69
doc/examples/plot_edge_filter.py
doc/examples/plot_edge_filter.py
import matplotlib.pyplot as plt

from skimage.data import camera
from skimage.filter import roberts, sobel


image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)

fig, (ax0, ax1) = plt.subplots(ncols=2)

ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')

ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')

plt.show()
"""
==============
Edge operators
==============

Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of
the gradient of the image intensity function.

"""
import matplotlib.pyplot as plt

from skimage.data import camera
from skimage.filter import roberts, sobel


image = camera()
edge_roberts = roberts(image)
edge_sobel = sobel(image)

fig, (ax0, ax1) = plt.subplots(ncols=2)

ax0.imshow(edge_roberts, cmap=plt.cm.gray)
ax0.set_title('Roberts Edge Detection')
ax0.axis('off')

ax1.imshow(edge_sobel, cmap=plt.cm.gray)
ax1.set_title('Sobel Edge Detection')
ax1.axis('off')

plt.show()
Add short description to edge filter example
Add short description to edge filter example
Python
bsd-3-clause
ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,bsipocz/scikit-image,oew1v07/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,paalge/scikit-image,almarklein/scikit-image,Midafi/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,juliusbierk/scikit-image,rjeli/scikit-image,chintak/scikit-image,SamHames/scikit-image,chriscrosscutler/scikit-image,SamHames/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,newville/scikit-image,robintw/scikit-image,warmspringwinds/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,WarrenWeckesser/scikits-image,jwiggins/scikit-image,bennlich/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,keflavich/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,rjeli/scikit-image,paalge/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,bennlich/scikit-image,dpshelio/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,bsipocz/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,Britefury/scikit-image,dpshelio/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,vighneshbirodkar/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,almarklein/scikit-image,Midafi/scikit-image,blink1073/scikit-image,juliusbierk/scikit-image,Hiyorimi/scikit-image,oew1v07/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,youprofit/scikit-image,warmspringwinds/scikit-image,emon10005/scikit-image,paalge/scikit-image,newville/scikit-image
""" ============== Edge operators ============== Edge operators are used in image processing within edge detection algorithms. They are discrete differentiation operators, computing an approximation of the gradient of the image intensity function. """ import matplotlib.pyplot as plt from skimage.data import camera from skimage.filter import roberts, sobel image = camera() edge_roberts = roberts(image) edge_sobel = sobel(image) fig, (ax0, ax1) = plt.subplots(ncols=2) ax0.imshow(edge_roberts, cmap=plt.cm.gray) ax0.set_title('Roberts Edge Detection') ax0.axis('off') ax1.imshow(edge_sobel, cmap=plt.cm.gray) ax1.set_title('Sobel Edge Detection') ax1.axis('off') plt.show()
Add short description to edge filter example import matplotlib.pyplot as plt from skimage.data import camera from skimage.filter import roberts, sobel image = camera() edge_roberts = roberts(image) edge_sobel = sobel(image) fig, (ax0, ax1) = plt.subplots(ncols=2) ax0.imshow(edge_roberts, cmap=plt.cm.gray) ax0.set_title('Roberts Edge Detection') ax0.axis('off') ax1.imshow(edge_sobel, cmap=plt.cm.gray) ax1.set_title('Sobel Edge Detection') ax1.axis('off') plt.show()
a116c3eae892a73b11372225a9bdf0194db75598
glanerbeard/web.py
glanerbeard/web.py
import logging

from flask import (
    Flask,
    render_template,
    abort
)

from glanerbeard.server import Server

app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')

numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
    raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)

servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])

@app.route('/')
def index():
    shows = [server.getShows() for server in servers]
    return str(shows)

if __name__ == '__main__':
    app.debug = True
    app.run()
import logging

from flask import (
    Flask,
    render_template,
    abort
)

from glanerbeard.server import Server

app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')

numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
    raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)

servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])

@app.route('/')
def index():
    shows = [server.getShows() for server in servers]
    return render_template('json.html', json=shows)
Use a template to render json.
Use a template to render json.
Python
apache-2.0
daenney/glanerbeard
import logging

from flask import (
    Flask,
    render_template,
    abort
)

from glanerbeard.server import Server

app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')

numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
    raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)

servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])

@app.route('/')
def index():
    shows = [server.getShows() for server in servers]
    return render_template('json.html', json=shows)
Use a template to render json.
import logging

from flask import (
    Flask,
    render_template,
    abort
)

from glanerbeard.server import Server

app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')

numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
    raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)

servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])

@app.route('/')
def index():
    shows = [server.getShows() for server in servers]
    return str(shows)

if __name__ == '__main__':
    app.debug = True
    app.run()
63ce9ac2a46f74704810d62e22c0b75ca071442a
minesweeper/minesweeper.py
minesweeper/minesweeper.py
import re


class InvalidBoard(ValueError):
    pass


def board(b):
    if not is_valid_board(b):
        raise InvalidBoard("Board is malformed and thus invalid")
    b = [[ch for ch in row] for row in b]
    for i in range(1, len(b)-1):
        for j in range(1, len(b[0])-1):
            if b[i][j] == " ":
                m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
                count = m.count("*")
                if count:
                    b[i][j] = str(count)
    return list(map("".join, b))


def is_valid_board(b):
    width = "{" + str(len(b[0]) - 2) + "}"
    height = "{" + str(len(b) - 2) + "}"
    r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width, h=height))
    return bool(r.match("".join(b)))
import re


class InvalidBoard(ValueError):
    pass


def board(b):
    if not is_valid_board(b):
        raise InvalidBoard("Board is malformed and thus invalid")
    b = [[ch for ch in row] for row in b]
    for i in range(1, len(b)-1):
        for j in range(1, len(b[0])-1):
            if b[i][j] == " ":
                m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
                count = m.count("*")
                if count:
                    b[i][j] = str(count)
    return list(map("".join, b))


def is_valid_board(b):
    width = "{" + str(len(b[0]) - 2) + "}"
    height = "{" + str(len(b) - 2) + "}"
    r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width, h=height))
    # bool is technically redundant here, but I'd rather that this function
    # return an explicit True/False
    return bool(r.match("".join(b)))
Add note regarding use of bool in validation
Add note regarding use of bool in validation
Python
agpl-3.0
CubicComet/exercism-python-solutions
import re


class InvalidBoard(ValueError):
    pass


def board(b):
    if not is_valid_board(b):
        raise InvalidBoard("Board is malformed and thus invalid")
    b = [[ch for ch in row] for row in b]
    for i in range(1, len(b)-1):
        for j in range(1, len(b[0])-1):
            if b[i][j] == " ":
                m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
                count = m.count("*")
                if count:
                    b[i][j] = str(count)
    return list(map("".join, b))


def is_valid_board(b):
    width = "{" + str(len(b[0]) - 2) + "}"
    height = "{" + str(len(b) - 2) + "}"
    r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width, h=height))
    # bool is technically redundant here, but I'd rather that this function
    # return an explicit True/False
    return bool(r.match("".join(b)))

Add note regarding use of bool in validation

import re


class InvalidBoard(ValueError):
    pass


def board(b):
    if not is_valid_board(b):
        raise InvalidBoard("Board is malformed and thus invalid")
    b = [[ch for ch in row] for row in b]
    for i in range(1, len(b)-1):
        for j in range(1, len(b[0])-1):
            if b[i][j] == " ":
                m = "".join(b[i-1][j-1:j+2] + b[i][j-1:j+2] + b[i+1][j-1:j+2])
                count = m.count("*")
                if count:
                    b[i][j] = str(count)
    return list(map("".join, b))


def is_valid_board(b):
    width = "{" + str(len(b[0]) - 2) + "}"
    height = "{" + str(len(b) - 2) + "}"
    r = re.compile("^(\+-{w}\+)(\|[ *]{w}\|){h}(\+-{w}\+)$".format(w=width, h=height))
    return bool(r.match("".join(b)))
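A standalone sketch of what the added comment is defending; the pattern and test strings below are made up for illustration, not taken from the exercise:

import re

# re.match returns a Match object (truthy) or None (falsy), so the validator
# above already works without bool(); the wrapper only pins the return value
# to an explicit True/False.
valid = re.match(r"\+-+\+", "+---+")
broken = re.match(r"\+-+\+", "oops")
print(valid, bool(valid))    # <re.Match object ...> True
print(broken, bool(broken))  # None False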
282a8d120d486f15cc1e5cd9e1432c1100e722bf
examples/nogallery/create_a_dummy_hdf5_file.py
examples/nogallery/create_a_dummy_hdf5_file.py
import numpy as np

import km3pipe as kp


class APump(kp.Pump):
    def configure(self):
        self.index = 0

    def process(self, blob):
        data = {'a': self.index * np.arange(5),
                'b': np.arange(5) ** self.index}
        data2 = {'c': self.index * np.arange(10, dtype='f4') + 0.1,
                 'd': np.arange(10, dtype='f4') ** self.index + 0.2}
        print(data2)
        blob['Tablelike'] = kp.Table(data, h5loc='/tablelike', name='2D Table')
        print(blob['Tablelike'])
        blob['Columnwise'] = kp.Table(data2, h5loc='/columnwise',
                                      split_h5=True, name='Column-wise Split')
        self.index += 1
        return blob


pipe = kp.Pipeline()
pipe.attach(APump)
pipe.attach(kp.io.HDF5Sink, filename='km3hdf5_example.h5')
pipe.drain(13)
Add a dummy script which creates an hdf5 file
Add a dummy script which creates an hdf5 file
Python
mit
tamasgal/km3pipe,tamasgal/km3pipe
import numpy as np

import km3pipe as kp


class APump(kp.Pump):
    def configure(self):
        self.index = 0

    def process(self, blob):
        data = {'a': self.index * np.arange(5),
                'b': np.arange(5) ** self.index}
        data2 = {'c': self.index * np.arange(10, dtype='f4') + 0.1,
                 'd': np.arange(10, dtype='f4') ** self.index + 0.2}
        print(data2)
        blob['Tablelike'] = kp.Table(data, h5loc='/tablelike', name='2D Table')
        print(blob['Tablelike'])
        blob['Columnwise'] = kp.Table(data2, h5loc='/columnwise',
                                      split_h5=True, name='Column-wise Split')
        self.index += 1
        return blob


pipe = kp.Pipeline()
pipe.attach(APump)
pipe.attach(kp.io.HDF5Sink, filename='km3hdf5_example.h5')
pipe.drain(13)
Add a dummy script which creates an hdf5 file
16dda42316176f0ad9c747731764855792fe88d6
lymph/utils/observables.py
lymph/utils/observables.py
# Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py

class Observable(object):
    def __init__(self):
        self.observers = {}

    def notify_observers(self, action, *args, **kwargs):
        for callback in self.observers.get(action, ()):
            callback(*args, **kwargs)

    def observe(self, action, callback):
        self.observers.setdefault(action, []).append(callback)

class Observable(object):
    def __init__(self):
        self.observers = {}

    def notify_observers(self, action, *args, **kwargs):
        kwargs.setdefault('action', action)
        for callback in self.observers.get(action, ()):
            callback(*args, **kwargs)

    def observe(self, actions, callback):
        if not isinstance(actions, (tuple, list)):
            actions = (actions,)
        for action in actions:
            self.observers.setdefault(action, []).append(callback)
Allow observing more than one action at once
Allow observing more than one action at once
Python
apache-2.0
lyudmildrx/lymph,mouadino/lymph,Drahflow/lymph,itakouna/lymph,vpikulik/lymph,deliveryhero/lymph,kstrempel/lymph,alazaro/lymph,lyudmildrx/lymph,itakouna/lymph,mamachanko/lymph,torte/lymph,mamachanko/lymph,lyudmildrx/lymph,alazaro/lymph,mouadino/lymph,mamachanko/lymph,mouadino/lymph,alazaro/lymph,itakouna/lymph,dushyant88/lymph
class Observable(object):
    def __init__(self):
        self.observers = {}

    def notify_observers(self, action, *args, **kwargs):
        kwargs.setdefault('action', action)
        for callback in self.observers.get(action, ()):
            callback(*args, **kwargs)

    def observe(self, actions, callback):
        if not isinstance(actions, (tuple, list)):
            actions = (actions,)
        for action in actions:
            self.observers.setdefault(action, []).append(callback)

Allow observing more than one action at once

# Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py

class Observable(object):
    def __init__(self):
        self.observers = {}

    def notify_observers(self, action, *args, **kwargs):
        for callback in self.observers.get(action, ()):
            callback(*args, **kwargs)

    def observe(self, action, callback):
        self.observers.setdefault(action, []).append(callback)
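A minimal usage sketch of the patched Observable; the class body is copied verbatim from the record above so the snippet runs on its own, while the action names and callback are invented for illustration:

class Observable(object):
    def __init__(self):
        self.observers = {}

    def notify_observers(self, action, *args, **kwargs):
        kwargs.setdefault('action', action)
        for callback in self.observers.get(action, ()):
            callback(*args, **kwargs)

    def observe(self, actions, callback):
        if not isinstance(actions, (tuple, list)):
            actions = (actions,)
        for action in actions:
            self.observers.setdefault(action, []).append(callback)


def on_change(item, action):
    # The triggering action arrives via the kwarg injected by setdefault above.
    print('%s: %s' % (action, item))


obs = Observable()
obs.observe(('created', 'deleted'), on_change)  # one callback, several actions
obs.notify_observers('created', 'order-1')      # -> created: order-1
obs.notify_observers('deleted', 'order-1')      # -> deleted: order-1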
122f596f3568d1ee1031733344e7eebb057cc032
setup.py
setup.py
import os
import sys

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()

sys.path.insert(0, here)
from titlecase import __version__

setup(name='titlecase',
      version=__version__,
      description="Python Port of John Gruber's titlecase.pl",
      long_description=README,
      classifiers=[
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "Programming Language :: Python",
          "License :: OSI Approved :: MIT License",
          "Natural Language :: English",
          "Topic :: Text Processing :: Filters",
      ],
      keywords='string formatting',
      author="Stuart Colville",
      author_email="[email protected]",
      url="http://muffinresearch.co.uk/",
      license="MIT",
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      tests_require=['nose'],
      test_suite="titlecase.tests",
      entry_points = """\
      """
      )

import os
import sys

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()

sys.path.insert(0, here)
from titlecase import __version__

setup(name='titlecase',
      version=__version__,
      description="Python Port of John Gruber's titlecase.pl",
      long_description=README,
      classifiers=[
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "Programming Language :: Python",
          "License :: OSI Approved :: MIT License",
          "Natural Language :: English",
          "Topic :: Text Processing :: Filters",
      ],
      keywords='string formatting',
      author="Stuart Colville",
      author_email="[email protected]",
      url="http://muffinresearch.co.uk/",
      license="MIT",
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      tests_require=['nose'],
      setup_requires=['nose>=1.0'],
      test_suite="titlecase.tests",
      entry_points = """\
      """
      )
Add python3 support for nosetests
Add python3 support for nosetests
Python
mit
ppannuto/python-titlecase
import os
import sys

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()

sys.path.insert(0, here)
from titlecase import __version__

setup(name='titlecase',
      version=__version__,
      description="Python Port of John Gruber's titlecase.pl",
      long_description=README,
      classifiers=[
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "Programming Language :: Python",
          "License :: OSI Approved :: MIT License",
          "Natural Language :: English",
          "Topic :: Text Processing :: Filters",
      ],
      keywords='string formatting',
      author="Stuart Colville",
      author_email="[email protected]",
      url="http://muffinresearch.co.uk/",
      license="MIT",
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      tests_require=['nose'],
      setup_requires=['nose>=1.0'],
      test_suite="titlecase.tests",
      entry_points = """\
      """
      )

Add python3 support for nosetests

import os
import sys

from setuptools import setup, find_packages

here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.md')).read()

sys.path.insert(0, here)
from titlecase import __version__

setup(name='titlecase',
      version=__version__,
      description="Python Port of John Gruber's titlecase.pl",
      long_description=README,
      classifiers=[
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "Programming Language :: Python",
          "License :: OSI Approved :: MIT License",
          "Natural Language :: English",
          "Topic :: Text Processing :: Filters",
      ],
      keywords='string formatting',
      author="Stuart Colville",
      author_email="[email protected]",
      url="http://muffinresearch.co.uk/",
      license="MIT",
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      tests_require=['nose'],
      test_suite="titlecase.tests",
      entry_points = """\
      """
      )
921bdcc5d6f6ac4be7dfd0015e5b5fd6d06e6486
runcommands/__main__.py
runcommands/__main__.py
import sys

from .config import RawConfig, RunConfig
from .exc import RunCommandsError
from .run import run, partition_argv, read_run_args
from .util import printer


def main(argv=None):
    try:
        all_argv, run_argv, command_argv = partition_argv(argv)
        cli_args = run.parse_args(RawConfig(run=RunConfig()), run_argv)
        run_args = read_run_args(run)
        run_args.update(cli_args)
        run.implementation(
            None,
            all_argv=all_argv, run_argv=run_argv, command_argv=command_argv,
            cli_args=cli_args,
            **run_args)
    except RunCommandsError as exc:
        printer.error(exc, file=sys.stderr)
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main())

import sys

from .config import RawConfig, RunConfig
from .exc import RunCommandsError
from .run import run, partition_argv, read_run_args
from .util import printer


def main(argv=None):
    debug = None
    try:
        all_argv, run_argv, command_argv = partition_argv(argv)
        cli_args = run.parse_args(RawConfig(run=RunConfig()), run_argv)
        run_args = read_run_args(run)
        run_args.update(cli_args)
        debug = run_args.get('debug', run.parameters['debug'].default)
        run.implementation(
            None,
            all_argv=all_argv, run_argv=run_argv, command_argv=command_argv,
            cli_args=cli_args,
            **run_args)
    except RunCommandsError as exc:
        if debug or debug is None:
            # User specified --debug OR processing didn't get far enough
            # to determine whether user specified --debug.
            raise
        printer.error(exc, file=sys.stderr)
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main())
Raise exception when --debug is specified to main script
Raise exception when --debug is specified to main script I.e., instead of printing the exception and then exiting.
Python
mit
wylee/runcommands,wylee/runcommands
import sys

from .config import RawConfig, RunConfig
from .exc import RunCommandsError
from .run import run, partition_argv, read_run_args
from .util import printer


def main(argv=None):
    debug = None
    try:
        all_argv, run_argv, command_argv = partition_argv(argv)
        cli_args = run.parse_args(RawConfig(run=RunConfig()), run_argv)
        run_args = read_run_args(run)
        run_args.update(cli_args)
        debug = run_args.get('debug', run.parameters['debug'].default)
        run.implementation(
            None,
            all_argv=all_argv, run_argv=run_argv, command_argv=command_argv,
            cli_args=cli_args,
            **run_args)
    except RunCommandsError as exc:
        if debug or debug is None:
            # User specified --debug OR processing didn't get far enough
            # to determine whether user specified --debug.
            raise
        printer.error(exc, file=sys.stderr)
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main())

Raise exception when --debug is specified to main script

I.e., instead of printing the exception and then exiting.

import sys

from .config import RawConfig, RunConfig
from .exc import RunCommandsError
from .run import run, partition_argv, read_run_args
from .util import printer


def main(argv=None):
    try:
        all_argv, run_argv, command_argv = partition_argv(argv)
        cli_args = run.parse_args(RawConfig(run=RunConfig()), run_argv)
        run_args = read_run_args(run)
        run_args.update(cli_args)
        run.implementation(
            None,
            all_argv=all_argv, run_argv=run_argv, command_argv=command_argv,
            cli_args=cli_args,
            **run_args)
    except RunCommandsError as exc:
        printer.error(exc, file=sys.stderr)
        return 1
    return 0


if __name__ == '__main__':
    sys.exit(main())
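A tiny sketch of the tri-state the patch relies on: debug stays None when processing fails before --debug can be read, and that case is re-raised just like an explicit --debug (the loop values are illustrative):

for debug in (None, True, False):
    # Mirrors the `if debug or debug is None:` guard in the except block above.
    outcome = 'raise' if debug or debug is None else 'print error, return 1'
    print(repr(debug), '->', outcome)
# None -> raise, True -> raise, False -> print error, return 1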
86fdcd6575a944a378a9c3f5b292fb33a6c42853
digestive/hash.py
digestive/hash.py
import hashlib

from digestive.io import Sink


class HashDigest(Sink):
    def __init__(self, name, digest):
        super().__init__(name)
        self._digest = digest

    def update(self, data):
        self._digest.update(data)

    def digest(self):
        return self._digest.hexdigest()


class MD5(HashDigest):
    def __init__(self):
        super().__init__('md5', hashlib.md5())


class SHA1(HashDigest):
    def __init__(self):
        super().__init__('sha1', hashlib.sha1())


class SHA256(HashDigest):
    def __init__(self):
        super().__init__('sha2-256', hashlib.sha256())


class SHA512(HashDigest):
    def __init__(self):
        super().__init__('sha2-512', hashlib.sha512())

import hashlib

from digestive.io import Sink


class HashDigest(Sink):
    def __init__(self, name, digest):
        super().__init__(name)
        self._digest = digest

    def update(self, data):
        self._digest.update(data)

    def digest(self):
        return self._digest.hexdigest()


class MD5(HashDigest):
    def __init__(self):
        super().__init__('md5', hashlib.md5())


class SHA1(HashDigest):
    def __init__(self):
        super().__init__('sha1', hashlib.sha1())


class SHA256(HashDigest):
    def __init__(self):
        super().__init__('sha256', hashlib.sha256())


class SHA512(HashDigest):
    def __init__(self):
        super().__init__('sha512', hashlib.sha512())
Make sha256 and sha512 sink names correspond to their commandline arguments
Make sha256 and sha512 sink names correspond to their commandline arguments
Python
isc
akaIDIOT/Digestive
import hashlib

from digestive.io import Sink


class HashDigest(Sink):
    def __init__(self, name, digest):
        super().__init__(name)
        self._digest = digest

    def update(self, data):
        self._digest.update(data)

    def digest(self):
        return self._digest.hexdigest()


class MD5(HashDigest):
    def __init__(self):
        super().__init__('md5', hashlib.md5())


class SHA1(HashDigest):
    def __init__(self):
        super().__init__('sha1', hashlib.sha1())


class SHA256(HashDigest):
    def __init__(self):
        super().__init__('sha256', hashlib.sha256())


class SHA512(HashDigest):
    def __init__(self):
        super().__init__('sha512', hashlib.sha512())

Make sha256 and sha512 sink names correspond to their commandline arguments

import hashlib

from digestive.io import Sink


class HashDigest(Sink):
    def __init__(self, name, digest):
        super().__init__(name)
        self._digest = digest

    def update(self, data):
        self._digest.update(data)

    def digest(self):
        return self._digest.hexdigest()


class MD5(HashDigest):
    def __init__(self):
        super().__init__('md5', hashlib.md5())


class SHA1(HashDigest):
    def __init__(self):
        super().__init__('sha1', hashlib.sha1())


class SHA256(HashDigest):
    def __init__(self):
        super().__init__('sha2-256', hashlib.sha256())


class SHA512(HashDigest):
    def __init__(self):
        super().__init__('sha2-512', hashlib.sha512())
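A quick illustration of why the rename helps: the sink names now line up with hashlib's own algorithm names as well as the command-line flags (the hashlib calls here are just for comparison, not digestive code):

import hashlib

print(hashlib.sha256().name)  # 'sha256' -- matches the renamed sink, not 'sha2-256'
print(hashlib.sha512().name)  # 'sha512'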
390fc84183c0f680c5fb1a980ee3c1227b187611
HowLong/HowLong.py
HowLong/HowLong.py
#!/usr/bin/env python3
import argparse
from datetime import timedelta
from subprocess import Popen
from time import time, sleep


def red(text):
    RED = '\033[91m'
    END = '\033[0m'
    return RED + text + END


class HowLong(object):
    def __init__(self):
        parser = argparse.ArgumentParser(description='Time a process')
        parser.add_argument('-i', type=float, nargs='?', metavar='interval',
                            help='the timer interval, defaults to 1 second')
        parser.add_argument('command', metavar='C', type=str, nargs='+',
                            help='a valid command')
        self.parsed_args = parser.parse_args()
        self.timer_interval = self.parsed_args.i if self.parsed_args.i else 1
        self.readable_command = " ".join(self.parsed_args.command)

    def run(self):
        print("Running", self.readable_command)
        process = Popen(self.parsed_args.command)
        start_time = time()
        while process.poll() is None:
            sleep(self.timer_interval)
            elapsed_time = (time() - start_time) * 1000
            print(red(str(timedelta(milliseconds=elapsed_time))))
        print("Finished", self.readable_command)


def howlong():
    HowLong().run()


if __name__ == "__main__":
    howlong()

from __future__ import print_function

import sys
import argparse
from datetime import timedelta
from subprocess import Popen
from time import time, sleep


def red(text):
    RED = '\033[91m'
    END = '\033[0m'
    return RED + text + END


def log(*args):
    print(*args, file=sys.stderr)
    sys.stderr.flush()


class HowLong(object):
    def __init__(self):
        parser = argparse.ArgumentParser(description='Time a process')
        parser.add_argument('-i', type=float, nargs='?', metavar='interval',
                            help='the timer interval, defaults to 1 second')
        parser.add_argument('command', metavar='C', type=str, nargs='+',
                            help='a valid command')
        self.parsed_args = parser.parse_args()
        self.timer_interval = self.parsed_args.i if self.parsed_args.i else 1
        self.readable_command = " ".join(self.parsed_args.command)

    def run(self):
        log("Running", self.readable_command)
        process = Popen(self.parsed_args.command)
        start_time = time()
        while process.poll() is None:
            sleep(self.timer_interval)
            elapsed_time = (time() - start_time) * 1000
            log(red(str(timedelta(milliseconds=elapsed_time))))
        log("Finished", self.readable_command)


def howlong():
    HowLong().run()


if __name__ == "__main__":
    howlong()
Print debug info to stderr
MINOR: Print debug info to stderr
Python
apache-2.0
mattjegan/HowLong
from __future__ import print_function

import sys
import argparse
from datetime import timedelta
from subprocess import Popen
from time import time, sleep


def red(text):
    RED = '\033[91m'
    END = '\033[0m'
    return RED + text + END


def log(*args):
    print(*args, file=sys.stderr)
    sys.stderr.flush()


class HowLong(object):
    def __init__(self):
        parser = argparse.ArgumentParser(description='Time a process')
        parser.add_argument('-i', type=float, nargs='?', metavar='interval',
                            help='the timer interval, defaults to 1 second')
        parser.add_argument('command', metavar='C', type=str, nargs='+',
                            help='a valid command')
        self.parsed_args = parser.parse_args()
        self.timer_interval = self.parsed_args.i if self.parsed_args.i else 1
        self.readable_command = " ".join(self.parsed_args.command)

    def run(self):
        log("Running", self.readable_command)
        process = Popen(self.parsed_args.command)
        start_time = time()
        while process.poll() is None:
            sleep(self.timer_interval)
            elapsed_time = (time() - start_time) * 1000
            log(red(str(timedelta(milliseconds=elapsed_time))))
        log("Finished", self.readable_command)


def howlong():
    HowLong().run()


if __name__ == "__main__":
    howlong()

MINOR: Print debug info to stderr

#!/usr/bin/env python3
import argparse
from datetime import timedelta
from subprocess import Popen
from time import time, sleep


def red(text):
    RED = '\033[91m'
    END = '\033[0m'
    return RED + text + END


class HowLong(object):
    def __init__(self):
        parser = argparse.ArgumentParser(description='Time a process')
        parser.add_argument('-i', type=float, nargs='?', metavar='interval',
                            help='the timer interval, defaults to 1 second')
        parser.add_argument('command', metavar='C', type=str, nargs='+',
                            help='a valid command')
        self.parsed_args = parser.parse_args()
        self.timer_interval = self.parsed_args.i if self.parsed_args.i else 1
        self.readable_command = " ".join(self.parsed_args.command)

    def run(self):
        print("Running", self.readable_command)
        process = Popen(self.parsed_args.command)
        start_time = time()
        while process.poll() is None:
            sleep(self.timer_interval)
            elapsed_time = (time() - start_time) * 1000
            print(red(str(timedelta(milliseconds=elapsed_time))))
        print("Finished", self.readable_command)


def howlong():
    HowLong().run()


if __name__ == "__main__":
    howlong()
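A minimal sketch of what switching print() to log() buys: timer chatter goes to stderr, so stdout carries only the wrapped command's output. The strings here are invented for the demo:

from __future__ import print_function

import sys


def log(*args):
    # Same shape as the log() helper in the patch above.
    print(*args, file=sys.stderr)
    sys.stderr.flush()


print('stdout: belongs to the wrapped command')  # survives a shell pipe
log('stderr: Running sleep 2')                   # timer output, diverted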
e120858d5cb123e9f3422ddb15ce79bde8d05d64
statsd/__init__.py
statsd/__init__.py
import socket

try:
    from django.conf import settings
except ImportError:
    settings = None

from client import StatsClient


__all__ = ['StatsClient', 'statsd']

VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION))

if settings:
    try:
        host = getattr(settings, 'STATSD_HOST', 'localhost')
        port = getattr(settings, 'STATSD_PORT', 8125)
        prefix = getattr(settings, 'STATSD_PREFIX', None)
        statsd = StatsClient(host, port, prefix)
    except (socket.error, socket.gaierror, ImportError):
        statsd = None

import socket
import os

try:
    from django.conf import settings
except ImportError:
    settings = None

from client import StatsClient


__all__ = ['StatsClient', 'statsd']

VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION))

if settings:
    try:
        host = getattr(settings, 'STATSD_HOST', 'localhost')
        port = getattr(settings, 'STATSD_PORT', 8125)
        prefix = getattr(settings, 'STATSD_PREFIX', None)
        statsd = StatsClient(host, port, prefix)
    except (socket.error, socket.gaierror, ImportError):
        try:
            host = os.environ['STATSD_HOST']
            port = os.environ['STATSD_PORT']
            prefix = os.environ.get('STATSD_PREFIX')
            statsd = StatsClient(host, port, prefix)
        except (socket.error, socket.gaierror, KeyError):
            statsd = None
Read settings from environment, if available
Read settings from environment, if available
Python
mit
lyft/pystatsd,jsocol/pystatsd,deathowl/pystatsd,Khan/pystatsd,Khan/pystatsd,smarkets/pystatsd,wujuguang/pystatsd,lyft/pystatsd
import socket
import os

try:
    from django.conf import settings
except ImportError:
    settings = None

from client import StatsClient


__all__ = ['StatsClient', 'statsd']

VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION))

if settings:
    try:
        host = getattr(settings, 'STATSD_HOST', 'localhost')
        port = getattr(settings, 'STATSD_PORT', 8125)
        prefix = getattr(settings, 'STATSD_PREFIX', None)
        statsd = StatsClient(host, port, prefix)
    except (socket.error, socket.gaierror, ImportError):
        try:
            host = os.environ['STATSD_HOST']
            port = os.environ['STATSD_PORT']
            prefix = os.environ.get('STATSD_PREFIX')
            statsd = StatsClient(host, port, prefix)
        except (socket.error, socket.gaierror, KeyError):
            statsd = None

Read settings from environment, if available

import socket

try:
    from django.conf import settings
except ImportError:
    settings = None

from client import StatsClient


__all__ = ['StatsClient', 'statsd']

VERSION = (0, 4, 0)
__version__ = '.'.join(map(str, VERSION))

if settings:
    try:
        host = getattr(settings, 'STATSD_HOST', 'localhost')
        port = getattr(settings, 'STATSD_PORT', 8125)
        prefix = getattr(settings, 'STATSD_PREFIX', None)
        statsd = StatsClient(host, port, prefix)
    except (socket.error, socket.gaierror, ImportError):
        statsd = None
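A standalone sketch of the environment fallback the patch adds, with made-up values; note the port comes back as a raw string from os.environ, exactly as in the patch:

import os

os.environ['STATSD_HOST'] = 'localhost'
os.environ['STATSD_PORT'] = '8125'

host = os.environ['STATSD_HOST']
port = os.environ['STATSD_PORT']          # a string, not an int, as written
prefix = os.environ.get('STATSD_PREFIX')  # optional; None when unset
print(host, port, prefix)                 # localhost 8125 None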
3093941ebed1f9c726a88776819ee181cdb0b869
piper/db/core.py
piper/db/core.py
import logbook


# Let's name this DatabaseBase. 'tis a silly name.
class DatabaseBase(object):
    """
    Abstract class representing a persistance layer

    """

    def __init__(self):
        self.log = logbook.Logger(self.__class__.__name__)

    def init(self, ns, config):
        raise NotImplementedError()


class DbCLI(object):
    def __init__(self, cls):
        self.cls = cls
        self.log = logbook.Logger(self.__class__.__name__)

    def compose(self, parser):  # pragma: nocover
        db = parser.add_parser('db', help='Perform database tasks')
        sub = db.add_subparsers(help='Database commands', dest="db_command")
        sub.add_parser('init', help='Do the initial setup of the database')
        return 'db', self.run

    def run(self, ns, config):
        self.cls.init(ns, config)
        return 0

import logbook


class LazyDatabaseMixin(object):
    """
    A mixin class that gives the subclass lazy access to the database layer

    The lazy attribute self.db is added, and the database class is gotten
    from self.config, and an instance is made and returned.

    """

    _db = None

    @property
    def db(self):
        assert self.config is not None, \
            'Database accessed before self.config was set.'

        if self._db is None:
            self._db = self.config.get_database()
            self._db.setup()

        return self._db


# Let's name this DatabaseBase. 'tis a silly name.
class DatabaseBase(object):
    """
    Abstract class representing a persistance layer

    """

    def __init__(self):
        self.log = logbook.Logger(self.__class__.__name__)

    def init(self, ns, config):
        raise NotImplementedError()


class DbCLI(object):
    def __init__(self, cls):
        self.cls = cls
        self.log = logbook.Logger(self.__class__.__name__)

    def compose(self, parser):  # pragma: nocover
        db = parser.add_parser('db', help='Perform database tasks')
        sub = db.add_subparsers(help='Database commands', dest="db_command")
        sub.add_parser('init', help='Do the initial setup of the database')
        return 'db', self.run

    def run(self, ns, config):
        self.cls.init(ns, config)
        return 0
Add first iteration of LazyDatabaseMixin()
Add first iteration of LazyDatabaseMixin()
Python
mit
thiderman/piper
import logbook


class LazyDatabaseMixin(object):
    """
    A mixin class that gives the subclass lazy access to the database layer

    The lazy attribute self.db is added, and the database class is gotten
    from self.config, and an instance is made and returned.

    """

    _db = None

    @property
    def db(self):
        assert self.config is not None, \
            'Database accessed before self.config was set.'

        if self._db is None:
            self._db = self.config.get_database()
            self._db.setup()

        return self._db


# Let's name this DatabaseBase. 'tis a silly name.
class DatabaseBase(object):
    """
    Abstract class representing a persistance layer

    """

    def __init__(self):
        self.log = logbook.Logger(self.__class__.__name__)

    def init(self, ns, config):
        raise NotImplementedError()


class DbCLI(object):
    def __init__(self, cls):
        self.cls = cls
        self.log = logbook.Logger(self.__class__.__name__)

    def compose(self, parser):  # pragma: nocover
        db = parser.add_parser('db', help='Perform database tasks')
        sub = db.add_subparsers(help='Database commands', dest="db_command")
        sub.add_parser('init', help='Do the initial setup of the database')
        return 'db', self.run

    def run(self, ns, config):
        self.cls.init(ns, config)
        return 0

Add first iteration of LazyDatabaseMixin()

import logbook


# Let's name this DatabaseBase. 'tis a silly name.
class DatabaseBase(object):
    """
    Abstract class representing a persistance layer

    """

    def __init__(self):
        self.log = logbook.Logger(self.__class__.__name__)

    def init(self, ns, config):
        raise NotImplementedError()


class DbCLI(object):
    def __init__(self, cls):
        self.cls = cls
        self.log = logbook.Logger(self.__class__.__name__)

    def compose(self, parser):  # pragma: nocover
        db = parser.add_parser('db', help='Perform database tasks')
        sub = db.add_subparsers(help='Database commands', dest="db_command")
        sub.add_parser('init', help='Do the initial setup of the database')
        return 'db', self.run

    def run(self, ns, config):
        self.cls.init(ns, config)
        return 0
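A hypothetical consumer of the new mixin; the mixin body is copied from the record above so the sketch runs standalone, while StubDatabase, StubConfig and Build are stand-ins invented for illustration, not piper classes:

class LazyDatabaseMixin(object):
    _db = None

    @property
    def db(self):
        assert self.config is not None, \
            'Database accessed before self.config was set.'

        if self._db is None:
            self._db = self.config.get_database()
            self._db.setup()

        return self._db


class StubDatabase(object):
    def setup(self):
        print('setup runs once, on first access')


class StubConfig(object):
    def get_database(self):
        return StubDatabase()


class Build(LazyDatabaseMixin):
    config = StubConfig()


build = Build()
build.db  # first access: fetches the database from config and runs setup()
build.db  # second access: returns the cached instance, no second setup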
2d5c5a1bf693f428b53f8d4a6e788f7be864aa9e
image_site_app/forms.py
image_site_app/forms.py
from django import forms


class SignupForm(forms.Form):
    field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']

    first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
    last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)

    def signup(self, request, user):
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
        user.save()

from django import forms


class SignupForm(forms.Form):
    field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']

    first_name = forms.CharField(max_length=30,
                                 label='First name (optional)',
                                 required=False,
                                 widget=forms.TextInput(attrs={
                                     'placeholder': 'First name'
                                 }))
    last_name = forms.CharField(max_length=30,
                                label='Last name (optional)',
                                required=False,
                                widget=forms.TextInput(attrs={
                                    'placeholder': 'Last name'
                                }))

    def signup(self, request, user):
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
        user.save()
Add placeholder to first_name and last_name fields in signup form
Add placeholder to first_name and last_name fields in signup form
Python
mit
frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq,frostblooded/kanq
from django import forms


class SignupForm(forms.Form):
    field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']

    first_name = forms.CharField(max_length=30,
                                 label='First name (optional)',
                                 required=False,
                                 widget=forms.TextInput(attrs={
                                     'placeholder': 'First name'
                                 }))
    last_name = forms.CharField(max_length=30,
                                label='Last name (optional)',
                                required=False,
                                widget=forms.TextInput(attrs={
                                    'placeholder': 'Last name'
                                }))

    def signup(self, request, user):
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
        user.save()

Add placeholder to first_name and last_name fields in signup form

from django import forms


class SignupForm(forms.Form):
    field_order = ['username', 'first_name', 'last_name', 'email', 'password', 'password2']

    first_name = forms.CharField(max_length=30, label='First name (optional)', required=False)
    last_name = forms.CharField(max_length=30, label='Last name (optional)', required=False)

    def signup(self, request, user):
        user.first_name = self.cleaned_data['first_name']
        user.last_name = self.cleaned_data['last_name']
        user.save()
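A minimal standalone check, outside any Django project, that TextInput attrs carry the placeholder through; the bare settings.configure() and the demo field are assumptions made just for this sketch:

from django import forms
from django.conf import settings

settings.configure()  # just enough configuration to construct a field

first_name = forms.CharField(
    required=False,
    widget=forms.TextInput(attrs={'placeholder': 'First name'}))

print(first_name.widget.attrs)  # {'placeholder': 'First name'}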