commit: stringlengths (min 40, max 40)
old_file: stringlengths (min 5, max 117)
new_file: stringlengths (min 5, max 117)
old_contents: stringlengths (min 0, max 1.93k)
new_contents: stringlengths (min 19, max 3.3k)
subject: stringlengths (min 17, max 320)
message: stringlengths (min 18, max 3.28k)
lang: stringclasses (1 value)
license: stringclasses (13 values)
repos: stringlengths (min 7, max 42.4k)
completion: stringlengths (min 19, max 3.3k)
prompt: stringlengths (min 21, max 3.65k)
67e0d2b943ef467cfef46f71195f205b1be15a0a
cms/__init__.py
cms/__init__.py
# -*- coding: utf-8 -*- __version__ = '2.3.5pbs.19' # patch settings try: from django.conf import settings if 'cms' in settings.INSTALLED_APPS: from conf import patch_settings patch_settings() except ImportError: # pragma: no cover """ This exception means that either the application is being built, or is otherwise installed improperly. Both make running patch_settings irrelevant. """ pass
# -*- coding: utf-8 -*- __version__ = '2.3.5pbs.20' # patch settings try: from django.conf import settings if 'cms' in settings.INSTALLED_APPS: from conf import patch_settings patch_settings() except ImportError: # pragma: no cover """ This exception means that either the application is being built, or is otherwise installed improperly. Both make running patch_settings irrelevant. """ pass
Bump version as instructed by bamboo.
Bump version as instructed by bamboo.
Python
bsd-3-clause
pbs/django-cms,pbs/django-cms,pbs/django-cms,pbs/django-cms
# -*- coding: utf-8 -*- __version__ = '2.3.5pbs.20' # patch settings try: from django.conf import settings if 'cms' in settings.INSTALLED_APPS: from conf import patch_settings patch_settings() except ImportError: # pragma: no cover """ This exception means that either the application is being built, or is otherwise installed improperly. Both make running patch_settings irrelevant. """ pass
Bump version as instructed by bamboo. # -*- coding: utf-8 -*- __version__ = '2.3.5pbs.19' # patch settings try: from django.conf import settings if 'cms' in settings.INSTALLED_APPS: from conf import patch_settings patch_settings() except ImportError: # pragma: no cover """ This exception means that either the application is being built, or is otherwise installed improperly. Both make running patch_settings irrelevant. """ pass
81faa7704fb355dd16674d4ed089e0ced34c24c6
rflo/start.py
rflo/start.py
import ioflo.app.run import os class Manager(object): ''' Manage the main ioflo process ''' def __init__(self): self.behaviors = ['rflo.config', 'rflo.roads'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self): ioflo.app.run.start( name='rflo', period=0.01, stamp=0.0, filepath=self.floscript, behaviors=self.behaviors, verbose=2, )
import ioflo.app.run import os class Manager(object): ''' Manage the main ioflo process ''' def __init__(self): self.behaviors = ['rflo.config', 'rflo.roads', 'rflo.router'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self): ioflo.app.run.start( name='rflo', period=0.01, stamp=0.0, filepath=self.floscript, behaviors=self.behaviors, verbose=2, )
Add router to the behaviors lookup
Add router to the behaviors lookup
Python
apache-2.0
thatch45/rflo
import ioflo.app.run import os class Manager(object): ''' Manage the main ioflo process ''' def __init__(self): self.behaviors = ['rflo.config', 'rflo.roads', 'rflo.router'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self): ioflo.app.run.start( name='rflo', period=0.01, stamp=0.0, filepath=self.floscript, behaviors=self.behaviors, verbose=2, )
Add router to the behaviors lookup import ioflo.app.run import os class Manager(object): ''' Manage the main ioflo process ''' def __init__(self): self.behaviors = ['rflo.config', 'rflo.roads'] self.floscript = os.path.join(os.path.dirname(__file__), 'raft.flo') def start(self): ioflo.app.run.start( name='rflo', period=0.01, stamp=0.0, filepath=self.floscript, behaviors=self.behaviors, verbose=2, )
f349753417682960e607b458a009fbfd324de7ab
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup execfile('kronos/version.py') setup( name = 'django-kronos', version = __version__, description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.', long_description = open('README.rst').read(), author = 'Johannes Gorset', author_email = '[email protected]', url = 'http://github.com/jgorset/kronos', packages = ['kronos', 'kronos.management', 'kronos.management.commands'] )
#!/usr/bin/env python from setuptools import setup execfile('kronos/version.py') readme = open('README.rst').read() history = open('HISTORY.rst').read() setup( name = 'django-kronos', version = __version__, description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.', long_description = readme + '\n\n' + history, author = 'Johannes Gorset', author_email = '[email protected]', url = 'http://github.com/jgorset/kronos', packages = ['kronos', 'kronos.management', 'kronos.management.commands'] )
Add history to long description
Add history to long description
Python
mit
jeanbaptistelab/django-kronos,jeanbaptistelab/django-kronos,joshblum/django-kronos,jgorset/django-kronos,jgorset/django-kronos,joshblum/django-kronos
#!/usr/bin/env python from setuptools import setup execfile('kronos/version.py') readme = open('README.rst').read() history = open('HISTORY.rst').read() setup( name = 'django-kronos', version = __version__, description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.', long_description = readme + '\n\n' + history, author = 'Johannes Gorset', author_email = '[email protected]', url = 'http://github.com/jgorset/kronos', packages = ['kronos', 'kronos.management', 'kronos.management.commands'] )
Add history to long description #!/usr/bin/env python from setuptools import setup execfile('kronos/version.py') setup( name = 'django-kronos', version = __version__, description = 'Kronos is a Django application that makes it easy to define and schedule tasks with cron.', long_description = open('README.rst').read(), author = 'Johannes Gorset', author_email = '[email protected]', url = 'http://github.com/jgorset/kronos', packages = ['kronos', 'kronos.management', 'kronos.management.commands'] )
f109f24e8f10d1fd3f8940c0eb54b157aa9ed909
content/test/gpu/gpu_tests/pixel_expectations.py
content/test/gpu/gpu_tests/pixel_expectations.py
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel.Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) self.Fail('Pixel.Canvas2DRedBox', [ 'linux', ('nvidia', 0x104a)], bug=511580) self.Fail('Pixel.CSS3DBlueBox', [ 'linux', ('nvidia', 0x104a)], bug=511580) self.Fail('Pixel.WebGLGreenTriangle', [ 'linux', ('nvidia', 0x104a)], bug=511580) pass
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel.Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) self.Fail('Pixel.Canvas2DRedBox', bug=511580) self.Fail('Pixel.CSS3DBlueBox', bug=511580) self.Fail('Pixel.WebGLGreenTriangle', bug=511580) pass
Mark pixel tests as failing on all platform
Mark pixel tests as failing on all platform BUG=511580 [email protected] Review URL: https://codereview.chromium.org/1245243003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#340191}
Python
bsd-3-clause
Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Chilledheart/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel.Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) self.Fail('Pixel.Canvas2DRedBox', bug=511580) self.Fail('Pixel.CSS3DBlueBox', bug=511580) self.Fail('Pixel.WebGLGreenTriangle', bug=511580) pass
Mark pixel tests as failing on all platform BUG=511580 [email protected] Review URL: https://codereview.chromium.org/1245243003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#340191} # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel.Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) self.Fail('Pixel.Canvas2DRedBox', [ 'linux', ('nvidia', 0x104a)], bug=511580) self.Fail('Pixel.CSS3DBlueBox', [ 'linux', ('nvidia', 0x104a)], bug=511580) self.Fail('Pixel.WebGLGreenTriangle', [ 'linux', ('nvidia', 0x104a)], bug=511580) pass
63143c94cef353d7bae13f7b13650801bb901c94
tests/unicode/unicode_pos.py
tests/unicode/unicode_pos.py
# str methods with explicit start/end pos print("Привет".startswith("П")) print("Привет".startswith("р", 1)) print("абвба".find("а", 1)) print("абвба".find("а", 1, -1))
Test for explicit start/end args to str methods for unicode.
tests: Test for explicit start/end args to str methods for unicode.
Python
mit
hiway/micropython,martinribelotta/micropython,tdautc19841202/micropython,supergis/micropython,torwag/micropython,pfalcon/micropython,galenhz/micropython,TDAbboud/micropython,kerneltask/micropython,heisewangluo/micropython,kerneltask/micropython,pozetroninc/micropython,ericsnowcurrently/micropython,ernesto-g/micropython,omtinez/micropython,misterdanb/micropython,Peetz0r/micropython-esp32,praemdonck/micropython,xyb/micropython,MrSurly/micropython-esp32,ryannathans/micropython,redbear/micropython,infinnovation/micropython,rubencabrera/micropython,EcmaXp/micropython,xhat/micropython,cwyark/micropython,xhat/micropython,emfcamp/micropython,torwag/micropython,ernesto-g/micropython,pramasoul/micropython,toolmacher/micropython,noahwilliamsson/micropython,lbattraw/micropython,infinnovation/micropython,tuc-osg/micropython,slzatz/micropython,blmorris/micropython,pramasoul/micropython,pramasoul/micropython,Peetz0r/micropython-esp32,alex-robbins/micropython,EcmaXp/micropython,ericsnowcurrently/micropython,noahwilliamsson/micropython,lbattraw/micropython,emfcamp/micropython,dxxb/micropython,ryannathans/micropython,micropython/micropython-esp32,swegener/micropython,firstval/micropython,xhat/micropython,tralamazza/micropython,jimkmc/micropython,ruffy91/micropython,emfcamp/micropython,dinau/micropython,pozetroninc/micropython,blmorris/micropython,puuu/micropython,ernesto-g/micropython,ahotam/micropython,cloudformdesign/micropython,ganshun666/micropython,oopy/micropython,ChuckM/micropython,TDAbboud/micropython,aethaniel/micropython,ganshun666/micropython,dinau/micropython,pramasoul/micropython,trezor/micropython,alex-robbins/micropython,mpalomer/micropython,dhylands/micropython,pramasoul/micropython,tobbad/micropython,galenhz/micropython,praemdonck/micropython,EcmaXp/micropython,swegener/micropython,jmarcelino/pycom-micropython,misterdanb/micropython,tuc-osg/micropython,galenhz/micropython,mgyenik/micropython,oopy/micropython,ahotam/micropython,toolmacher/micropython,stonegithubs/micropython,lbattraw/micropython,emfcamp/micropython,drrk/micropython,heisewangluo/micropython,ganshun666/micropython,dxxb/micropython,matthewelse/micropython,tobbad/micropython,dhylands/micropython,suda/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,tdautc19841202/micropython,dhylands/micropython,pramasoul/micropython,trezor/micropython,alex-robbins/micropython,mhoffma/micropython,stonegithubs/micropython,Peetz0r/micropython-esp32,misterdanb/micropython,cloudformdesign/micropython,ganshun666/micropython,suda/micropython,PappaPeppar/micropython,pramasoul/micropython,tobbad/micropython,noahwilliamsson/micropython,tdautc19841202/micropython,SHA2017-badge/micropython-esp32,kostyll/micropython,ericsnowcurrently/micropython,kostyll/micropython,supergis/micropython,Timmenem/micropython,pozetroninc/micropython,adafruit/circuitpython,hiway/micropython,micropython/micropython-esp32,mianos/micropython,slzatz/micropython,selste/micropython,tobbad/micropython,kostyll/micropython,warner83/micropython,alex-march/micropython,adafruit/micropython,dxxb/micropython,tobbad/micropython,vriera/micropython,omtinez/micropython,hiway/micropython,dhylands/micropython,ceramos/micropython,drrk/micropython,vriera/micropython,vitiral/micropython,lowRISC/micropython,methoxid/micropystat,utopiaprince/micropython,orionrobots/micropython,lbattraw/micropython,mpalomer/micropython,kostyll/micropython,cnoviello/micropython,chrisdearman/micropython,pozetroninc/micropython,martinribelotta/micropython,lbattraw/micropython,noahwilliamsson/micropython,MrSurly/micropython-esp32,Peetz0r/micropython-esp32,MrSurly/micropython,xyb/micropython,neilh10/micropython,hiway/micropython,PappaPeppar/micropython,EcmaXp/micropython,cnoviello/micropython,martinribelotta/micropython,jimkmc/micropython,suda/micropython,cwyark/micropython,dxxb/micropython,martinribelotta/micropython,heisewangluo/micropython,orionrobots/micropython,paul-xxx/micropython,suda/micropython,SungEun-Steve-Kim/test-mp,jmarcelino/pycom-micropython,skybird6672/micropython,oopy/micropython,bvernoux/micropython,lowRISC/micropython,tdautc19841202/micropython,deshipu/micropython,galenhz/micropython,adafruit/circuitpython,alex-robbins/micropython,ericsnowcurrently/micropython,turbinenreiter/micropython,alex-march/micropython,SungEun-Steve-Kim/test-mp,matthewelse/micropython,matthewelse/micropython,Vogtinator/micropython,mgyenik/micropython,ceramos/micropython,paul-xxx/micropython,oopy/micropython,methoxid/micropystat,feilongfl/micropython,warner83/micropython,utopiaprince/micropython,cnoviello/micropython,chrisdearman/micropython,deshipu/micropython,adamkh/micropython,methoxid/micropystat,pfalcon/micropython,ceramos/micropython,Timmenem/micropython,HenrikSolver/micropython,xyb/micropython,skybird6672/micropython,mpalomer/micropython,vitiral/micropython,warner83/micropython,xuxiaoxin/micropython,emfcamp/micropython,henriknelson/micropython,swegener/micropython,mpalomer/micropython,noahwilliamsson/micropython,ruffy91/micropython,KISSMonX/micropython,aethaniel/micropython,ericsnowcurrently/micropython,Vogtinator/micropython,galenhz/micropython,methoxid/micropystat,KISSMonX/micropython,TDAbboud/micropython,deshipu/micropython,praemdonck/micropython,adamkh/micropython,Timmenem/micropython,AriZuu/micropython,cloudformdesign/micropython,skybird6672/micropython,hiway/micropython,henriknelson/micropython,xyb/micropython,jlillest/micropython,aethaniel/micropython,SHA2017-badge/micropython-esp32,drrk/micropython,dhylands/micropython,SHA2017-badge/micropython-esp32,methoxid/micropystat,kerneltask/micropython,tuc-osg/micropython,TDAbboud/micropython,matthewelse/micropython,bvernoux/micropython,mgyenik/micropython,alex-march/micropython,ceramos/micropython,ChuckM/micropython,trezor/micropython,micropython/micropython-esp32,adafruit/micropython,orionrobots/micropython,kerneltask/micropython,cnoviello/micropython,dmazzella/micropython,jlillest/micropython,Timmenem/micropython,ernesto-g/micropython,xuxiaoxin/micropython,vriera/micropython,MrSurly/micropython-esp32,martinribelotta/micropython,orionrobots/micropython,vitiral/micropython,toolmacher/micropython,Vogtinator/micropython,dhylands/micropython,tdautc19841202/micropython,MrSurly/micropython,adafruit/circuitpython,danicampora/micropython,alex-robbins/micropython,noahchense/micropython,ChuckM/micropython,dinau/micropython,feilongfl/micropython,omtinez/micropython,jlillest/micropython,puuu/micropython,adafruit/micropython,jlillest/micropython,adafruit/micropython,stonegithubs/micropython,hosaka/micropython,mhoffma/micropython,mpalomer/micropython,micropython/micropython-esp32,blmorris/micropython,SungEun-Steve-Kim/test-mp,noahchense/micropython,hosaka/micropython,chrisdearman/micropython,drrk/micropython,MrSurly/micropython,selste/micropython,feilongfl/micropython,selste/micropython,blazewicz/micropython,infinnovation/micropython,hosaka/micropython,swegener/micropython,warner83/micropython,tralamazza/micropython,MrSurly/micropython,MrSurly/micropython-esp32,torwag/micropython,neilh10/micropython,cloudformdesign/micropython,pfalcon/micropython,xuxiaoxin/micropython,mhoffma/micropython,blmorris/micropython,turbinenreiter/micropython,ruffy91/micropython,ceramos/micropython,xhat/micropython,torwag/micropython,PappaPeppar/micropython,supergis/micropython,puuu/micropython,omtinez/micropython,xuxiaoxin/micropython,redbear/micropython,turbinenreiter/micropython,mhoffma/micropython,adamkh/micropython,dmazzella/micropython,chrisdearman/micropython,toolmacher/micropython,paul-xxx/micropython,slzatz/micropython,torwag/micropython,noahchense/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,suda/micropython,hosaka/micropython,blazewicz/micropython,adafruit/micropython,mianos/micropython,noahchense/micropython,xyb/micropython,ahotam/micropython,deshipu/micropython,ernesto-g/micropython,turbinenreiter/micropython,ryannathans/micropython,omtinez/micropython,dmazzella/micropython,lowRISC/micropython,vitiral/micropython,feilongfl/micropython,matthewelse/micropython,chrisdearman/micropython,noahchense/micropython,xhat/micropython,blmorris/micropython,tralamazza/micropython,bvernoux/micropython,puuu/micropython,ryannathans/micropython,alex-march/micropython,vriera/micropython,skybird6672/micropython,firstval/micropython,deshipu/micropython,paul-xxx/micropython,henriknelson/micropython,danicampora/micropython,supergis/micropython,lbattraw/micropython,utopiaprince/micropython,tuc-osg/micropython,AriZuu/micropython,Vogtinator/micropython,mianos/micropython,danicampora/micropython,feilongfl/micropython,matthewelse/micropython,dinau/micropython,warner83/micropython,lowRISC/micropython,tobbad/micropython,HenrikSolver/micropython,adamkh/micropython,MrSurly/micropython-esp32,praemdonck/micropython,oopy/micropython,jimkmc/micropython,adafruit/circuitpython,dinau/micropython,Timmenem/micropython,skybird6672/micropython,toolmacher/micropython,blazewicz/micropython,adamkh/micropython,redbear/micropython,rubencabrera/micropython,jmarcelino/pycom-micropython,rubencabrera/micropython,infinnovation/micropython,tralamazza/micropython,heisewangluo/micropython,KISSMonX/micropython,adafruit/circuitpython,pfalcon/micropython,ganshun666/micropython,slzatz/micropython,trezor/micropython,jmarcelino/pycom-micropython,HenrikSolver/micropython,ahotam/micropython,turbinenreiter/micropython,pozetroninc/micropython,ruffy91/micropython,heisewangluo/micropython,mianos/micropython,supergis/micropython,trezor/micropython,redbear/micropython,cwyark/micropython,hosaka/micropython,danicampora/micropython,aethaniel/micropython,cloudformdesign/micropython,orionrobots/micropython,firstval/micropython,utopiaprince/micropython,trezor/micropython,slzatz/micropython,vitiral/micropython,xuxiaoxin/micropython,emfcamp/micropython,firstval/micropython,selste/micropython,cwyark/micropython,pfalcon/micropython,alex-robbins/micropython,aethaniel/micropython,HenrikSolver/micropython,puuu/micropython,SungEun-Steve-Kim/test-mp,selste/micropython,SHA2017-badge/micropython-esp32,ryannathans/micropython,AriZuu/micropython,mianos/micropython,mgyenik/micropython,lowRISC/micropython,jimkmc/micropython,blazewicz/micropython,ChuckM/micropython,kostyll/micropython,Peetz0r/micropython-esp32,dinau/micropython,misterdanb/micropython,SungEun-Steve-Kim/test-mp,TDAbboud/micropython,alex-march/micropython,firstval/micropython,mgyenik/micropython,swegener/micropython,drrk/micropython,vriera/micropython,bvernoux/micropython,cnoviello/micropython,MrSurly/micropython,blazewicz/micropython,paul-xxx/micropython,utopiaprince/micropython,dxxb/micropython,praemdonck/micropython,ahotam/micropython,bvernoux/micropython,misterdanb/micropython,KISSMonX/micropython,Vogtinator/micropython,neilh10/micropython,henriknelson/micropython,cwyark/micropython,mhoffma/micropython,KISSMonX/micropython,rubencabrera/micropython,redbear/micropython,danicampora/micropython,jlillest/micropython,jimkmc/micropython,HenrikSolver/micropython,stonegithubs/micropython,EcmaXp/micropython,rubencabrera/micropython,tuc-osg/micropython,stonegithubs/micropython,AriZuu/micropython,infinnovation/micropython
# str methods with explicit start/end pos print("Привет".startswith("П")) print("Привет".startswith("р", 1)) print("абвба".find("а", 1)) print("абвба".find("а", 1, -1))
tests: Test for explicit start/end args to str methods for unicode.
3a87b03ed42232f7daa96242142f48872bf26634
readthedocs/gold/models.py
readthedocs/gold/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ LEVEL_CHOICES = ( ('v1-org-5', '$5/mo'), ('v1-org-10', '$10/mo'), ('v1-org-15', '$15/mo'), ('v1-org-20', '$20/mo'), ('v1-org-50', '$50/mo'), ('v1-org-100', '$100/mo'), ) class GoldUser(models.Model): pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True) modified_date = models.DateTimeField(_('Modified date'), auto_now=True) user = models.ForeignKey('auth.User', verbose_name=_('User'), unique=True, related_name='gold') level = models.CharField(_('Level'), max_length=20, choices=LEVEL_CHOICES, default='supporter') last_4_digits = models.CharField(max_length=4) stripe_id = models.CharField(max_length=255) subscribed = models.BooleanField(default=False)
from django.db import models from django.utils.translation import ugettext_lazy as _ LEVEL_CHOICES = ( ('v1-org-5', '$5/mo'), ('v1-org-10', '$10/mo'), ('v1-org-15', '$15/mo'), ('v1-org-20', '$20/mo'), ('v1-org-50', '$50/mo'), ('v1-org-100', '$100/mo'), ) class GoldUser(models.Model): pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True) modified_date = models.DateTimeField(_('Modified date'), auto_now=True) user = models.ForeignKey('auth.User', verbose_name=_('User'), unique=True, related_name='gold') level = models.CharField(_('Level'), max_length=20, choices=LEVEL_CHOICES, default='supporter') last_4_digits = models.CharField(max_length=4) stripe_id = models.CharField(max_length=255) subscribed = models.BooleanField(default=False) def __unicode__(self): return 'Gold Level %s for %s' % (self.level, self.user)
Add nicer string rep for gold user
Add nicer string rep for gold user
Python
mit
jerel/readthedocs.org,CedarLogic/readthedocs.org,sunnyzwh/readthedocs.org,sils1297/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,sunnyzwh/readthedocs.org,safwanrahman/readthedocs.org,takluyver/readthedocs.org,fujita-shintaro/readthedocs.org,sid-kap/readthedocs.org,laplaceliu/readthedocs.org,espdev/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,laplaceliu/readthedocs.org,nikolas/readthedocs.org,raven47git/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,cgourlay/readthedocs.org,pombredanne/readthedocs.org,istresearch/readthedocs.org,wijerasa/readthedocs.org,espdev/readthedocs.org,techtonik/readthedocs.org,kdkeyser/readthedocs.org,atsuyim/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,asampat3090/readthedocs.org,sunnyzwh/readthedocs.org,wijerasa/readthedocs.org,sunnyzwh/readthedocs.org,gjtorikian/readthedocs.org,GovReady/readthedocs.org,nikolas/readthedocs.org,jerel/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,Tazer/readthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,emawind84/readthedocs.org,sid-kap/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,fujita-shintaro/readthedocs.org,VishvajitP/readthedocs.org,Tazer/readthedocs.org,wanghaven/readthedocs.org,mhils/readthedocs.org,nikolas/readthedocs.org,takluyver/readthedocs.org,soulshake/readthedocs.org,kenshinthebattosai/readthedocs.org,michaelmcandrew/readthedocs.org,safwanrahman/readthedocs.org,GovReady/readthedocs.org,singingwolfboy/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,wanghaven/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,cgourlay/readthedocs.org,SteveViss/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,dirn/readthedocs.org,emawind84/readthedocs.org,kenshinthebattosai/readthedocs.org,agjohnson/readthedocs.org,sid-kap/readthedocs.org,istresearch/readthedocs.org,asampat3090/readthedocs.org,asampat3090/readthedocs.org,clarkperkins/readthedocs.org,wijerasa/readthedocs.org,atsuyim/readthedocs.org,kenwang76/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,singingwolfboy/readthedocs.org,dirn/readthedocs.org,agjohnson/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,LukasBoersma/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,takluyver/readthedocs.org,gjtorikian/readthedocs.org,cgourlay/readthedocs.org,stevepiercy/readthedocs.org,agjohnson/readthedocs.org,michaelmcandrew/readthedocs.org,istresearch/readthedocs.org,Tazer/readthedocs.org,titiushko/readthedocs.org,safwanrahman/readthedocs.org,pombredanne/readthedocs.org,davidfischer/readthedocs.org,sils1297/readthedocs.org,VishvajitP/readthedocs.org,raven47git/readthedocs.org,emawind84/readthedocs.org,royalwang/readthedocs.org,atsuyim/readthedocs.org,stevepiercy/readthedocs.org,royalwang/readthedocs.org,SteveViss/readthedocs.org,d0ugal/readthedocs.org,d0ugal/readthedocs.org,VishvajitP/readthedocs.org,jerel/readthedocs.org,michaelmcandrew/readthedocs.org,CedarLogic/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,espdev/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,singingwolfboy/readthedocs.org,titiushko/readthedocs.org,attakei/readthedocs-oauth,hach-que/readthedocs.org,wijerasa/readthedocs.org,takluyver/readthedocs.org,GovReady/readthedocs.org,asampat3090/readthedocs.org,sils1297/readthedocs.org,soulshake/readthedocs.org,atsuyim/readthedocs.org,istresearch/readthedocs.org,titiushko/readthedocs.org,titiushko/readthedocs.org,raven47git/readthedocs.org,d0ugal/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,stevepiercy/readthedocs.org,kenwang76/readthedocs.org,hach-que/readthedocs.org,dirn/readthedocs.org,LukasBoersma/readthedocs.org,techtonik/readthedocs.org,SteveViss/readthedocs.org,LukasBoersma/readthedocs.org,agjohnson/readthedocs.org,mhils/readthedocs.org,kenwang76/readthedocs.org,clarkperkins/readthedocs.org,CedarLogic/readthedocs.org,cgourlay/readthedocs.org,royalwang/readthedocs.org,techtonik/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,royalwang/readthedocs.org,laplaceliu/readthedocs.org,nikolas/readthedocs.org,attakei/readthedocs-oauth,sid-kap/readthedocs.org,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,SteveViss/readthedocs.org,kenshinthebattosai/readthedocs.org,davidfischer/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,espdev/readthedocs.org
from django.db import models from django.utils.translation import ugettext_lazy as _ LEVEL_CHOICES = ( ('v1-org-5', '$5/mo'), ('v1-org-10', '$10/mo'), ('v1-org-15', '$15/mo'), ('v1-org-20', '$20/mo'), ('v1-org-50', '$50/mo'), ('v1-org-100', '$100/mo'), ) class GoldUser(models.Model): pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True) modified_date = models.DateTimeField(_('Modified date'), auto_now=True) user = models.ForeignKey('auth.User', verbose_name=_('User'), unique=True, related_name='gold') level = models.CharField(_('Level'), max_length=20, choices=LEVEL_CHOICES, default='supporter') last_4_digits = models.CharField(max_length=4) stripe_id = models.CharField(max_length=255) subscribed = models.BooleanField(default=False) def __unicode__(self): return 'Gold Level %s for %s' % (self.level, self.user)
Add nicer string rep for gold user from django.db import models from django.utils.translation import ugettext_lazy as _ LEVEL_CHOICES = ( ('v1-org-5', '$5/mo'), ('v1-org-10', '$10/mo'), ('v1-org-15', '$15/mo'), ('v1-org-20', '$20/mo'), ('v1-org-50', '$50/mo'), ('v1-org-100', '$100/mo'), ) class GoldUser(models.Model): pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True) modified_date = models.DateTimeField(_('Modified date'), auto_now=True) user = models.ForeignKey('auth.User', verbose_name=_('User'), unique=True, related_name='gold') level = models.CharField(_('Level'), max_length=20, choices=LEVEL_CHOICES, default='supporter') last_4_digits = models.CharField(max_length=4) stripe_id = models.CharField(max_length=255) subscribed = models.BooleanField(default=False)
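Aside, not part of the commit above: __unicode__ only takes effect on Python 2, while Python 3 Django calls __str__ instead. A minimal sketch of a portable variant, assuming Django's python_2_unicode_compatible decorator (real API in Django 1.5-2.x) and trimming the model to the two fields the string rep uses:

from django.db import models
from django.utils.encoding import python_2_unicode_compatible

@python_2_unicode_compatible  # on Python 2, rewrites __str__ into __unicode__
class GoldUser(models.Model):
    # Illustrative subset of the real model's fields (assumption, see lead-in).
    user = models.ForeignKey('auth.User', related_name='gold')
    level = models.CharField(max_length=20)

    def __str__(self):
        return 'Gold Level %s for %s' % (self.level, self.user)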
8afb48e23b91efa3432ffad568002a46384eb021
fantasyland.py
fantasyland.py
import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))
import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
Add command line interface to vary num-games and num-cards.
Add command line interface to vary num-games and num-cards.
Python
mit
session-id/pineapple-ai
import argparse import numpy as np import random import game as g import hand_optimizer parser = argparse.ArgumentParser(description='Simulate fantasyland like situations.') parser.add_argument('--num-games', type=int, default=1000, help='number of games to play') parser.add_argument('--num-cards', type=int, default=14, help='number of cards to be dealt') args = parser.parse_args() game = g.PineappleGame1() utilities = [] for iter_num in xrange(args.num_games): print "{:5} / {:5}".format(iter_num, args.num_games), '\r', draw = random.sample(game.cards, args.num_cards) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(args.num_games))
Add command line interface to vary num-games and num-cards. import numpy as np import random import game as g import hand_optimizer game = g.PineappleGame1() NUM_ITERS = 1000 utilities = [] for iter_num in xrange(NUM_ITERS): print "{:5} / {:5}".format(iter_num, NUM_ITERS), '\r', draw = random.sample(game.cards, 14) utilities += [hand_optimizer.optimize_hand([[], [], []], draw)] print '' utilities = np.array(utilities) print "Average utility: {} +/- {}".format(np.mean(utilities), np.std(utilities) / np.sqrt(NUM_ITERS))
f755060a8999a1d6ba007f24dda9d00b9bb9d5dd
UI/sunc_menu.py
UI/sunc_menu.py
# -*- coding: utf-8 -*- from PyQt4 import QtCore, QtGui from qt_interfaces.sync_menu_ui import Ui_SyncMenu # Synchronization menu section # class SyncMenuUI(QtGui.QMainWindow): def __init__(self, parent=None,): QtGui.QWidget.__init__(self, parent) self.sync_menu_ui = Ui_SyncMenu() self.sync_menu_ui.setupUi(self) # start synchronization action QtCore.QObject.connect( self.sync_menu_ui.start_sync_bt, QtCore.SIGNAL('clicked()'), self.start_sync_action) def start_sync_action(self): return 1 def stop_sync_action(self): return 1 def update_current_main_sync_stats(self, stats_array): self.sync_menu_ui.successfully_synced_files_count.setText(stats_array["successfully_synced_files_count"]) return 1
Add sync menu backend init
Add sync menu backend init
Python
mit
lakewik/storj-gui-client
# -*- coding: utf-8 -*- from PyQt4 import QtCore, QtGui from qt_interfaces.sync_menu_ui import Ui_SyncMenu # Synchronization menu section # class SyncMenuUI(QtGui.QMainWindow): def __init__(self, parent=None,): QtGui.QWidget.__init__(self, parent) self.sync_menu_ui = Ui_SyncMenu() self.sync_menu_ui.setupUi(self) # start synchronization action QtCore.QObject.connect( self.sync_menu_ui.start_sync_bt, QtCore.SIGNAL('clicked()'), self.start_sync_action) def start_sync_action(self): return 1 def stop_sync_action(self): return 1 def update_current_main_sync_stats(self, stats_array): self.sync_menu_ui.successfully_synced_files_count.setText(stats_array["successfully_synced_files_count"]) return 1
Add sync menu backend init
f7d83caae3264d86420ce654f3669175c284a82d
ocradmin/core/decorators.py
ocradmin/core/decorators.py
# Miscellaneos functions relating the projects app import os from datetime import datetime from django.http import HttpResponseRedirect from django.utils.http import urlquote from django.conf import settings def project_required(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): path = urlquote(request.get_full_path()) if not request.session.get("project"): return HttpResponseRedirect("/projects/list/?next=%s" % path) return func(request, *args, **kwargs) return wrapper def saves_files(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): temp = request.path.startswith(("/nodelib/")) project = request.session.get("project") output_path = None if project is None: temp = True if temp: output_path = os.path.join( settings.MEDIA_ROOT, settings.TEMP_PATH, request.user.username, datetime.now().strftime("%Y%m%d%H%M%S") ) else: output_path = os.path.join( settings.MEDIA_ROOT, settings.USER_FILES_PATH, project.slug ) request.__class__.output_path = output_path return func(request, *args, **kwargs) return wrapper
# Miscellaneos functions relating the projects app import os from datetime import datetime from django.http import HttpResponseRedirect from django.utils.http import urlquote from django.conf import settings def project_required(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): path = urlquote(request.get_full_path()) if not request.session.get("project"): return HttpResponseRedirect("/projects/list/?next=%s" % path) request.project = request.session.get("project") return func(request, *args, **kwargs) return wrapper def saves_files(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): temp = request.path.startswith(("/nodelib/")) project = request.session.get("project") output_path = None if project is None: temp = True if temp: output_path = os.path.join( settings.MEDIA_ROOT, settings.TEMP_PATH, request.user.username, datetime.now().strftime("%Y%m%d%H%M%S") ) else: output_path = os.path.join( settings.MEDIA_ROOT, settings.USER_FILES_PATH, project.slug ) request.__class__.output_path = output_path return func(request, *args, **kwargs) return wrapper
Add project as request attribute to save a little boilerplate
Add project as request attribute to save a little boilerplate
Python
apache-2.0
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
# Miscellaneos functions relating the projects app import os from datetime import datetime from django.http import HttpResponseRedirect from django.utils.http import urlquote from django.conf import settings def project_required(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): path = urlquote(request.get_full_path()) if not request.session.get("project"): return HttpResponseRedirect("/projects/list/?next=%s" % path) request.project = request.session.get("project") return func(request, *args, **kwargs) return wrapper def saves_files(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): temp = request.path.startswith(("/nodelib/")) project = request.session.get("project") output_path = None if project is None: temp = True if temp: output_path = os.path.join( settings.MEDIA_ROOT, settings.TEMP_PATH, request.user.username, datetime.now().strftime("%Y%m%d%H%M%S") ) else: output_path = os.path.join( settings.MEDIA_ROOT, settings.USER_FILES_PATH, project.slug ) request.__class__.output_path = output_path return func(request, *args, **kwargs) return wrapper
Add project as request attribute to save a little boilerplate # Miscellaneos functions relating the projects app import os from datetime import datetime from django.http import HttpResponseRedirect from django.utils.http import urlquote from django.conf import settings def project_required(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): path = urlquote(request.get_full_path()) if not request.session.get("project"): return HttpResponseRedirect("/projects/list/?next=%s" % path) return func(request, *args, **kwargs) return wrapper def saves_files(func): """ Decorator function for other actions that require a project to be open in the session. """ def wrapper(request, *args, **kwargs): temp = request.path.startswith(("/nodelib/")) project = request.session.get("project") output_path = None if project is None: temp = True if temp: output_path = os.path.join( settings.MEDIA_ROOT, settings.TEMP_PATH, request.user.username, datetime.now().strftime("%Y%m%d%H%M%S") ) else: output_path = os.path.join( settings.MEDIA_ROOT, settings.USER_FILES_PATH, project.slug ) request.__class__.output_path = output_path return func(request, *args, **kwargs) return wrapper
b812843f03fd0da920872c109132aee7fae82b3a
tests/instancing_tests/NonterminalsTest.py
tests/instancing_tests/NonterminalsTest.py
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 31.08.2017 11:55 :Licence GNUv3 Part of grammpy """ from unittest import TestCase, main from grammpy import * from grammpy.exceptions import TreeDeletedException class A(Nonterminal): pass class B(Nonterminal): pass class C(Nonterminal): pass class From(Rule): rule = ([C], [A, B]) class To(Rule): rule = ([A], [B, C]) class NonterminalsTest(TestCase): def test_correctChild(self): a = A() t = To() a._set_to_rule(t) self.assertEqual(a.to_rule, t) def test_correctParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) def test_deleteParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) del f with self.assertRaises(TreeDeletedException): a.from_rule if __name__ == '__main__': main()
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 31.08.2017 11:55 :Licence GNUv3 Part of grammpy """ from unittest import TestCase, main from grammpy import * from grammpy.exceptions import TreeDeletedException class A(Nonterminal): pass class B(Nonterminal): pass class C(Nonterminal): pass class From(Rule): rule = ([C], [A, B]) class To(Rule): rule = ([A], [B, C]) class NonterminalsTest(TestCase): def test_correctChild(self): a = A() t = To() a._set_to_rule(t) self.assertEqual(a.to_rule, t) def test_correctParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) def test_deleteParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) del f with self.assertRaises(TreeDeletedException): a.from_rule def test_shouldNotDeleteChild(self): a = A() t = To() a._set_to_rule(t) del t a.to_rule if __name__ == '__main__': main()
Add test of deleteing child for nonterminal
Add test of deleteing child for nonterminal
Python
mit
PatrikValkovic/grammpy
#!/usr/bin/env python """ :Author Patrik Valkovic :Created 31.08.2017 11:55 :Licence GNUv3 Part of grammpy """ from unittest import TestCase, main from grammpy import * from grammpy.exceptions import TreeDeletedException class A(Nonterminal): pass class B(Nonterminal): pass class C(Nonterminal): pass class From(Rule): rule = ([C], [A, B]) class To(Rule): rule = ([A], [B, C]) class NonterminalsTest(TestCase): def test_correctChild(self): a = A() t = To() a._set_to_rule(t) self.assertEqual(a.to_rule, t) def test_correctParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) def test_deleteParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) del f with self.assertRaises(TreeDeletedException): a.from_rule def test_shouldNotDeleteChild(self): a = A() t = To() a._set_to_rule(t) del t a.to_rule if __name__ == '__main__': main()
Add test of deleteing child for nonterminal #!/usr/bin/env python """ :Author Patrik Valkovic :Created 31.08.2017 11:55 :Licence GNUv3 Part of grammpy """ from unittest import TestCase, main from grammpy import * from grammpy.exceptions import TreeDeletedException class A(Nonterminal): pass class B(Nonterminal): pass class C(Nonterminal): pass class From(Rule): rule = ([C], [A, B]) class To(Rule): rule = ([A], [B, C]) class NonterminalsTest(TestCase): def test_correctChild(self): a = A() t = To() a._set_to_rule(t) self.assertEqual(a.to_rule, t) def test_correctParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) def test_deleteParent(self): a = A() f = From() a._set_from_rule(f) self.assertEqual(a.from_rule, f) del f with self.assertRaises(TreeDeletedException): a.from_rule if __name__ == '__main__': main()
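The new test pins down an asymmetry: after del f the parent lookup from_rule raises TreeDeletedException, while after del t the child lookup to_rule still works. A sketch consistent with that behavior, assuming (a guess about grammpy internals, not taken from the source) a weak reference to the parent and a strong reference to the child:

import weakref

class TreeDeletedException(Exception):
    pass  # stub standing in for grammpy.exceptions.TreeDeletedException

class _NonterminalSketch:
    def _set_from_rule(self, rule):
        self._from_rule = weakref.ref(rule)  # parent held weakly

    def _set_to_rule(self, rule):
        self._to_rule = rule  # child held strongly; survives del of the caller's name

    @property
    def from_rule(self):
        rule = self._from_rule()
        if rule is None:  # referent was garbage-collected
            raise TreeDeletedException()
        return rule

    @property
    def to_rule(self):
        return self._to_rule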
f50644484f4b05fbb25adfd6430b6207441d8b2e
src/ggrc_basic_permissions/migrations/versions/20131008124800_8f33d9bd2043_fix_system_roles.py
src/ggrc_basic_permissions/migrations/versions/20131008124800_8f33d9bd2043_fix_system_roles.py
""" Revision ID: 8f33d9bd2043 Revises: 758b4012b5f Create Date: 2013-09-20 14:12:32.846302 """ # revision identifiers, used by Alembic. revision = '8f33d9bd2043' down_revision = '758b4012b5f' import json import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.Text), column('description', sa.Text), column('modified_by_id', sa.Integer), column('created_at', sa.DateTime), column('updated_at', sa.DateTime), column('context_id', sa.Integer), ) def upgrade(): basic_objects_editable = [ 'Categorization', 'Category', 'Control', 'ControlControl', 'ControlSection', 'Cycle', 'DataAsset', 'Directive', 'Contract', 'Policy', 'Regulation', 'DirectiveControl', 'Document', 'Facility', 'Help', 'Market', 'Objective', 'ObjectiveControl', 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Option', 'OrgGroup', 'PopulationSample', 'Product', 'ProgramControl', 'ProgramDirective', 'Project', 'Relationship', 'RelationshipType', 'Section', 'SectionObjective', 'SystemOrProcess', 'System', 'Process', 'SystemControl', 'SystemSysetm', ] basic_objects_readable = list(basic_objects_editable) basic_objects_readable.extend([ 'Person', 'Program', 'Role', #'UserRole', ?? why? ]) basic_objects_creatable = list(basic_objects_editable) basic_objects_creatable.extend([ 'Person', ]) basic_objects_updateable = list(basic_objects_editable) basic_objects_updateable.extend([ 'Person', ]) basic_objects_deletable = list(basic_objects_editable) op.execute(roles_table.update()\ .where(roles_table.c.name == 'Reader')\ .values(permissions_json=json.dumps({ 'read': basic_objects_readable, }))) op.execute(roles_table.update()\ .where(roles_table.c.name == 'ObjectEditor')\ .values(permissions_json=json.dumps({ 'create': basic_objects_creatable, 'read': basic_objects_readable, 'update': basic_objects_updateable, 'delete': basic_objects_deletable, }))) def downgrade(): # No reason to downgrade this one pass
Add migration to fix system roles
Add migration to fix system roles * `ObjectEditor` and `Reader` were missing `ProgramDirective`, `ProgramControl`, and `Person` permissions (CRUD, except `Person`, which is CRU. * `ObjectControl` and `ObjectDocument` were combined due to a missing comma in a previous migration.
Python
apache-2.0
hasanalom/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,uskudnik/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core
""" Revision ID: 8f33d9bd2043 Revises: 758b4012b5f Create Date: 2013-09-20 14:12:32.846302 """ # revision identifiers, used by Alembic. revision = '8f33d9bd2043' down_revision = '758b4012b5f' import json import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.Text), column('description', sa.Text), column('modified_by_id', sa.Integer), column('created_at', sa.DateTime), column('updated_at', sa.DateTime), column('context_id', sa.Integer), ) def upgrade(): basic_objects_editable = [ 'Categorization', 'Category', 'Control', 'ControlControl', 'ControlSection', 'Cycle', 'DataAsset', 'Directive', 'Contract', 'Policy', 'Regulation', 'DirectiveControl', 'Document', 'Facility', 'Help', 'Market', 'Objective', 'ObjectiveControl', 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Option', 'OrgGroup', 'PopulationSample', 'Product', 'ProgramControl', 'ProgramDirective', 'Project', 'Relationship', 'RelationshipType', 'Section', 'SectionObjective', 'SystemOrProcess', 'System', 'Process', 'SystemControl', 'SystemSysetm', ] basic_objects_readable = list(basic_objects_editable) basic_objects_readable.extend([ 'Person', 'Program', 'Role', #'UserRole', ?? why? ]) basic_objects_creatable = list(basic_objects_editable) basic_objects_creatable.extend([ 'Person', ]) basic_objects_updateable = list(basic_objects_editable) basic_objects_updateable.extend([ 'Person', ]) basic_objects_deletable = list(basic_objects_editable) op.execute(roles_table.update()\ .where(roles_table.c.name == 'Reader')\ .values(permissions_json=json.dumps({ 'read': basic_objects_readable, }))) op.execute(roles_table.update()\ .where(roles_table.c.name == 'ObjectEditor')\ .values(permissions_json=json.dumps({ 'create': basic_objects_creatable, 'read': basic_objects_readable, 'update': basic_objects_updateable, 'delete': basic_objects_deletable, }))) def downgrade(): # No reason to downgrade this one pass
Add migration to fix system roles * `ObjectEditor` and `Reader` were missing `ProgramDirective`, `ProgramControl`, and `Person` permissions (CRUD, except `Person`, which is CRU. * `ObjectControl` and `ObjectDocument` were combined due to a missing comma in a previous migration.
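The missing-comma failure described in the message above is Python's implicit concatenation of adjacent string literals; a self-contained illustration (the two entry names come from the message, the rest is an example):

# Without a comma, two adjacent literals fuse into a single permission name.
broken = [
    'ObjectControl'  # <- comma missing here
    'ObjectDocument',
]
fixed = [
    'ObjectControl',
    'ObjectDocument',
]
assert broken == ['ObjectControlObjectDocument']
assert fixed == ['ObjectControl', 'ObjectDocument']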
a1cf304f9941b811b33e1b2d786b6f38bc514546
anafero/templatetags/anafero_tags.py
anafero/templatetags/anafero_tags.py
from django import template from django.contrib.contenttypes.models import ContentType from anafero.models import ReferralResponse, ACTION_DISPLAY register = template.Library() @register.inclusion_tag("anafero/_create_referral_form.html") def create_referral(url, obj=None): if obj: return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)} else: return {"url": url, "obj": "", "obj_ct": ""} @register.assignment_tag def referral_responses(user): return ReferralResponse.objects.filter( referral__user=user ).order_by("-created_at") @register.filter def action_display(value): return ACTION_DISPLAY.get(value, value)
from django import template from django.contrib.contenttypes.models import ContentType from anafero.models import ReferralResponse, ACTION_DISPLAY register = template.Library() @register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True) def create_referral(context, url, obj=None): if obj: context.update( {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)} ) else: context.update( {"url": url, "obj": "", "obj_ct": ""} ) return context @register.assignment_tag def referral_responses(user): return ReferralResponse.objects.filter( referral__user=user ).order_by("-created_at") @register.filter def action_display(value): return ACTION_DISPLAY.get(value, value)
Add full context to the create_referral tag
Add full context to the create_referral tag
Python
mit
pinax/pinax-referrals,pinax/pinax-referrals
from django import template from django.contrib.contenttypes.models import ContentType from anafero.models import ReferralResponse, ACTION_DISPLAY register = template.Library() @register.inclusion_tag("anafero/_create_referral_form.html", takes_context=True) def create_referral(context, url, obj=None): if obj: context.update( {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)} ) else: context.update( {"url": url, "obj": "", "obj_ct": ""} ) return context @register.assignment_tag def referral_responses(user): return ReferralResponse.objects.filter( referral__user=user ).order_by("-created_at") @register.filter def action_display(value): return ACTION_DISPLAY.get(value, value)
Add full context to the create_referral tag from django import template from django.contrib.contenttypes.models import ContentType from anafero.models import ReferralResponse, ACTION_DISPLAY register = template.Library() @register.inclusion_tag("anafero/_create_referral_form.html") def create_referral(url, obj=None): if obj: return {"url": url, "obj": obj, "obj_ct": ContentType.objects.get_for_model(obj)} else: return {"url": url, "obj": "", "obj_ct": ""} @register.assignment_tag def referral_responses(user): return ReferralResponse.objects.filter( referral__user=user ).order_by("-created_at") @register.filter def action_display(value): return ACTION_DISPLAY.get(value, value)
d54544ecf6469eedce80d6d3180aa826c1fcc19a
cpgintegrate/__init__.py
cpgintegrate/__init__.py
import pandas import traceback import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: df = processor(file) yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
import pandas import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: source = getattr(file, 'name', None) subject_id = getattr(file, 'cpgintegrate_subject_id', None) try: df = processor(file) except Exception as e: raise ProcessingException({"Source": source, 'SubjectID': subject_id}) from e yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID") class ProcessingException(Exception): """cpgintegrate processing error"""
Add file source and subjectID to processing exceptions
Add file source and subjectID to processing exceptions
Python
agpl-3.0
PointyShinyBurning/cpgintegrate
import pandas import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: source = getattr(file, 'name', None) subject_id = getattr(file, 'cpgintegrate_subject_id', None) try: df = processor(file) except Exception as e: raise ProcessingException({"Source": source, 'SubjectID': subject_id}) from e yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID") class ProcessingException(Exception): """cpgintegrate processing error"""
Add file source and subjectID to processing exceptions import pandas import traceback import typing def process_files(file_iterator: typing.Iterator[typing.IO], processor: typing.Callable) -> pandas.DataFrame: def get_frames(): for file in file_iterator: df = processor(file) yield (df .assign(Source=getattr(file, 'name', None), SubjectID=getattr(file, 'cpgintegrate_subject_id', None), FileSubjectID=df.index if df.index.name else None)) return pandas.DataFrame(pandas.concat((frame for frame in get_frames()))).set_index("SubjectID")
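Because the new ProcessingException is raised with raise ... from e, a caller gets the file context from args[0] and the processor's original error from __cause__. A usage sketch; the input names are hypothetical:

try:
    frame = process_files(open_files, my_processor)  # hypothetical inputs
except ProcessingException as exc:
    context = exc.args[0]      # {"Source": ..., "SubjectID": ...}
    original = exc.__cause__   # the exception the processor raised
    print("Processing failed for", context["Source"], "-", original)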
8d471b5b7a8f57214afe79783f09afa97c5d2bfc
entropy/__init__.py
entropy/__init__.py
import entropy._entropy as _entropy def entropy(data): """Compute the Shannon entropy of the given string. Returns a floating point value indicating how many bits of entropy there are per octet in the string.""" return _entropy.shannon_entropy(data) if __name__ == '__main__': print entropy('\n'.join(file(__file__)))
import entropy._entropy as _entropy def entropy(data): """Compute the Shannon entropy of the given string. Returns a floating point value indicating how many bits of entropy there are per octet in the string.""" return _entropy.shannon_entropy(data) def absolute_entropy(data): """Compute the "absolute" entropy of the given string. The absolute entropy of a string is how many bits of information, total, are in the entire string. This is the same as the Shannon entropy multiplied by the length of the string. A string can be losslessly compressed to a size no smaller than its absolute entropy.""" return entropy(data) * len(data) def relative_entropy(data): """Compute the relative entropy of the given string. The relative entropy is the ratio of the entropy of a string to its size, i.e., a measure of how well it uses space. It is, therefore, a floating point value on the interval (0, 1].""" return entropy(data) / 8 if __name__ == '__main__': print entropy('\n'.join(file(__file__)))
Add absolute and relative entropy functions.
Add absolute and relative entropy functions.
Python
bsd-3-clause
chachalaca/py-entropy,billthebrute/py-entropy,chachalaca/py-entropy,billthebrute/py-entropy
import entropy._entropy as _entropy

def entropy(data):
    """Compute the Shannon entropy of the given string.

    Returns a floating point value indicating how many bits of entropy
    there are per octet in the string."""
    return _entropy.shannon_entropy(data)

def absolute_entropy(data):
    """Compute the "absolute" entropy of the given string.

    The absolute entropy of a string is how many bits of information,
    total, are in the entire string. This is the same as the Shannon
    entropy multiplied by the length of the string.

    A string can be losslessly compressed to a size no smaller than
    its absolute entropy."""
    return entropy(data) * len(data)

def relative_entropy(data):
    """Compute the relative entropy of the given string.

    The relative entropy is the ratio of the entropy of a string to
    its size, i.e., a measure of how well it uses space. It is,
    therefore, a floating point value on the interval (0, 1]."""
    return entropy(data) / 8

if __name__ == '__main__':
    print entropy('\n'.join(file(__file__)))
Add absolute and relative entropy functions.

import entropy._entropy as _entropy

def entropy(data):
    """Compute the Shannon entropy of the given string.

    Returns a floating point value indicating how many bits of entropy
    there are per octet in the string."""
    return _entropy.shannon_entropy(data)

if __name__ == '__main__':
    print entropy('\n'.join(file(__file__)))
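The record above relies on a C helper, _entropy.shannon_entropy, whose source is not shown here. As an illustrative Python 3 sketch of the quantity it computes (an assumption about the extension's behaviour, not its actual code): the per-octet entropy is -sum(p * log2(p)) over byte frequencies, absolute entropy multiplies that by the length, and relative entropy divides by the 8 bits in an octet.

import math
from collections import Counter

def shannon_entropy(data):
    # Bits of entropy per octet, from observed byte frequencies.
    counts = Counter(data)
    n = len(data)
    return -sum((c / n) * math.log2(c / n) for c in counts.values())

s = b"aabb"
print(shannon_entropy(s))            # 1.0 bit per octet
print(shannon_entropy(s) * len(s))   # absolute entropy: 4.0 bits
print(shannon_entropy(s) / 8)        # relative entropy: 0.125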
b3a8a187cb6e569229d7e6d2929377035790f7de
virtool/dev/api.py
virtool/dev/api.py
from logging import getLogger

from virtool.api.response import no_content
from virtool.fake.wrapper import FakerWrapper
from virtool.http.routes import Routes
from virtool.samples.fake import create_fake_samples
from virtool.subtractions.fake import create_fake_fasta_upload, create_fake_finalized_subtraction
from virtool.utils import random_alphanumeric

logger = getLogger(__name__)

routes = Routes()

faker = FakerWrapper()


@routes.post("/api/dev")
async def dev(req):
    data = await req.json()
    user_id = req["client"].user_id

    command = data.get("command")

    if command == "clear_users":
        await req.app["db"].users.delete_many({})
        await req.app["db"].sessions.delete_many({})
        await req.app["db"].keys.delete_many({})
        logger.debug("Cleared users")

    if command == "create_subtraction":
        upload_id, upload_name = await create_fake_fasta_upload(
            req.app,
            req["client"].user_id
        )

        await create_fake_finalized_subtraction(
            req.app,
            upload_id,
            upload_name,
            random_alphanumeric(8),
            user_id
        )

    if command == "create_sample":
        await create_fake_samples(req.app)

    return no_content()
from logging import getLogger

from virtool.api.response import no_content
from virtool.fake.wrapper import FakerWrapper
from virtool.http.routes import Routes
from virtool.samples.fake import create_fake_sample
from virtool.subtractions.fake import create_fake_fasta_upload, create_fake_finalized_subtraction
from virtool.utils import random_alphanumeric

logger = getLogger(__name__)

routes = Routes()

faker = FakerWrapper()


@routes.post("/api/dev")
async def dev(req):
    data = await req.json()
    user_id = req["client"].user_id

    command = data.get("command")

    if command == "clear_users":
        await req.app["db"].users.delete_many({})
        await req.app["db"].sessions.delete_many({})
        await req.app["db"].keys.delete_many({})
        logger.debug("Cleared users")

    if command == "create_subtraction":
        upload_id, upload_name = await create_fake_fasta_upload(
            req.app,
            req["client"].user_id
        )

        await create_fake_finalized_subtraction(
            req.app,
            upload_id,
            upload_name,
            random_alphanumeric(8),
            user_id
        )

    if command == "create_sample":
        await create_fake_sample(
            req.app,
            random_alphanumeric(8),
            req["client"].user_id,
            False,
            True
        )

    return no_content()
Fix handling of create_sample command on dev API endpoint
Fix handling of create_sample command on dev API endpoint This was completely broken.
Python
mit
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
from logging import getLogger

from virtool.api.response import no_content
from virtool.fake.wrapper import FakerWrapper
from virtool.http.routes import Routes
from virtool.samples.fake import create_fake_sample
from virtool.subtractions.fake import create_fake_fasta_upload, create_fake_finalized_subtraction
from virtool.utils import random_alphanumeric

logger = getLogger(__name__)

routes = Routes()

faker = FakerWrapper()


@routes.post("/api/dev")
async def dev(req):
    data = await req.json()
    user_id = req["client"].user_id

    command = data.get("command")

    if command == "clear_users":
        await req.app["db"].users.delete_many({})
        await req.app["db"].sessions.delete_many({})
        await req.app["db"].keys.delete_many({})
        logger.debug("Cleared users")

    if command == "create_subtraction":
        upload_id, upload_name = await create_fake_fasta_upload(
            req.app,
            req["client"].user_id
        )

        await create_fake_finalized_subtraction(
            req.app,
            upload_id,
            upload_name,
            random_alphanumeric(8),
            user_id
        )

    if command == "create_sample":
        await create_fake_sample(
            req.app,
            random_alphanumeric(8),
            req["client"].user_id,
            False,
            True
        )

    return no_content()
Fix handling of create_sample command on dev API endpoint

This was completely broken.

from logging import getLogger

from virtool.api.response import no_content
from virtool.fake.wrapper import FakerWrapper
from virtool.http.routes import Routes
from virtool.samples.fake import create_fake_samples
from virtool.subtractions.fake import create_fake_fasta_upload, create_fake_finalized_subtraction
from virtool.utils import random_alphanumeric

logger = getLogger(__name__)

routes = Routes()

faker = FakerWrapper()


@routes.post("/api/dev")
async def dev(req):
    data = await req.json()
    user_id = req["client"].user_id

    command = data.get("command")

    if command == "clear_users":
        await req.app["db"].users.delete_many({})
        await req.app["db"].sessions.delete_many({})
        await req.app["db"].keys.delete_many({})
        logger.debug("Cleared users")

    if command == "create_subtraction":
        upload_id, upload_name = await create_fake_fasta_upload(
            req.app,
            req["client"].user_id
        )

        await create_fake_finalized_subtraction(
            req.app,
            upload_id,
            upload_name,
            random_alphanumeric(8),
            user_id
        )

    if command == "create_sample":
        await create_fake_samples(req.app)

    return no_content()
0aa6a648fff39b013f9b644d9a894db39706df43
amplpy/amplpython/__init__.py
amplpy/amplpython/__init__.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import ctypes
import platform

if platform.system() == 'Windows':
    lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
    lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
    from glob import glob
    try:
        if ctypes.sizeof(ctypes.c_voidp) == 4:
            dllfile = glob(lib32 + '/*.dll')[0]
        else:
            dllfile = glob(lib64 + '/*.dll')[0]
        ctypes.CDLL(dllfile)
    except:
        pass

from .amplpython import *
from .amplpython import _READTABLE, _WRITETABLE
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform

if platform.system() == 'Windows':
    lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
    lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
    from glob import glob
    try:
        if ctypes.sizeof(ctypes.c_voidp) == 4:
            dllfile = glob(lib32 + '/*.dll')[0]
        else:
            dllfile = glob(lib64 + '/*.dll')[0]
        ctypes.CDLL(dllfile)
    except:
        pass

sys.path.append(os.path.dirname(__file__))

from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
Fix 'ModuleNotFoundError: No module named amplpython'
Fix 'ModuleNotFoundError: No module named amplpython'
Python
bsd-3-clause
ampl/amplpy,ampl/amplpy,ampl/amplpy
# -*- coding: utf-8 -*-
import os
import sys
import ctypes
import platform

if platform.system() == 'Windows':
    lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
    lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
    from glob import glob
    try:
        if ctypes.sizeof(ctypes.c_voidp) == 4:
            dllfile = glob(lib32 + '/*.dll')[0]
        else:
            dllfile = glob(lib64 + '/*.dll')[0]
        ctypes.CDLL(dllfile)
    except:
        pass

sys.path.append(os.path.dirname(__file__))

from amplpython import *
from amplpython import _READTABLE, _WRITETABLE
Fix 'ModuleNotFoundError: No module named amplpython'

# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import ctypes
import platform

if platform.system() == 'Windows':
    lib32 = os.path.join(os.path.dirname(__file__), 'lib32')
    lib64 = os.path.join(os.path.dirname(__file__), 'lib64')
    from glob import glob
    try:
        if ctypes.sizeof(ctypes.c_voidp) == 4:
            dllfile = glob(lib32 + '/*.dll')[0]
        else:
            dllfile = glob(lib64 + '/*.dll')[0]
        ctypes.CDLL(dllfile)
    except:
        pass

from .amplpython import *
from .amplpython import _READTABLE, _WRITETABLE
a329770bdd5fdc6a646d6a0b298f0a67c789f86a
resolwe/flow/migrations/0029_storage_m2m.py
resolwe/flow/migrations/0029_storage_m2m.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-26 04:08
from __future__ import unicode_literals

from django.db import migrations, models


def set_data_relation(apps, schema_editor):
    Data = apps.get_model('flow', 'Data')
    Storage = apps.get_model('flow', 'Storage')

    for data in Data.objects.all():
        storage = Storage.objects.filter(data_migration_temporary=data).first()
        if storage:
            storage.data.add(data)


class Migration(migrations.Migration):

    dependencies = [
        ('flow', '0028_add_data_location'),
    ]

    operations = [
        migrations.RenameField(
            model_name='storage',
            old_name='data',
            new_name='data_migration_temporary',
        ),
        migrations.AddField(
            model_name='storage',
            name='data',
            field=models.ManyToManyField(related_name='storages', to='flow.Data'),
        ),
        migrations.RunPython(set_data_relation),
        migrations.RemoveField(
            model_name='storage',
            name='data_migration_temporary',
        ),
    ]
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-26 04:08
from __future__ import unicode_literals

from django.db import migrations, models


def set_data_relation(apps, schema_editor):
    Storage = apps.get_model('flow', 'Storage')

    for storage in Storage.objects.all():
        storage.data.add(storage.data_migration_temporary)


class Migration(migrations.Migration):

    dependencies = [
        ('flow', '0028_add_data_location'),
    ]

    operations = [
        migrations.RenameField(
            model_name='storage',
            old_name='data',
            new_name='data_migration_temporary',
        ),
        migrations.AddField(
            model_name='storage',
            name='data',
            field=models.ManyToManyField(related_name='storages', to='flow.Data'),
        ),
        migrations.RunPython(set_data_relation),
        migrations.RemoveField(
            model_name='storage',
            name='data_migration_temporary',
        ),
    ]
Fix storage migration to process all storages
Fix storage migration to process all storages
Python
apache-2.0
genialis/resolwe,genialis/resolwe
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-26 04:08
from __future__ import unicode_literals

from django.db import migrations, models


def set_data_relation(apps, schema_editor):
    Storage = apps.get_model('flow', 'Storage')

    for storage in Storage.objects.all():
        storage.data.add(storage.data_migration_temporary)


class Migration(migrations.Migration):

    dependencies = [
        ('flow', '0028_add_data_location'),
    ]

    operations = [
        migrations.RenameField(
            model_name='storage',
            old_name='data',
            new_name='data_migration_temporary',
        ),
        migrations.AddField(
            model_name='storage',
            name='data',
            field=models.ManyToManyField(related_name='storages', to='flow.Data'),
        ),
        migrations.RunPython(set_data_relation),
        migrations.RemoveField(
            model_name='storage',
            name='data_migration_temporary',
        ),
    ]
Fix storage migration to process all storages

# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-02-26 04:08
from __future__ import unicode_literals

from django.db import migrations, models


def set_data_relation(apps, schema_editor):
    Data = apps.get_model('flow', 'Data')
    Storage = apps.get_model('flow', 'Storage')

    for data in Data.objects.all():
        storage = Storage.objects.filter(data_migration_temporary=data).first()
        if storage:
            storage.data.add(data)


class Migration(migrations.Migration):

    dependencies = [
        ('flow', '0028_add_data_location'),
    ]

    operations = [
        migrations.RenameField(
            model_name='storage',
            old_name='data',
            new_name='data_migration_temporary',
        ),
        migrations.AddField(
            model_name='storage',
            name='data',
            field=models.ManyToManyField(related_name='storages', to='flow.Data'),
        ),
        migrations.RunPython(set_data_relation),
        migrations.RemoveField(
            model_name='storage',
            name='data_migration_temporary',
        ),
    ]
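The bug fixed here is worth spelling out: the old data function paired each Data row with at most one Storage via .filter(...).first(), so any additional storages pointing at the same data were never linked. Iterating the model that owns the old foreign key touches every row exactly once. A generic sketch of the same rename/add/copy/remove pattern, with illustrative model and field names rather than resolwe's:

from django.db import migrations, models

def copy_author_links(apps, schema_editor):
    # Iterate the FK-owning side so every row is copied once; querying
    # from the other side with .first() drops rows sharing a target.
    Book = apps.get_model('library', 'Book')
    for book in Book.objects.all():
        book.authors.add(book.author_migration_temporary)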
77d491ea43fcd00dcfcee1f0b9c2fdb50dc50c8e
tests/test_models.py
tests/test_models.py
import unittest

from datetime import datetime
from twofa import create_app, db
from twofa.models import User


class UserTestCase(unittest.TestCase):
    def setUp(self):
        self.app = create_app('testing')
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_password_setter(self):
        pass
import unittest

from twofa import create_app, db
from twofa.models import User
from unittest.mock import patch


class UserTestCase(unittest.TestCase):
    def setUp(self):
        self.app = create_app('testing')
        self.user = User(
            '[email protected]', 'fakepassword', 'Alice', 33, 600112233, 123
        )
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_has_authy_app(self):
        # Arrange

        # Act
        with patch('twofa.models.authy_user_has_app', return_value=True):
            has_authy_app = self.user.has_authy_app

        # Assert
        self.assertTrue(has_authy_app)

    def test_hasnt_authy_app(self):
        # Arrange

        # Act
        with patch('twofa.models.authy_user_has_app', return_value=False):
            has_authy_app = self.user.has_authy_app

        # Assert
        self.assertFalse(has_authy_app)

    def test_password_is_unreadable(self):
        # Arrange

        # Act / Assert
        with self.assertRaises(AttributeError):
            self.user.password

    def test_password_setter(self):
        # Arrange
        old_password_hash = self.user.password_hash
        password = 'superpassword'

        # Act
        self.user.password = password

        # Assert
        self.assertNotEqual(password, self.user.password_hash)
        self.assertNotEqual(old_password_hash, self.user.password_hash)

    def test_verify_password(self):
        # Arrange
        password = 'anothercoolpassword'
        unused_password = 'unusedpassword'
        self.user.password = password

        # Act
        ret_good_password = self.user.verify_password(password)
        ret_bad_password = self.user.verify_password(unused_password)

        # Assert
        self.assertTrue(ret_good_password)
        self.assertFalse(ret_bad_password)

    def test_send_one_touch_request(self):
        # Arrange

        # Act
        with patch('twofa.models.send_authy_one_touch_request') as fake_send:
            self.user.send_one_touch_request()

        # Assert
        fake_send.assert_called_with(self.user.authy_id, self.user.email)
Add some tests for the model
Add some tests for the model
Python
mit
TwilioDevEd/authy2fa-flask,TwilioDevEd/authy2fa-flask,TwilioDevEd/authy2fa-flask,TwilioDevEd/authy2fa-flask
import unittest

from twofa import create_app, db
from twofa.models import User
from unittest.mock import patch


class UserTestCase(unittest.TestCase):
    def setUp(self):
        self.app = create_app('testing')
        self.user = User(
            '[email protected]', 'fakepassword', 'Alice', 33, 600112233, 123
        )
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_has_authy_app(self):
        # Arrange

        # Act
        with patch('twofa.models.authy_user_has_app', return_value=True):
            has_authy_app = self.user.has_authy_app

        # Assert
        self.assertTrue(has_authy_app)

    def test_hasnt_authy_app(self):
        # Arrange

        # Act
        with patch('twofa.models.authy_user_has_app', return_value=False):
            has_authy_app = self.user.has_authy_app

        # Assert
        self.assertFalse(has_authy_app)

    def test_password_is_unreadable(self):
        # Arrange

        # Act / Assert
        with self.assertRaises(AttributeError):
            self.user.password

    def test_password_setter(self):
        # Arrange
        old_password_hash = self.user.password_hash
        password = 'superpassword'

        # Act
        self.user.password = password

        # Assert
        self.assertNotEqual(password, self.user.password_hash)
        self.assertNotEqual(old_password_hash, self.user.password_hash)

    def test_verify_password(self):
        # Arrange
        password = 'anothercoolpassword'
        unused_password = 'unusedpassword'
        self.user.password = password

        # Act
        ret_good_password = self.user.verify_password(password)
        ret_bad_password = self.user.verify_password(unused_password)

        # Assert
        self.assertTrue(ret_good_password)
        self.assertFalse(ret_bad_password)

    def test_send_one_touch_request(self):
        # Arrange

        # Act
        with patch('twofa.models.send_authy_one_touch_request') as fake_send:
            self.user.send_one_touch_request()

        # Assert
        fake_send.assert_called_with(self.user.authy_id, self.user.email)
Add some tests for the model

import unittest

from datetime import datetime
from twofa import create_app, db
from twofa.models import User


class UserTestCase(unittest.TestCase):
    def setUp(self):
        self.app = create_app('testing')
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_password_setter(self):
        pass
8ab1e018319fc7fc3837f1d8d1dd59a0dc3f2eb5
tests/compiler/test_conditional_compilation.py
tests/compiler/test_conditional_compilation.py
from tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump

PREFIX = [
    OpcodePushStatic(STATIC_START),
    OpcodePushStatic(STATIC_START + 1),
    internal_call('text.__equals__'),
]


def test_simple_conditional():
    assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
        OpcodeJumpConditional(26),
        OpcodePushStatic(STATIC_START + 2),
        internal_call('Console.write')
    ]


def test_empty_conditional():
    assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
        OpcodeJumpConditional(24)
    ]
Add test for conditional compilation
Add test for conditional compilation
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
from tests.compiler import compile_snippet, STATIC_START, internal_call
from thinglang.compiler.opcodes import OpcodePushStatic, OpcodeJumpConditional, OpcodeJump

PREFIX = [
    OpcodePushStatic(STATIC_START),
    OpcodePushStatic(STATIC_START + 1),
    internal_call('text.__equals__'),
]


def test_simple_conditional():
    assert compile_snippet({'if "dog" == "dog"': ['Console.write("executing")']}) == PREFIX + [
        OpcodeJumpConditional(26),
        OpcodePushStatic(STATIC_START + 2),
        internal_call('Console.write')
    ]


def test_empty_conditional():
    assert compile_snippet({'if "dog" == "dog"': ['pass']}) == PREFIX + [
        OpcodeJumpConditional(24)
    ]
Add test for conditional compilation
4a7f152e5feb9393ae548f239b2cbf2d8cee3c4e
modules/email.py
modules/email.py
# -*- coding: utf-8 -*-

from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time


class email:
    def __init__(self, config):
        self.logger = logging.getLogger('app_logger')
        self.server = config['host']
        self.port = config['port']
        self.sender = config['sender']
        self.password = config['password']

    def send(self, name, ret, mailto, subject, content, message):
        with open(content, 'r') as mail_config_file:
            try:
                body = mail_config_file.read()
            except:
                self.logger.error('Invalid configuration content file')
                sys.exit(1)

        text_content = Template(body).render(service=name, diagnostic=ret['message'])
        text_subject = Template(subject).render(service=name)

        try:
            test_smtp = sender.Mail(self.server, port=self.port,
                                    username=self.sender,
                                    password=self.password, use_tls=True)
            test_smtp.send_message(text_subject, to=mailto,
                                   fromaddr=self.sender, body=text_content)
        except:
            self.logger.error('Cannot send email {0}'.format(subject))
# -*- coding: utf-8 -*-

from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time


class email:
    def __init__(self, config):
        self.logger = logging.getLogger('app_logger')
        self.server = config['host']
        self.port = config['port']
        self.sender = config['sender']
        self.password = config['password']

    def send(self, name, ret, mailto, subject, content, message):
        with open(content, 'r') as mail_config_file:
            try:
                body = mail_config_file.read()
            except:
                self.logger.error('Invalid configuration content file')
                sys.exit(1)

        text_content = Template(body).render(service=name, diagnostic=ret['message'])
        text_subject = Template(subject).render(service=name)

        try:
            test_smtp = sender.Mail(self.server, port=self.port,
                                    username=self.sender,
                                    password=self.password, use_tls=True)
            test_smtp.send_message(text_subject, to=mailto,
                                   fromaddr=self.sender, body=text_content)
        except:
            self.logger.error('Cannot send email {0}'.format(text_subject))
Fix service name when we failed to send a mail.
Fix service name when we failed to send a mail. This solves issue #3.
Python
apache-2.0
Lex-Persona/SupExt
# -*- coding: utf-8 -*-

from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time


class email:
    def __init__(self, config):
        self.logger = logging.getLogger('app_logger')
        self.server = config['host']
        self.port = config['port']
        self.sender = config['sender']
        self.password = config['password']

    def send(self, name, ret, mailto, subject, content, message):
        with open(content, 'r') as mail_config_file:
            try:
                body = mail_config_file.read()
            except:
                self.logger.error('Invalid configuration content file')
                sys.exit(1)

        text_content = Template(body).render(service=name, diagnostic=ret['message'])
        text_subject = Template(subject).render(service=name)

        try:
            test_smtp = sender.Mail(self.server, port=self.port,
                                    username=self.sender,
                                    password=self.password, use_tls=True)
            test_smtp.send_message(text_subject, to=mailto,
                                   fromaddr=self.sender, body=text_content)
        except:
            self.logger.error('Cannot send email {0}'.format(text_subject))
Fix service name when we failed to send a mail.

This solves issue #3.

# -*- coding: utf-8 -*-

from jinja2 import Template
import sender
from imapclient import IMAPClient
import socket
import logging
import time


class email:
    def __init__(self, config):
        self.logger = logging.getLogger('app_logger')
        self.server = config['host']
        self.port = config['port']
        self.sender = config['sender']
        self.password = config['password']

    def send(self, name, ret, mailto, subject, content, message):
        with open(content, 'r') as mail_config_file:
            try:
                body = mail_config_file.read()
            except:
                self.logger.error('Invalid configuration content file')
                sys.exit(1)

        text_content = Template(body).render(service=name, diagnostic=ret['message'])
        text_subject = Template(subject).render(service=name)

        try:
            test_smtp = sender.Mail(self.server, port=self.port,
                                    username=self.sender,
                                    password=self.password, use_tls=True)
            test_smtp.send_message(text_subject, to=mailto,
                                   fromaddr=self.sender, body=text_content)
        except:
            self.logger.error('Cannot send email {0}'.format(subject))
d4e8839ac02935b86c1634848476a9a8512c376d
delivery_transsmart/models/res_partner.py
delivery_transsmart/models/res_partner.py
# -*- coding: utf-8 -*-
##############################################################################
#
#    Delivery Transsmart Ingegration
#    © 2016 - 1200 Web Development <http://1200wd.com/>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning


class ProductProduct(models.Model):
    _inherit = 'product.product'

    service_level_id = fields.Many2one(
        'delivery.service.level', string='Service Level')
    service_level_time_id = fields.Many2one(
        'delivery.service.level.time', string='Service Level Time')


class ResPartner(models.Model):
    _inherit = 'res.partner'

    transsmart_code = fields.Char(
        size=128, string="Transsmart Code")
    transsmart_id = fields.Integer(
        "Transsmart ID")
# -*- coding: utf-8 -*-
##############################################################################
#
#    Delivery Transsmart Ingegration
#    © 2016 - 1200 Web Development <http://1200wd.com/>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning


class ResPartner(models.Model):
    _inherit = 'res.partner'

    transsmart_code = fields.Char(
        size=128, string="Transsmart Code")
    transsmart_id = fields.Integer(
        "Transsmart ID")
Remove double product field definitions
[DEL] Remove double product field definitions
Python
agpl-3.0
1200wd/1200wd_addons,1200wd/1200wd_addons
# -*- coding: utf-8 -*-
##############################################################################
#
#    Delivery Transsmart Ingegration
#    © 2016 - 1200 Web Development <http://1200wd.com/>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning


class ResPartner(models.Model):
    _inherit = 'res.partner'

    transsmart_code = fields.Char(
        size=128, string="Transsmart Code")
    transsmart_id = fields.Integer(
        "Transsmart ID")
[DEL] Remove double product field definitions

# -*- coding: utf-8 -*-
##############################################################################
#
#    Delivery Transsmart Ingegration
#    © 2016 - 1200 Web Development <http://1200wd.com/>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import Warning


class ProductProduct(models.Model):
    _inherit = 'product.product'

    service_level_id = fields.Many2one(
        'delivery.service.level', string='Service Level')
    service_level_time_id = fields.Many2one(
        'delivery.service.level.time', string='Service Level Time')


class ResPartner(models.Model):
    _inherit = 'res.partner'

    transsmart_code = fields.Char(
        size=128, string="Transsmart Code")
    transsmart_id = fields.Integer(
        "Transsmart ID")
aff8cebfd168493a4a9dff77cf9722507429d570
contrib/examples/actions/pythonactions/isprime.py
contrib/examples/actions/pythonactions/isprime.py
import math


class PrimeChecker(object):
    def run(self, **kwargs):
        return self._is_prime(**kwargs)

    def _is_prime(self, value=0):
        if math.floor(value) != value:
            raise ValueError('%s should be an integer.' % value)
        if value < 2:
            return False
        for test in range(2, int(math.floor(math.sqrt(value)))+1):
            if value % test == 0:
                return False
        return True


if __name__ == '__main__':
    checker = PrimeChecker()
    for i in range(0, 10):
        print '%s : %s' % (i, checker.run(**{'value': i}))
import math


class PrimeChecker(object):
    def run(self, value=0):
        if math.floor(value) != value:
            raise ValueError('%s should be an integer.' % value)
        if value < 2:
            return False
        for test in range(2, int(math.floor(math.sqrt(value)))+1):
            if value % test == 0:
                return False
        return True


if __name__ == '__main__':
    checker = PrimeChecker()
    for i in range(0, 10):
        print '%s : %s' % (i, checker.run(**{'value': i}))
Update pythonaction sample for simpler run.
Update pythonaction sample for simpler run.
Python
apache-2.0
peak6/st2,lakshmi-kannan/st2,pixelrebel/st2,StackStorm/st2,jtopjian/st2,pinterb/st2,Plexxi/st2,punalpatel/st2,armab/st2,grengojbo/st2,grengojbo/st2,punalpatel/st2,pixelrebel/st2,Itxaka/st2,lakshmi-kannan/st2,emedvedev/st2,lakshmi-kannan/st2,pixelrebel/st2,nzlosh/st2,peak6/st2,dennybaa/st2,pinterb/st2,Plexxi/st2,nzlosh/st2,Itxaka/st2,grengojbo/st2,alfasin/st2,nzlosh/st2,pinterb/st2,Plexxi/st2,jtopjian/st2,emedvedev/st2,StackStorm/st2,armab/st2,jtopjian/st2,StackStorm/st2,dennybaa/st2,alfasin/st2,emedvedev/st2,peak6/st2,punalpatel/st2,tonybaloney/st2,Plexxi/st2,tonybaloney/st2,alfasin/st2,nzlosh/st2,StackStorm/st2,tonybaloney/st2,dennybaa/st2,armab/st2,Itxaka/st2
import math


class PrimeChecker(object):
    def run(self, value=0):
        if math.floor(value) != value:
            raise ValueError('%s should be an integer.' % value)
        if value < 2:
            return False
        for test in range(2, int(math.floor(math.sqrt(value)))+1):
            if value % test == 0:
                return False
        return True


if __name__ == '__main__':
    checker = PrimeChecker()
    for i in range(0, 10):
        print '%s : %s' % (i, checker.run(**{'value': i}))
Update pythonaction sample for simpler run.

import math


class PrimeChecker(object):
    def run(self, **kwargs):
        return self._is_prime(**kwargs)

    def _is_prime(self, value=0):
        if math.floor(value) != value:
            raise ValueError('%s should be an integer.' % value)
        if value < 2:
            return False
        for test in range(2, int(math.floor(math.sqrt(value)))+1):
            if value % test == 0:
                return False
        return True


if __name__ == '__main__':
    checker = PrimeChecker()
    for i in range(0, 10):
        print '%s : %s' % (i, checker.run(**{'value': i}))
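Trial division only needs to test divisors up to floor(sqrt(n)), since any factorization n = a*b has min(a, b) <= sqrt(n). A compact Python 3.8+ equivalent of the check above, using math.isqrt to avoid floating-point sqrt; this is an illustrative variant, not the StackStorm action itself:

import math

def is_prime(value=0):
    if math.floor(value) != value:
        raise ValueError('%s should be an integer.' % value)
    value = int(value)
    if value < 2:
        return False
    # math.isqrt(value) equals int(floor(sqrt(value))) with no float rounding.
    return all(value % test for test in range(2, math.isqrt(value) + 1))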
371df3363677118d59315e66523aefb081c67282
astroML/plotting/settings.py
astroML/plotting/settings.py
def setup_text_plots(fontsize=8, usetex=True):
    """
    This function adjusts matplotlib settings so that all figures in the
    textbook have a uniform format and look.
    """
    import matplotlib
    matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
    matplotlib.rc('axes', titlesize=fontsize)
    matplotlib.rc('axes', labelsize=fontsize)
    matplotlib.rc('xtick', labelsize=fontsize)
    matplotlib.rc('ytick', labelsize=fontsize)
    matplotlib.rc('text', usetex=usetex)
    matplotlib.rc('font', size=fontsize, family='serif',
                  style='normal', variant='normal',
                  stretch='normal', weight='normal')
def setup_text_plots(fontsize=8, usetex=True):
    """
    This function adjusts matplotlib settings so that all figures in the
    textbook have a uniform format and look.
    """
    import matplotlib
    from distutils.version import LooseVersion
    matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
    matplotlib.rc('axes', titlesize=fontsize)
    matplotlib.rc('axes', labelsize=fontsize)
    matplotlib.rc('xtick', labelsize=fontsize)
    matplotlib.rc('ytick', labelsize=fontsize)
    matplotlib.rc('text', usetex=usetex)
    matplotlib.rc('font', size=fontsize, family='serif',
                  style='normal', variant='normal',
                  stretch='normal', weight='normal')
    matplotlib.rc('patch', force_edgecolor=True)
    if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
        matplotlib.rc('_internal', classic_mode=True)
    else:
        # New in mpl 3.1
        matplotlib.rc('scatter.edgecolors', 'b')
    matplotlib.rc('grid', linestyle=':')
Update the mpl rcparams for mpl 2.0+
Update the mpl rcparams for mpl 2.0+
Python
bsd-2-clause
astroML/astroML
def setup_text_plots(fontsize=8, usetex=True):
    """
    This function adjusts matplotlib settings so that all figures in the
    textbook have a uniform format and look.
    """
    import matplotlib
    from distutils.version import LooseVersion
    matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
    matplotlib.rc('axes', titlesize=fontsize)
    matplotlib.rc('axes', labelsize=fontsize)
    matplotlib.rc('xtick', labelsize=fontsize)
    matplotlib.rc('ytick', labelsize=fontsize)
    matplotlib.rc('text', usetex=usetex)
    matplotlib.rc('font', size=fontsize, family='serif',
                  style='normal', variant='normal',
                  stretch='normal', weight='normal')
    matplotlib.rc('patch', force_edgecolor=True)
    if LooseVersion(matplotlib.__version__) < LooseVersion("3.1"):
        matplotlib.rc('_internal', classic_mode=True)
    else:
        # New in mpl 3.1
        matplotlib.rc('scatter.edgecolors', 'b')
    matplotlib.rc('grid', linestyle=':')
Update the mpl rcparams for mpl 2.0+

def setup_text_plots(fontsize=8, usetex=True):
    """
    This function adjusts matplotlib settings so that all figures in the
    textbook have a uniform format and look.
    """
    import matplotlib
    matplotlib.rc('legend', fontsize=fontsize, handlelength=3)
    matplotlib.rc('axes', titlesize=fontsize)
    matplotlib.rc('axes', labelsize=fontsize)
    matplotlib.rc('xtick', labelsize=fontsize)
    matplotlib.rc('ytick', labelsize=fontsize)
    matplotlib.rc('text', usetex=usetex)
    matplotlib.rc('font', size=fontsize, family='serif',
                  style='normal', variant='normal',
                  stretch='normal', weight='normal')
11be4b77e84c721ef8de583b0dcf1035367d4b25
libtmux/__about__.py
libtmux/__about__.py
__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.0'
__description__ = 'scripting library / orm for tmux'
__email__ = '[email protected]'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016-2018 Tony Narlock'
__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.0'
__description__ = 'scripting library / orm for tmux'
__email__ = '[email protected]'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.python.org/pypi/libtmux'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016-2018 Tony Narlock'
Add __pypi__ url to metadata
Add __pypi__ url to metadata
Python
bsd-3-clause
tony/libtmux
__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.0'
__description__ = 'scripting library / orm for tmux'
__email__ = '[email protected]'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__pypi__ = 'https://pypi.python.org/pypi/libtmux'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016-2018 Tony Narlock'
Add __pypi__ url to metadata

__title__ = 'libtmux'
__package_name__ = 'libtmux'
__version__ = '0.8.0'
__description__ = 'scripting library / orm for tmux'
__email__ = '[email protected]'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tmux-python/libtmux'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016-2018 Tony Narlock'
6dc47f932b5c7f84918ec730b3ccd03d74070453
app/py/cuda_sort/app_specific.py
app/py/cuda_sort/app_specific.py
import os
from cudatext import *

def get_ini_fn():
    return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini')

def ed_set_text_all(lines):
    ed.set_text_all('\n'.join(lines)+'\n')

def ed_get_text_all():
    n = ed.get_line_count()
    if ed.get_text_line(n-1)=='':
        n-=1
    return [ed.get_text_line(i) for i in range(n)]

def ed_insert_to_lines(lines, line1, line2):
    ed.delete(0, line1, 0, line2+1)
    ed.insert(0, line1, '\n'.join(lines)+'\n')
    ed.set_caret(0, line2+1, 0, line1)

def ed_set_tab_title(s):
    ed.set_prop(PROP_TAB_TITLE, s)

def ed_convert_tabs_to_spaces(s):
    return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s)

def msg_show_error(s):
    msg_box(s, MB_OK+MB_ICONERROR)

def ed_get_sel_lines():
    return ed.get_sel_lines()
import os
from cudatext import *

def get_ini_fn():
    return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini')

def ed_set_text_all(lines):
    ed.set_text_all('\n'.join(lines)+'\n')

def ed_get_text_all():
    n = ed.get_line_count()
    if ed.get_text_line(n-1)=='':
        n-=1
    return [ed.get_text_line(i) for i in range(n)]

def ed_insert_to_lines(lines, line1, line2):
    ed.delete(0, line1, 0, line2+1)
    ed.insert(0, line1, '\n'.join(lines)+'\n')
    ed.set_caret(0, line1+len(lines), 0, line1)

def ed_set_tab_title(s):
    ed.set_prop(PROP_TAB_TITLE, s)

def ed_convert_tabs_to_spaces(s):
    return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s)

def msg_show_error(s):
    msg_box(s, MB_OK+MB_ICONERROR)

def ed_get_sel_lines():
    return ed.get_sel_lines()
Sort plg: fix caret pos after 'delete empty lines'
Sort plg: fix caret pos after 'delete empty lines'
Python
mpl-2.0
Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,vhanla/CudaText,vhanla/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText
import os
from cudatext import *

def get_ini_fn():
    return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini')

def ed_set_text_all(lines):
    ed.set_text_all('\n'.join(lines)+'\n')

def ed_get_text_all():
    n = ed.get_line_count()
    if ed.get_text_line(n-1)=='':
        n-=1
    return [ed.get_text_line(i) for i in range(n)]

def ed_insert_to_lines(lines, line1, line2):
    ed.delete(0, line1, 0, line2+1)
    ed.insert(0, line1, '\n'.join(lines)+'\n')
    ed.set_caret(0, line1+len(lines), 0, line1)

def ed_set_tab_title(s):
    ed.set_prop(PROP_TAB_TITLE, s)

def ed_convert_tabs_to_spaces(s):
    return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s)

def msg_show_error(s):
    msg_box(s, MB_OK+MB_ICONERROR)

def ed_get_sel_lines():
    return ed.get_sel_lines()
Sort plg: fix caret pos after 'delete empty lines'

import os
from cudatext import *

def get_ini_fn():
    return os.path.join(app_path(APP_DIR_SETTINGS), 'cuda_sort.ini')

def ed_set_text_all(lines):
    ed.set_text_all('\n'.join(lines)+'\n')

def ed_get_text_all():
    n = ed.get_line_count()
    if ed.get_text_line(n-1)=='':
        n-=1
    return [ed.get_text_line(i) for i in range(n)]

def ed_insert_to_lines(lines, line1, line2):
    ed.delete(0, line1, 0, line2+1)
    ed.insert(0, line1, '\n'.join(lines)+'\n')
    ed.set_caret(0, line2+1, 0, line1)

def ed_set_tab_title(s):
    ed.set_prop(PROP_TAB_TITLE, s)

def ed_convert_tabs_to_spaces(s):
    return ed.convert(CONVERT_LINE_TABS_TO_SPACES, 0, 0, s)

def msg_show_error(s):
    msg_box(s, MB_OK+MB_ICONERROR)

def ed_get_sel_lines():
    return ed.get_sel_lines()
a3053c843a5709d3fd0fe1dc6c93f369dc101d8b
setup.py
setup.py
from setuptools import setup, find_packages
import sys, os

version = '0.1'

setup(name='ckan-service-provider',
      version=version,
      description="A server that can server jobs at services.",
      long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='David Raznick',
      author_email='[email protected]',
      url='',
      license='AGPL',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=['''
          APScheduler
          Flask
          SQLAlchemy
          requests'''
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
from setuptools import setup, find_packages
import sys, os

version = '0.1'

setup(name='ckanserviceprovider',
      version=version,
      description="A server that can server jobs at services.",
      long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='David Raznick',
      author_email='[email protected]',
      url='',
      license='AGPL',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=['''
          APScheduler
          Flask
          SQLAlchemy
          requests'''
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
Rename the package so that it does not contain -
Rename the package so that it does not contain -
Python
agpl-3.0
ESRC-CDRC/ckan-service-provider,datawagovau/ckan-service-provider,deniszgonjanin/ckan-service-provider,ckan/ckan-service-provider
from setuptools import setup, find_packages
import sys, os

version = '0.1'

setup(name='ckanserviceprovider',
      version=version,
      description="A server that can server jobs at services.",
      long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='David Raznick',
      author_email='[email protected]',
      url='',
      license='AGPL',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=['''
          APScheduler
          Flask
          SQLAlchemy
          requests'''
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
Rename the package so that it does not contain -

from setuptools import setup, find_packages
import sys, os

version = '0.1'

setup(name='ckan-service-provider',
      version=version,
      description="A server that can server jobs at services.",
      long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='David Raznick',
      author_email='[email protected]',
      url='',
      license='AGPL',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      install_requires=['''
          APScheduler
          Flask
          SQLAlchemy
          requests'''
      ],
      entry_points="""
      # -*- Entry points: -*-
      """,
      )
d130a926c847f37f039dfff7c14140d933b7a6af
django/website/contacts/tests/test_group_permissions.py
django/website/contacts/tests/test_group_permissions.py
import pytest

from django.contrib.auth.models import Permission, Group, ContentType

from contacts.group_permissions import GroupPermissions


@pytest.mark.django_db
def test_add_perms():
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    content_type = ContentType.objects.get_for_model(any_model)

    codenames = ['a_do_stuff', 'b_do_more_stuff']
    expected_permissions = []
    for name in codenames:
        perm, _ = Permission.objects.get_or_create(name=name,
                                                   codename=name,
                                                   content_type=content_type)
        expected_permissions.append(perm)

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        gp.add_permissions(any_model, *codenames)

    assert list(g1.permissions.all()) == expected_permissions
    assert list(g2.permissions.all()) == expected_permissions
import pytest

from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist

from contacts.group_permissions import GroupPermissions


@pytest.mark.django_db
def test_add_perms():
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    content_type = ContentType.objects.get_for_model(any_model)

    codenames = ['a_do_stuff', 'b_do_more_stuff']
    expected_permissions = []
    for name in codenames:
        perm, _ = Permission.objects.get_or_create(name=name,
                                                   codename=name,
                                                   content_type=content_type)
        expected_permissions.append(perm)

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        gp.add_permissions(any_model, *codenames)

    assert list(g1.permissions.all()) == expected_permissions
    assert list(g2.permissions.all()) == expected_permissions


@pytest.mark.django_db
def test_add_nonexistent_perms():
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    codenames = ['a_do_stuff', 'b_do_more_stuff']

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        try:
            gp.add_permissions(any_model, *codenames)
            pytest.fail("This should raise an ObjectDoesNotExist exception",
                        False)
        except ObjectDoesNotExist:
            pass
Test can't give group non-existent permission
Test can't give group non-existent permission
Python
agpl-3.0
aptivate/alfie,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/alfie,aptivate/alfie,daniell/kashana,daniell/kashana,aptivate/kashana,daniell/kashana,aptivate/kashana,aptivate/kashana
import pytest

from django.contrib.auth.models import Permission, Group, ContentType
from django.core.exceptions import ObjectDoesNotExist

from contacts.group_permissions import GroupPermissions


@pytest.mark.django_db
def test_add_perms():
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    content_type = ContentType.objects.get_for_model(any_model)

    codenames = ['a_do_stuff', 'b_do_more_stuff']
    expected_permissions = []
    for name in codenames:
        perm, _ = Permission.objects.get_or_create(name=name,
                                                   codename=name,
                                                   content_type=content_type)
        expected_permissions.append(perm)

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        gp.add_permissions(any_model, *codenames)

    assert list(g1.permissions.all()) == expected_permissions
    assert list(g2.permissions.all()) == expected_permissions


@pytest.mark.django_db
def test_add_nonexistent_perms():
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    codenames = ['a_do_stuff', 'b_do_more_stuff']

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        try:
            gp.add_permissions(any_model, *codenames)
            pytest.fail("This should raise an ObjectDoesNotExist exception",
                        False)
        except ObjectDoesNotExist:
            pass
Test can't give group non-existent permission

import pytest

from django.contrib.auth.models import Permission, Group, ContentType

from contacts.group_permissions import GroupPermissions


@pytest.mark.django_db
def test_add_perms():
    g1, _ = Group.objects.get_or_create(name="Test Group 1")
    g2, _ = Group.objects.get_or_create(name="Test Group 2")

    any_model = Group  # for example
    content_type = ContentType.objects.get_for_model(any_model)

    codenames = ['a_do_stuff', 'b_do_more_stuff']
    expected_permissions = []
    for name in codenames:
        perm, _ = Permission.objects.get_or_create(name=name,
                                                   codename=name,
                                                   content_type=content_type)
        expected_permissions.append(perm)

    gp = GroupPermissions()
    with gp.groups(g1, g2):
        gp.add_permissions(any_model, *codenames)

    assert list(g1.permissions.all()) == expected_permissions
    assert list(g2.permissions.all()) == expected_permissions
d72eb62bc0afe1b37c675babed8373bd536de73c
python/challenges/plusMinus.py
python/challenges/plusMinus.py
""" Problem Statement: Given an array of integers, calculate which fraction of the elements are positive, negative, and zeroes, respectively. Print the decimal value of each fraction. Input Format: The first line, N, is the size of the array. The second line contains N space-separated integers describing the array of numbers (A1,A2,A3,⋯,AN). Output Format: Print each value on its own line with the fraction of positive numbers first, negative numbers second, and zeroes third. """
import unittest

"""
Problem Statement:

Given an array of integers, calculate which fraction of the elements are
positive, negative, and zeroes, respectively. Print the decimal value of
each fraction.

Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array
of numbers (A1,A2,A3,⋯,AN).

Output Format:
Print each value on its own line with the fraction of positive numbers
first, negative numbers second, and zeroes third.

There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are
3/6=0.500000, 2/6=0.333333 and 1/6=0.166667, respectively.
"""


def plusMinus(arr):
    def roundToPrecision(num):
        return round(num / n, 6)

    n = len(arr)
    pos, neg, zer = 0, 0, 0
    for item in arr:
        if item == 0:
            zer += 1
        elif item > 0:
            pos += 1
        elif item < 0:
            neg += 1

    results = []
    for result in [pos, neg, zer]:
        results.append(roundToPrecision(result))
    return results


class TestPlusMinus(unittest.TestCase):
    def test_plus_minus(self):
        arr = [-4, 3, -9, 0, 4, 1]
        self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])


if __name__ == '__main__':
    unittest.main()
Create way to compute ratios of each number type
Create way to compute ratios of each number type
Python
mit
markthethomas/algorithms,markthethomas/algorithms,markthethomas/algorithms,markthethomas/algorithms
import unittest

"""
Problem Statement:

Given an array of integers, calculate which fraction of the elements are
positive, negative, and zeroes, respectively. Print the decimal value of
each fraction.

Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array
of numbers (A1,A2,A3,⋯,AN).

Output Format:
Print each value on its own line with the fraction of positive numbers
first, negative numbers second, and zeroes third.

There are 3 positive numbers, 2 negative numbers, and 1 zero in the array.
The fraction of the positive numbers, negative numbers and zeroes are
3/6=0.500000, 2/6=0.333333 and 1/6=0.166667, respectively.
"""


def plusMinus(arr):
    def roundToPrecision(num):
        return round(num / n, 6)

    n = len(arr)
    pos, neg, zer = 0, 0, 0
    for item in arr:
        if item == 0:
            zer += 1
        elif item > 0:
            pos += 1
        elif item < 0:
            neg += 1

    results = []
    for result in [pos, neg, zer]:
        results.append(roundToPrecision(result))
    return results


class TestPlusMinus(unittest.TestCase):
    def test_plus_minus(self):
        arr = [-4, 3, -9, 0, 4, 1]
        self.assertEqual(plusMinus(arr), [0.500000, 0.333333, 0.166667])


if __name__ == '__main__':
    unittest.main()
Create way to compute ratios of each number type

"""
Problem Statement:

Given an array of integers, calculate which fraction of the elements are
positive, negative, and zeroes, respectively. Print the decimal value of
each fraction.

Input Format:
The first line, N, is the size of the array.
The second line contains N space-separated integers describing the array
of numbers (A1,A2,A3,⋯,AN).

Output Format:
Print each value on its own line with the fraction of positive numbers
first, negative numbers second, and zeroes third.
"""
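Working through the sample array [-4, 3, -9, 0, 4, 1]: 3 of 6 elements are positive (0.5), 2 of 6 are negative (0.333333), and 1 of 6 is zero (0.166667). The same ratios can be computed more compactly with generator expressions; this is an alternative sketch, not the challenge solution recorded above:

def plus_minus(arr):
    # Count by summing boolean comparisons, then round each ratio.
    n = len(arr)
    return [round(sum(x > 0 for x in arr) / n, 6),
            round(sum(x < 0 for x in arr) / n, 6),
            round(sum(x == 0 for x in arr) / n, 6)]

assert plus_minus([-4, 3, -9, 0, 4, 1]) == [0.5, 0.333333, 0.166667]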
7e2440c00ce75dc3ff0eac53e63d629981a9873a
raven/contrib/celery/__init__.py
raven/contrib/celery/__init__.py
""" raven.contrib.celery ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ try: from celery.task import task except ImportError: from celery.decorators import task from celery.signals import task_failure from raven.base import Client class CeleryMixin(object): def send_encoded(self, message): "Errors through celery" self.send_raw.delay(message) @task(routing_key='sentry') def send_raw(self, message): return super(CeleryMixin, self).send_encoded(message) class CeleryClient(CeleryMixin, Client): pass def register_signal(client): def process_failure_signal(exception, traceback, sender, task_id, signal, args, kwargs, einfo, **kw): exc_info = (type(exception), exception, traceback) client.captureException( exc_info=exc_info, extra={ 'task_id': task_id, 'sender': sender, 'args': args, 'kwargs': kwargs, }) task_failure.connect(process_failure_signal)
""" raven.contrib.celery ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ try: from celery.task import task except ImportError: from celery.decorators import task from celery.signals import task_failure from raven.base import Client class CeleryMixin(object): def send_encoded(self, message): "Errors through celery" self.send_raw.delay(message) @task(routing_key='sentry') def send_raw(self, message): return super(CeleryMixin, self).send_encoded(message) class CeleryClient(CeleryMixin, Client): pass def register_signal(client): @task_failure.connect(weak=False) def process_failure_signal(sender, task_id, exception, args, kwargs, traceback, einfo, **kw): client.captureException( exc_info=einfo.exc_info, extra={ 'task_id': task_id, 'task': sender, 'args': args, 'kwargs': kwargs, })
Fix celery task_failure signal definition
Fix celery task_failure signal definition
Python
bsd-3-clause
lepture/raven-python,recht/raven-python,lepture/raven-python,beniwohli/apm-agent-python,dbravender/raven-python,patrys/opbeat_python,recht/raven-python,jbarbuto/raven-python,getsentry/raven-python,akalipetis/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,patrys/opbeat_python,ewdurbin/raven-python,nikolas/raven-python,daikeren/opbeat_python,arthurlogilab/raven-python,icereval/raven-python,ronaldevers/raven-python,jmp0xf/raven-python,danriti/raven-python,danriti/raven-python,smarkets/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,akheron/raven-python,dbravender/raven-python,smarkets/raven-python,ticosax/opbeat_python,lepture/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,lopter/raven-python-old,icereval/raven-python,ronaldevers/raven-python,johansteffner/raven-python,jmagnusson/raven-python,tarkatronic/opbeat_python,collective/mr.poe,someonehan/raven-python,jbarbuto/raven-python,inspirehep/raven-python,akalipetis/raven-python,dirtycoder/opbeat_python,smarkets/raven-python,arthurlogilab/raven-python,daikeren/opbeat_python,Photonomie/raven-python,inspirehep/raven-python,beniwohli/apm-agent-python,jbarbuto/raven-python,daikeren/opbeat_python,inspirehep/raven-python,danriti/raven-python,someonehan/raven-python,nikolas/raven-python,patrys/opbeat_python,ewdurbin/raven-python,icereval/raven-python,akheron/raven-python,jbarbuto/raven-python,inspirehep/raven-python,beniwohli/apm-agent-python,someonehan/raven-python,hzy/raven-python,percipient/raven-python,openlabs/raven,ticosax/opbeat_python,getsentry/raven-python,tarkatronic/opbeat_python,dirtycoder/opbeat_python,recht/raven-python,jmp0xf/raven-python,jmagnusson/raven-python,arthurlogilab/raven-python,percipient/raven-python,nikolas/raven-python,getsentry/raven-python,ronaldevers/raven-python,johansteffner/raven-python,akheron/raven-python,hzy/raven-python,ewdurbin/raven-python,dirtycoder/opbeat_python,percipient/raven-python,jmagnusson/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,nikolas/raven-python,johansteffner/raven-python,icereval/raven-python,hzy/raven-python,smarkets/raven-python,beniwohli/apm-agent-python,Photonomie/raven-python,Photonomie/raven-python,jmp0xf/raven-python,ticosax/opbeat_python,tarkatronic/opbeat_python,alex/raven,arthurlogilab/raven-python,akalipetis/raven-python,dbravender/raven-python,patrys/opbeat_python
""" raven.contrib.celery ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ try: from celery.task import task except ImportError: from celery.decorators import task from celery.signals import task_failure from raven.base import Client class CeleryMixin(object): def send_encoded(self, message): "Errors through celery" self.send_raw.delay(message) @task(routing_key='sentry') def send_raw(self, message): return super(CeleryMixin, self).send_encoded(message) class CeleryClient(CeleryMixin, Client): pass def register_signal(client): @task_failure.connect(weak=False) def process_failure_signal(sender, task_id, exception, args, kwargs, traceback, einfo, **kw): client.captureException( exc_info=einfo.exc_info, extra={ 'task_id': task_id, 'task': sender, 'args': args, 'kwargs': kwargs, })
Fix celery task_failure signal definition

"""
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~~~~~~~~~

:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
    from celery.task import task
except ImportError:
    from celery.decorators import task
from celery.signals import task_failure
from raven.base import Client


class CeleryMixin(object):
    def send_encoded(self, message):
        "Errors through celery"
        self.send_raw.delay(message)

    @task(routing_key='sentry')
    def send_raw(self, message):
        return super(CeleryMixin, self).send_encoded(message)


class CeleryClient(CeleryMixin, Client):
    pass


def register_signal(client):
    def process_failure_signal(exception, traceback, sender, task_id,
                               signal, args, kwargs, einfo, **kw):
        exc_info = (type(exception), exception, traceback)
        client.captureException(
            exc_info=exc_info,
            extra={
                'task_id': task_id,
                'sender': sender,
                'args': args,
                'kwargs': kwargs,
            })
    task_failure.connect(process_failure_signal)
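Wiring the handler up is a one-time call at worker startup. A hedged usage sketch (the DSN is a placeholder, not a real project key):

from raven import Client
from raven.contrib.celery import register_signal

client = Client('https://public:secret@sentry.example.com/1')
register_signal(client)  # task args/kwargs are attached to each captured failure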
1eedac5229e5a9128c4fbc09f7d7b97a3859e9b9
django_sse/views.py
django_sse/views.py
# -*- coding: utf-8 -*-

from django.views.generic import View
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse

try:
    from django.http import StreamingHttpResponse as HttpResponse
except ImportError:
    from django.http import HttpResponse

from django.utils.decorators import method_decorator

from sse import Sse


class BaseSseView(View):
    """
    This is a base class for sse streaming.
    """

    def get_last_id(self):
        if "HTTP_LAST_EVENT_ID" in self.request.META:
            return self.request.META['HTTP_LAST_EVENT_ID']
        return None

    def _iterator(self):
        for subiterator in self.iterator():
            for bufferitem in self.sse:
                yield bufferitem

    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        self.sse = Sse()

        self.request = request
        self.args = args
        self.kwargs = kwargs

        response = HttpResponse(self._iterator(), content_type="text/event-stream")
        response['Cache-Control'] = 'no-cache'
        response['Software'] = 'django-sse'
        return response

    def iterator(self):
        """
        This is a source of stream.
        Must be use sentence ``yield`` for flush content fon sse object
        to the client.

        Example:

        def iterator(self):
            counter = 0
            while True:
                self.sse.add_message('foo', 'bar')
                self.sse.add_message('bar', 'foo')
                yield
        """
        raise NotImplementedError
# -*- coding: utf-8 -*- from django.views.generic import View from django.views.decorators.csrf import csrf_exempt try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError
Remove duplicate import. (Thanks to MechanisM)
Remove duplicate import. (Thanks to MechanisM)
Python
bsd-3-clause
chadmiller/django-sse,niwinz/django-sse,chadmiller/django-sse
# -*- coding: utf-8 -*- from django.views.generic import View from django.views.decorators.csrf import csrf_exempt try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError
Remove duplicate import. (Thanks to MechanisM) # -*- coding: utf-8 -*- from django.views.generic import View from django.views.decorators.csrf import csrf_exempt from django.http import HttpResponse try: from django.http import StreamingHttpResponse as HttpResponse except ImportError: from django.http import HttpResponse from django.utils.decorators import method_decorator from sse import Sse class BaseSseView(View): """ This is a base class for sse streaming. """ def get_last_id(self): if "HTTP_LAST_EVENT_ID" in self.request.META: return self.request.META['HTTP_LAST_EVENT_ID'] return None def _iterator(self): for subiterator in self.iterator(): for bufferitem in self.sse: yield bufferitem @method_decorator(csrf_exempt) def dispatch(self, request, *args, **kwargs): self.sse = Sse() self.request = request self.args = args self.kwargs = kwargs response = HttpResponse(self._iterator(), content_type="text/event-stream") response['Cache-Control'] = 'no-cache' response['Software'] = 'django-sse' return response def iterator(self): """ This is a source of stream. Must be use sentence ``yield`` for flush content fon sse object to the client. Example: def iterator(self): counter = 0 while True: self.sse.add_message('foo', 'bar') self.sse.add_message('bar', 'foo') yield """ raise NotImplementedError
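Independent of the sse package this view relies on, the wire format it ultimately streams is simple enough to sketch by hand; the function name below is hypothetical.

def sse_frame(event, data):
    # Server-sent events framing: "event:" and "data:" lines, with a
    # blank line terminating each message.
    return 'event: %s\ndata: %s\n\n' % (event, data)

assert sse_frame('foo', 'bar') == 'event: foo\ndata: bar\n\n'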
b635b6f17e8fceba72e48ab074120d3bddd9388d
tools/process_EXO.py
tools/process_EXO.py
# EXO Conversion Script
# Using specification v1.01
# J.C. Loach (2013)

# Textual replacements
remove = {r"\centering":"", r"newline":"", r"tabular":""};

# Main loop through data file
with open("exo_data.txt") as f_in:
    for line in f_in:

        for i, j in remove.iteritems():
            line = line.replace(i,j)

        line = line.split("&")
        line = [i.strip() for i in line]

        with open("EXO_" + line[0].zfill(3) + "_v1.01.json", 'w') as f_out:

            f_out.write("{\n")
            f_out.write("\n")
            f_out.write("    \"type\": \"measurement\",\n")
            f_out.write("\n")
            f_out.write("    \"grouping\": \"EXO(2008)\",\n")
            f_out.write("\n")

            # Sample
            f_out.write("    \"sample\": {\n")
            f_out.write("        \"m_name\": \"" + line[1] + "\",\n")
            f_out.write("        \"m_description\": \"" + line[1] + "\",\n")
            f_out.write("        \"m_id\": \"Table 3 Measurement " + line[0] + "\",\n")
            f_out.write("        \"m_source\": \"\",\n")
            f_out.write("        \"m_owner\": {\n")
            f_out.write("            \"name\": \"\",\n")
            f_out.write("            \"contact\": \"\"\n")
            f_out.write("        }\n")
            f_out.write("    },\n")
            f_out.write("\n")

            # Measurement
            # ...

            # Data source
            # ...

            f_out.write("}\n")
Add tool for converting EXO data from latex to JSON
Add tool for converting EXO data from latex to JSON
Python
apache-2.0
chrisstanford/persephone-darkside,nepahwin/persephone,nepahwin/persephone,chrisstanford/persephone-darkside
# EXO Conversion Script
# Using specification v1.01
# J.C. Loach (2013)

# Textual replacements
remove = {r"\centering":"", r"newline":"", r"tabular":""};

# Main loop through data file
with open("exo_data.txt") as f_in:
    for line in f_in:

        for i, j in remove.iteritems():
            line = line.replace(i,j)

        line = line.split("&")
        line = [i.strip() for i in line]

        with open("EXO_" + line[0].zfill(3) + "_v1.01.json", 'w') as f_out:

            f_out.write("{\n")
            f_out.write("\n")
            f_out.write("    \"type\": \"measurement\",\n")
            f_out.write("\n")
            f_out.write("    \"grouping\": \"EXO(2008)\",\n")
            f_out.write("\n")

            # Sample
            f_out.write("    \"sample\": {\n")
            f_out.write("        \"m_name\": \"" + line[1] + "\",\n")
            f_out.write("        \"m_description\": \"" + line[1] + "\",\n")
            f_out.write("        \"m_id\": \"Table 3 Measurement " + line[0] + "\",\n")
            f_out.write("        \"m_source\": \"\",\n")
            f_out.write("        \"m_owner\": {\n")
            f_out.write("            \"name\": \"\",\n")
            f_out.write("            \"contact\": \"\"\n")
            f_out.write("        }\n")
            f_out.write("    },\n")
            f_out.write("\n")

            # Measurement
            # ...

            # Data source
            # ...

            f_out.write("}\n")
Add tool for converting EXO data from latex to JSON
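The converter above assembles JSON through hand-written f_out.write() calls. A hedged alternative, shown here with field names borrowed from the record and placeholder values, is to build a dict and let the json module handle quoting and escaping.

import json

record = {
    'type': 'measurement',
    'grouping': 'EXO(2008)',
    'sample': {'m_name': 'sample-1', 'm_owner': {'name': '', 'contact': ''}},
}
# json.dumps handles the escaping that hand-written writes must get right.
print(json.dumps(record, indent=4, separators=(',', ': ')))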
234df393c438fdf729dc050d20084e1fe1a4c2ee
backend/mcapi/mcdir.py
backend/mcapi/mcdir.py
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
Change directory where data is written to.
Change directory where data is written to.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
Change directory where data is written to. import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
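The for_uid() helper in this record shards files into a two-level directory fan-out. A standalone sketch of that rule follows; the function name is hypothetical and POSIX path separators are assumed in the final assertion.

import os

def shard_path(root, uidstr):
    # Characters 0:2 and 2:4 of the second dash-separated piece pick
    # the two subdirectory levels, exactly as in for_uid() above.
    piece = uidstr.split('-')[1]
    return os.path.join(root, piece[0:2], piece[2:4])

assert shard_path('/mcfs/data/materialscommons', 'x-abcdef') == \
    '/mcfs/data/materialscommons/ab/cd'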
9502c0e816097cf65fa92c6dd255c3356cf20964
test/api_class_repr_test.py
test/api_class_repr_test.py
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT # All rights reserved. This work is under a BSD license, see LICENSE.TXT. from __future__ import print_function import pytest from .. import jenkins_api from .framework import api_select from .cfg import ApiType @pytest.mark.not_apis(ApiType.MOCK, ApiType.SCRIPT) def test_api_class_repr_job(api_type): api = api_select.api(__file__, api_type, login=True) job = jenkins_api.ApiJob(api, {}, 'my-job') jrd = eval(repr(job)) assert jrd == {'name': 'my-job', 'dct': {}} invocation = jenkins_api.Invocation(job, "http://dummy", 'hello') assert repr(invocation) == "Invocation: 'http://dummy' None None"
Test jenkins_api ApiJob and Invocation classes __repr__ methods
Test jenkins_api ApiJob and Invocation classes __repr__ methods
Python
bsd-3-clause
lhupfeldt/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow,lechat/jenkinsflow,lhupfeldt/jenkinsflow
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT # All rights reserved. This work is under a BSD license, see LICENSE.TXT. from __future__ import print_function import pytest from .. import jenkins_api from .framework import api_select from .cfg import ApiType @pytest.mark.not_apis(ApiType.MOCK, ApiType.SCRIPT) def test_api_class_repr_job(api_type): api = api_select.api(__file__, api_type, login=True) job = jenkins_api.ApiJob(api, {}, 'my-job') jrd = eval(repr(job)) assert jrd == {'name': 'my-job', 'dct': {}} invocation = jenkins_api.Invocation(job, "http://dummy", 'hello') assert repr(invocation) == "Invocation: 'http://dummy' None None"
Test jenkins_api ApiJob and Invocation classes __repr__ methods
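The test in this record leans on an eval(repr(...)) round trip. The toy class below reproduces that contract under a hypothetical name; it makes no claim about how jenkins_api actually implements __repr__ beyond what the assertions above show.

class Job(object):
    def __init__(self, name):
        self.name = name

    def __repr__(self):
        # repr is written so that eval(repr(job)) rebuilds a plain
        # dict, which is the round trip the test asserts.
        return "{'name': %r}" % self.name

assert eval(repr(Job('my-job'))) == {'name': 'my-job'}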
90963666f22bea81d433724d232deaa0f3e2fec1
st2common/st2common/exceptions/db.py
st2common/st2common/exceptions/db.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from st2common.exceptions import StackStormBaseException class StackStormDBObjectNotFoundError(StackStormBaseException): pass class StackStormDBObjectMalformedError(StackStormBaseException): pass
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from st2common.exceptions import StackStormBaseException class StackStormDBObjectNotFoundError(StackStormBaseException): pass class StackStormDBObjectMalformedError(StackStormBaseException): pass class StackStormDBObjectConflictError(StackStormBaseException): """ Exception that captures a DB object conflict error. """ def __init__(self, message, conflict_id): super(StackStormDBObjectConflictError, self).__init__(message) self.conflict_id = conflict_id
Add a special exception for capturing object conflicts.
Add a special exception for capturing object conflicts.
Python
apache-2.0
jtopjian/st2,StackStorm/st2,StackStorm/st2,emedvedev/st2,dennybaa/st2,StackStorm/st2,alfasin/st2,pixelrebel/st2,nzlosh/st2,Itxaka/st2,StackStorm/st2,dennybaa/st2,punalpatel/st2,Plexxi/st2,lakshmi-kannan/st2,lakshmi-kannan/st2,grengojbo/st2,Itxaka/st2,jtopjian/st2,alfasin/st2,punalpatel/st2,peak6/st2,tonybaloney/st2,pinterb/st2,lakshmi-kannan/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,emedvedev/st2,tonybaloney/st2,emedvedev/st2,alfasin/st2,Plexxi/st2,peak6/st2,Plexxi/st2,armab/st2,pinterb/st2,dennybaa/st2,nzlosh/st2,nzlosh/st2,Itxaka/st2,jtopjian/st2,pixelrebel/st2,armab/st2,peak6/st2,pixelrebel/st2,pinterb/st2,nzlosh/st2,grengojbo/st2,grengojbo/st2,armab/st2
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from st2common.exceptions import StackStormBaseException class StackStormDBObjectNotFoundError(StackStormBaseException): pass class StackStormDBObjectMalformedError(StackStormBaseException): pass class StackStormDBObjectConflictError(StackStormBaseException): """ Exception that captures a DB object conflict error. """ def __init__(self, message, conflict_id): super(StackStormDBObjectConflictError, self).__init__(message) self.conflict_id = conflict_id
Add a special exception for capturing object conflicts. # Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from st2common.exceptions import StackStormBaseException class StackStormDBObjectNotFoundError(StackStormBaseException): pass class StackStormDBObjectMalformedError(StackStormBaseException): pass
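The new exception in this record carries the conflicting object's id alongside the message. A minimal framework-free sketch of the same shape, with hypothetical names and a fake id:

class ConflictError(Exception):
    def __init__(self, message, conflict_id):
        super(ConflictError, self).__init__(message)
        self.conflict_id = conflict_id  # id of the clashing object

try:
    raise ConflictError('object already exists', 'abc123')  # fake id
except ConflictError as e:
    print(e.conflict_id)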
b0d13f4f6332e18390a1d8e0152e55b8fb2e780e
sdntest/examples/customtopo/triangle.py
sdntest/examples/customtopo/triangle.py
"""Custom topology example s1---s2 | / | / | / s3 Consist of three fixed core switches, and each core switches will connect to m hosts through n switches. """ from mininet.topo import Topo from optparse import OptionParser class MyTopo( Topo ): "Simple topology example." # def __init__( self ): def build( self, m=1, n=1 ): "Create custom topo." # Initialize topology #Topo.__init__( self ) switch_index = 1 host_index = 1 # core = ['space'] switch = ['space'] host = ['space'] # parser = OptionParser() # parser.add_option("-m", action="store", type="int", dest="m") # parser.add_option("-n", action="store", type="int", dest="n") # (options, args) = parser,parse_args() # print options.m # print options.n #m = 2 #n = 2 CORE_NUMBER = 3 for i in range(1, CORE_NUMBER+1): switch.append(self.addSwitch( 's'+str(switch_index) )) switch_index = switch_index + 1 for k in range(1, CORE_NUMBER+1): if (k==CORE_NUMBER): self.addLink( switch[k], switch[1] ) else: self.addLink( switch[k], switch[k+1] ) for i in range(1,m+1): for j in range(1,n+1): switch.append(self.addSwitch( 's'+str(switch_index) )) if (j==1): self.addLink( switch[k],switch[switch_index] ) else: self.addLink( switch[switch_index-1],switch[switch_index]) switch_index = switch_index + 1 host.append(self.addHost( 'h'+str(host_index))) self.addLink( host[host_index], switch[switch_index-1]) host_index = host_index + 1 print "total_switches=%u"%(switch_index-1+3) print "total_hosts=%u"%(host_index-1) print "total_nodes=%u"%(switch_index-1+3+host_index-1) topos = { 'mytopo': ( lambda m,n:MyTopo(m, n) ) }
Add a custom topology example
Add a custom topology example
Python
mit
snlab-freedom/sdntest,snlab-freedom/sdntest
"""Custom topology example s1---s2 | / | / | / s3 Consist of three fixed core switches, and each core switches will connect to m hosts through n switches. """ from mininet.topo import Topo from optparse import OptionParser class MyTopo( Topo ): "Simple topology example." # def __init__( self ): def build( self, m=1, n=1 ): "Create custom topo." # Initialize topology #Topo.__init__( self ) switch_index = 1 host_index = 1 # core = ['space'] switch = ['space'] host = ['space'] # parser = OptionParser() # parser.add_option("-m", action="store", type="int", dest="m") # parser.add_option("-n", action="store", type="int", dest="n") # (options, args) = parser,parse_args() # print options.m # print options.n #m = 2 #n = 2 CORE_NUMBER = 3 for i in range(1, CORE_NUMBER+1): switch.append(self.addSwitch( 's'+str(switch_index) )) switch_index = switch_index + 1 for k in range(1, CORE_NUMBER+1): if (k==CORE_NUMBER): self.addLink( switch[k], switch[1] ) else: self.addLink( switch[k], switch[k+1] ) for i in range(1,m+1): for j in range(1,n+1): switch.append(self.addSwitch( 's'+str(switch_index) )) if (j==1): self.addLink( switch[k],switch[switch_index] ) else: self.addLink( switch[switch_index-1],switch[switch_index]) switch_index = switch_index + 1 host.append(self.addHost( 'h'+str(host_index))) self.addLink( host[host_index], switch[switch_index-1]) host_index = host_index + 1 print "total_switches=%u"%(switch_index-1+3) print "total_hosts=%u"%(host_index-1) print "total_nodes=%u"%(switch_index-1+3+host_index-1) topos = { 'mytopo': ( lambda m,n:MyTopo(m, n) ) }
Add a custom topology example
d041ab4a09da6a2181e1b14f3d0f323ed9c29c6f
applications/templatetags/applications_tags.py
applications/templatetags/applications_tags.py
# -*- encoding: utf-8 -*- from django import template from applications.models import Score register = template.Library() @register.filter def scored_by_user(value, arg): try: score = Score.objects.get(application=value, user=arg) return True if score.score else False except Score.DoesNotExist: return False @register.simple_tag def display_sorting_arrow(name, current_order): is_reversed = False if '-{}'.format(name) == current_order: is_reversed = True if is_reversed: return '<a href="?order={}">▼</a>'.format(name) else: return '<a href="?order=-{}">▲</a>'.format(name)
# -*- encoding: utf-8 -*- from django import template register = template.Library() @register.filter def scored_by_user(application, user): return application.is_scored_by_user(user) @register.simple_tag def display_sorting_arrow(name, current_order): is_reversed = False if '-{}'.format(name) == current_order: is_reversed = True if is_reversed: return '<a href="?order={}">▼</a>'.format(name) else: return '<a href="?order=-{}">▲</a>'.format(name)
Make scored_by_user filter call model method
Make scored_by_user filter call model method Ref #113
Python
bsd-3-clause
DjangoGirls/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,patjouk/djangogirls,patjouk/djangogirls
# -*- encoding: utf-8 -*- from django import template register = template.Library() @register.filter def scored_by_user(application, user): return application.is_scored_by_user(user) @register.simple_tag def display_sorting_arrow(name, current_order): is_reversed = False if '-{}'.format(name) == current_order: is_reversed = True if is_reversed: return '<a href="?order={}">▼</a>'.format(name) else: return '<a href="?order=-{}">▲</a>'.format(name)
Make scored_by_user filter call model method Ref #113 # -*- encoding: utf-8 -*- from django import template from applications.models import Score register = template.Library() @register.filter def scored_by_user(value, arg): try: score = Score.objects.get(application=value, user=arg) return True if score.score else False except Score.DoesNotExist: return False @register.simple_tag def display_sorting_arrow(name, current_order): is_reversed = False if '-{}'.format(name) == current_order: is_reversed = True if is_reversed: return '<a href="?order={}">▼</a>'.format(name) else: return '<a href="?order=-{}">▲</a>'.format(name)
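The display_sorting_arrow tag encodes a common ordering toggle: a leading '-' marks the column as currently reverse-sorted. Extracted into a plain function with a hypothetical name, the logic is:

def toggle_order(name, current_order):
    if '-{}'.format(name) == current_order:
        return name            # currently descending, flip to ascending
    return '-{}'.format(name)  # otherwise offer descending

assert toggle_order('score', '-score') == 'score'
assert toggle_order('score', 'score') == '-score'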
017d33a8fdcf55272613550c5360a998f201ad3d
services/gunicorn_conf.py
services/gunicorn_conf.py
import multiprocessing preload_app = True workers = multiprocessing.cpu_count() * 2 + 1 worker_class = 'gevent' keepalive = 60 timeout = 900 max_requests = 600 # defaults to 30 sec, setting to 5 minutes to fight `GreenletExit`s graceful_timeout = 5*60 # cryptically, setting forwarded_allow_ips (to the ip of the hqproxy0) # gets gunicorn to set https on redirects when appropriate. See: # http://docs.gunicorn.org/en/latest/configure.html#secure-scheme-headers # http://docs.gunicorn.org/en/latest/configure.html#forwarded-allow-ips forwarded_allow_ips = '10.176.162.109' def post_fork(server, worker): # hacky way to address gunicorn gevent requests hitting django too early before urls are loaded # see: https://github.com/benoitc/gunicorn/issues/527#issuecomment-19601046 from django.core.urlresolvers import resolve resolve('/')
import multiprocessing preload_app = True workers = multiprocessing.cpu_count() * 2 + 1 worker_class = 'gevent' keepalive = 60 timeout = 900 max_requests = 120 # defaults to 30 sec, setting to 5 minutes to fight `GreenletExit`s graceful_timeout = 5*60 # cryptically, setting forwarded_allow_ips (to the ip of the hqproxy0) # gets gunicorn to set https on redirects when appropriate. See: # http://docs.gunicorn.org/en/latest/configure.html#secure-scheme-headers # http://docs.gunicorn.org/en/latest/configure.html#forwarded-allow-ips forwarded_allow_ips = '10.176.162.109' def post_fork(server, worker): # hacky way to address gunicorn gevent requests hitting django too early before urls are loaded # see: https://github.com/benoitc/gunicorn/issues/527#issuecomment-19601046 from django.core.urlresolvers import resolve resolve('/')
Revert "bump gunicorn max_requests to 600"
Revert "bump gunicorn max_requests to 600" This reverts commit ffbfe0d6f2ca83346693a788b14562eb332d0cbd.
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq
import multiprocessing preload_app = True workers = multiprocessing.cpu_count() * 2 + 1 worker_class = 'gevent' keepalive = 60 timeout = 900 max_requests = 120 # defaults to 30 sec, setting to 5 minutes to fight `GreenletExit`s graceful_timeout = 5*60 # cryptically, setting forwarded_allow_ips (to the ip of the hqproxy0) # gets gunicorn to set https on redirects when appropriate. See: # http://docs.gunicorn.org/en/latest/configure.html#secure-scheme-headers # http://docs.gunicorn.org/en/latest/configure.html#forwarded-allow-ips forwarded_allow_ips = '10.176.162.109' def post_fork(server, worker): # hacky way to address gunicorn gevent requests hitting django too early before urls are loaded # see: https://github.com/benoitc/gunicorn/issues/527#issuecomment-19601046 from django.core.urlresolvers import resolve resolve('/')
Revert "bump gunicorn max_requests to 600" This reverts commit ffbfe0d6f2ca83346693a788b14562eb332d0cbd. import multiprocessing preload_app = True workers = multiprocessing.cpu_count() * 2 + 1 worker_class = 'gevent' keepalive = 60 timeout = 900 max_requests = 600 # defaults to 30 sec, setting to 5 minutes to fight `GreenletExit`s graceful_timeout = 5*60 # cryptically, setting forwarded_allow_ips (to the ip of the hqproxy0) # gets gunicorn to set https on redirects when appropriate. See: # http://docs.gunicorn.org/en/latest/configure.html#secure-scheme-headers # http://docs.gunicorn.org/en/latest/configure.html#forwarded-allow-ips forwarded_allow_ips = '10.176.162.109' def post_fork(server, worker): # hacky way to address gunicorn gevent requests hitting django too early before urls are loaded # see: https://github.com/benoitc/gunicorn/issues/527#issuecomment-19601046 from django.core.urlresolvers import resolve resolve('/')
2f152c5036d32a780741edd8fb6ce75684728824
singleuser/user-config.py
singleuser/user-config.py
import os mylang = 'test' family = 'wikipedia' # Not defining any extra variables here at all since that causes pywikibot # to issue a warning about potential misspellings if os.path.exists(os.path.expanduser('~/user-config.py')): with open(os.path.expanduser('~/user-config.py'), 'r') as f: exec( compile(f.read(), os.path.expanduser('~/user-config.py'), 'exec'), globals()) # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
Revert "Do not introduce extra variables"
Revert "Do not introduce extra variables" Since the 'f' is considered an extra variable and introduces a warning anyway :( Let's fix this the right way This reverts commit a03de68fb772d859098327d0e54a219fe4507072.
Python
mit
yuvipanda/paws,yuvipanda/paws
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
Revert "Do not introduce extra variables" Since the 'f' is considered an extra variable and introduces a warning anyway :( Let's fix this the right way This reverts commit a03de68fb772d859098327d0e54a219fe4507072. import os mylang = 'test' family = 'wikipedia' # Not defining any extra variables here at all since that causes pywikibot # to issue a warning about potential misspellings if os.path.exists(os.path.expanduser('~/user-config.py')): with open(os.path.expanduser('~/user-config.py'), 'r') as f: exec( compile(f.read(), os.path.expanduser('~/user-config.py'), 'exec'), globals()) # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
1b42dc4d49ccbef9b2ed4bd31e8bb32b597a3575
oscar/agent/scripted/minigame/nicolas_mineralshard.py
oscar/agent/scripted/minigame/nicolas_mineralshard.py
import numpy from pysc2.agents import base_agent from pysc2.lib import actions from pysc2.lib import features _PLAYER_RELATIVE = features.SCREEN_FEATURES.player_relative.index _PLAYER_FRIENDLY = 1 _PLAYER_NEUTRAL = 3 # beacon/minerals _PLAYER_HOSTILE = 4 _NO_OP = actions.FUNCTIONS.no_op.id _MOVE_SCREEN = actions.FUNCTIONS.Move_screen.id _ATTACK_SCREEN = actions.FUNCTIONS.Attack_screen.id _SELECT_ARMY = actions.FUNCTIONS.select_army.id _SELECT_POINT = actions.FUNCTIONS.select_point.id _NOT_QUEUED = [0] _SELECT_ALL = [0] _NEW_SELECTION = [0] class CollectMineralShards(base_agent.BaseAgent): """An agent specifically for solving the CollectMineralShards map.""" def step(self, obs): super(CollectMineralShards, self).step(obs) player_relative = obs.observation["screen"][_PLAYER_RELATIVE] if _MOVE_SCREEN in obs.observation["available_actions"]: neutral_y, neutral_x = (player_relative == _PLAYER_NEUTRAL).nonzero() player_y, player_x = (player_relative == _PLAYER_FRIENDLY).nonzero() if not neutral_y.any() or not player_y.any(): return actions.FunctionCall(_NO_OP, []) player = [int(player_x.mean()), int(player_y.mean())] closest, min_dist = None, None for p in zip(neutral_x, neutral_y): dist = numpy.linalg.norm(numpy.array(player) - numpy.array(p)) if not min_dist or dist < min_dist: closest, min_dist = p, dist return actions.FunctionCall(_MOVE_SCREEN, [_NOT_QUEUED, closest]) else: player_y, player_x = (player_relative == _PLAYER_FRIENDLY).nonzero() return actions.FunctionCall(_SELECT_POINT, [_NEW_SELECTION, [player_x[0], player_y[0]]])
Create a new scripted agent: copy from the DeepMind one but do not select the two marines, only one
Create a new scripted agent: copy from the DeepMind one but do not select the two marines, only one
Python
apache-2.0
Xaxetrov/OSCAR,Xaxetrov/OSCAR
import numpy from pysc2.agents import base_agent from pysc2.lib import actions from pysc2.lib import features _PLAYER_RELATIVE = features.SCREEN_FEATURES.player_relative.index _PLAYER_FRIENDLY = 1 _PLAYER_NEUTRAL = 3 # beacon/minerals _PLAYER_HOSTILE = 4 _NO_OP = actions.FUNCTIONS.no_op.id _MOVE_SCREEN = actions.FUNCTIONS.Move_screen.id _ATTACK_SCREEN = actions.FUNCTIONS.Attack_screen.id _SELECT_ARMY = actions.FUNCTIONS.select_army.id _SELECT_POINT = actions.FUNCTIONS.select_point.id _NOT_QUEUED = [0] _SELECT_ALL = [0] _NEW_SELECTION = [0] class CollectMineralShards(base_agent.BaseAgent): """An agent specifically for solving the CollectMineralShards map.""" def step(self, obs): super(CollectMineralShards, self).step(obs) player_relative = obs.observation["screen"][_PLAYER_RELATIVE] if _MOVE_SCREEN in obs.observation["available_actions"]: neutral_y, neutral_x = (player_relative == _PLAYER_NEUTRAL).nonzero() player_y, player_x = (player_relative == _PLAYER_FRIENDLY).nonzero() if not neutral_y.any() or not player_y.any(): return actions.FunctionCall(_NO_OP, []) player = [int(player_x.mean()), int(player_y.mean())] closest, min_dist = None, None for p in zip(neutral_x, neutral_y): dist = numpy.linalg.norm(numpy.array(player) - numpy.array(p)) if not min_dist or dist < min_dist: closest, min_dist = p, dist return actions.FunctionCall(_MOVE_SCREEN, [_NOT_QUEUED, closest]) else: player_y, player_x = (player_relative == _PLAYER_FRIENDLY).nonzero() return actions.FunctionCall(_SELECT_POINT, [_NEW_SELECTION, [player_x[0], player_y[0]]])
Create a new scripted agent: copy from the DeepMind one but do not select the two marines, only one
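The heart of this agent is a nearest-target scan over candidate coordinates. Pulled out into a standalone helper (hypothetical name, requires numpy), it reads:

import numpy

def closest(origin, xs, ys):
    best, best_dist = None, None
    for p in zip(xs, ys):
        # Euclidean distance from the origin to each candidate point.
        dist = numpy.linalg.norm(numpy.array(origin) - numpy.array(p))
        if best_dist is None or dist < best_dist:
            best, best_dist = p, dist
    return best

print(closest([0, 0], [3, 1], [4, 1]))  # prints (1, 1)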
511522f2e0d6399191d79e393ed6f14d3a843550
range_ghost_test.py
range_ghost_test.py
from dtest import Tester
from tools import *
from assertions import *

import os, sys, time
from ccmlib.cluster import Cluster

class TestRangeGhosts(Tester):

    def ghosts_test(self):
        """ Check range ghosts are correctly removed by the system """
        cluster = self.cluster

        cluster.populate(1).start()
        [node1] = cluster.nodelist()
        time.sleep(.5)
        cursor = self.cql_connection(node1).cursor()
        self.create_ks(cursor, 'ks', 1)
        self.create_cf(cursor, 'cf', gc_grace=0)

        rows = 1000

        for i in xrange(0, rows):
            cursor.execute("UPDATE cf SET c = 'value' WHERE key = k%i" % i)

        cursor.execute("SELECT * FROM cf LIMIT 10000")
        res = cursor.fetchall()
        assert len(res) == rows, res

        node1.flush()

        for i in xrange(0, rows/2):
            cursor.execute("DELETE FROM cf WHERE key = k%i" % i)

        cursor.execute("SELECT * FROM cf LIMIT 10000")
        res = cursor.fetchall()
        assert len(res) == rows, res

        node1.flush()
        time.sleep(1) # make sure tombstones are collected
        node1.compact()

        cursor.execute("SELECT * FROM cf LIMIT 10000")
        res = cursor.fetchall()
        assert len(res) == rows/2, res
Add test to check range ghosts are removed
Add test to check range ghosts are removed
Python
apache-2.0
thobbs/cassandra-dtest,snazy/cassandra-dtest,carlyeks/cassandra-dtest,krummas/cassandra-dtest,beobal/cassandra-dtest,thobbs/cassandra-dtest,tjake/cassandra-dtest,pcmanus/cassandra-dtest,spodkowinski/cassandra-dtest,aweisberg/cassandra-dtest,stef1927/cassandra-dtest,pauloricardomg/cassandra-dtest,snazy/cassandra-dtest,mambocab/cassandra-dtest,beobal/cassandra-dtest,spodkowinski/cassandra-dtest,yukim/cassandra-dtest,aweisberg/cassandra-dtest,riptano/cassandra-dtest,iamaleksey/cassandra-dtest,blerer/cassandra-dtest,bdeggleston/cassandra-dtest,mambocab/cassandra-dtest,blerer/cassandra-dtest,pauloricardomg/cassandra-dtest,bdeggleston/cassandra-dtest,riptano/cassandra-dtest,josh-mckenzie/cassandra-dtest,krummas/cassandra-dtest,stef1927/cassandra-dtest,iamaleksey/cassandra-dtest,carlyeks/cassandra-dtest
from dtest import Tester
from tools import *
from assertions import *

import os, sys, time
from ccmlib.cluster import Cluster

class TestRangeGhosts(Tester):

    def ghosts_test(self):
        """ Check range ghosts are correctly removed by the system """
        cluster = self.cluster

        cluster.populate(1).start()
        [node1] = cluster.nodelist()
        time.sleep(.5)
        cursor = self.cql_connection(node1).cursor()
        self.create_ks(cursor, 'ks', 1)
        self.create_cf(cursor, 'cf', gc_grace=0)

        rows = 1000

        for i in xrange(0, rows):
            cursor.execute("UPDATE cf SET c = 'value' WHERE key = k%i" % i)

        cursor.execute("SELECT * FROM cf LIMIT 10000")
        res = cursor.fetchall()
        assert len(res) == rows, res

        node1.flush()

        for i in xrange(0, rows/2):
            cursor.execute("DELETE FROM cf WHERE key = k%i" % i)

        cursor.execute("SELECT * FROM cf LIMIT 10000")
        res = cursor.fetchall()
        assert len(res) == rows, res

        node1.flush()
        time.sleep(1) # make sure tombstones are collected
        node1.compact()

        cursor.execute("SELECT * FROM cf LIMIT 10000")
        res = cursor.fetchall()
        assert len(res) == rows/2, res
Add test to check range ghosts are removed
cd003fa1d57b442d6889442d0b1815fc3312505c
toolbox/replicate_graph.py
toolbox/replicate_graph.py
import sys import commentjson as json import os import argparse import numpy as np import copy sys.path.append('../.') sys.path.append('.') from progressbar import ProgressBar if __name__ == "__main__": parser = argparse.ArgumentParser(description='Replicate nodes, links, divisions and exclusion sets N times, ' \ 'so that the total number of timeframes does not change', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--model', required=True, type=str, dest='model_filename', help='Filename of the json model description') parser.add_argument('--output', required=True, type=str, dest='result_filename', help='Filename of the json file that will hold the replicated model') parser.add_argument('--num', type=int, dest='num', default=2, help='how many instances of the original model shall be present in the result file') args = parser.parse_args() print("Loading model file: " + args.model_filename) with open(args.model_filename, 'r') as f: model = json.load(f) segmentationHypotheses = model['segmentationHypotheses'] # use generator expression instead of list comprehension, we only need it once! maxId = max((i['id'] for i in segmentationHypotheses)) newModel = copy.deepcopy(model) for i in range(1, args.num): offset = i * (maxId + 1000000) # create random gap in IDs for seg in segmentationHypotheses: newSeg = copy.deepcopy(seg) newSeg['id'] = offset + newSeg['id'] newModel['segmentationHypotheses'].append(newSeg) linkingHypotheses = model['linkingHypotheses'] for link in linkingHypotheses: newLink = copy.deepcopy(link) newLink['src'] = offset + newLink['src'] newLink['dest'] = offset + newLink['dest'] newModel['linkingHypotheses'].append(newLink) if 'exclusions' in model: for e in model['exclusions']: newExclusion = [x + offset for x in e] newModel['exclusions'].append(newExclusion) if 'divisions' in model: for d in model['divisions']: newDiv = copy.deepcopy(d) newDiv['parent'] = offset + d['parent'] newDiv['children'] = [offset + c for c in d['children']] newModel['divisions'].append(newDiv) with open(args.result_filename, 'w') as f: json.dump(newModel, f, indent=4, separators=(',', ': '))
Add script to artificially increase the size of graphs by replicating all nodes and their links
Add script to artificially increase the size of graphs by replicating all nodes and their links
Python
mit
chaubold/hytra,chaubold/hytra,chaubold/hytra
import sys import commentjson as json import os import argparse import numpy as np import copy sys.path.append('../.') sys.path.append('.') from progressbar import ProgressBar if __name__ == "__main__": parser = argparse.ArgumentParser(description='Replicate nodes, links, divisions and exclusion sets N times, ' \ 'so that the total number of timeframes does not change', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--model', required=True, type=str, dest='model_filename', help='Filename of the json model description') parser.add_argument('--output', required=True, type=str, dest='result_filename', help='Filename of the json file that will hold the replicated model') parser.add_argument('--num', type=int, dest='num', default=2, help='how many instances of the original model shall be present in the result file') args = parser.parse_args() print("Loading model file: " + args.model_filename) with open(args.model_filename, 'r') as f: model = json.load(f) segmentationHypotheses = model['segmentationHypotheses'] # use generator expression instead of list comprehension, we only need it once! maxId = max((i['id'] for i in segmentationHypotheses)) newModel = copy.deepcopy(model) for i in range(1, args.num): offset = i * (maxId + 1000000) # create random gap in IDs for seg in segmentationHypotheses: newSeg = copy.deepcopy(seg) newSeg['id'] = offset + newSeg['id'] newModel['segmentationHypotheses'].append(newSeg) linkingHypotheses = model['linkingHypotheses'] for link in linkingHypotheses: newLink = copy.deepcopy(link) newLink['src'] = offset + newLink['src'] newLink['dest'] = offset + newLink['dest'] newModel['linkingHypotheses'].append(newLink) if 'exclusions' in model: for e in model['exclusions']: newExclusion = [x + offset for x in e] newModel['exclusions'].append(newExclusion) if 'divisions' in model: for d in model['divisions']: newDiv = copy.deepcopy(d) newDiv['parent'] = offset + d['parent'] newDiv['children'] = [offset + c for c in d['children']] newModel['divisions'].append(newDiv) with open(args.result_filename, 'w') as f: json.dump(newModel, f, indent=4, separators=(',', ': '))
Add script to artificially increase the size of graphs by replicating all nodes and their links
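The id-offsetting trick in this script is worth isolating: each replica's ids are shifted well past the current maximum so copies can never collide with the original or with each other. A reduced sketch, keeping only the id field and using a hypothetical helper name:

import copy

def replicate_ids(nodes, copies, gap=1000000):
    max_id = max(n['id'] for n in nodes)
    out = copy.deepcopy(nodes)
    for i in range(1, copies):
        offset = i * (max_id + gap)  # far past any existing id
        out.extend({'id': n['id'] + offset} for n in nodes)
    return out

print(replicate_ids([{'id': 1}, {'id': 2}], 2))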
cca106b4cb647e82838deb359cf6f9ef813992a9
dbaas/integrations/credentials/admin/integration_credential.py
dbaas/integrations/credentials/admin/integration_credential.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("user","endpoint",) save_on_top = True
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("endpoint","user",) save_on_top = True
Change field order at integration credential admin index page
Change field order at integration credential admin index page
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("endpoint","user",) save_on_top = True
Change field order at integration credential admin index page # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin class IntegrationCredentialAdmin(admin.ModelAdmin): search_fields = ("endpoint",) list_display = ("user","endpoint",) save_on_top = True
d49b23365a972931502329f47a3aa65b9170477e
openstack/common/middleware/catch_errors.py
openstack/common/middleware/catch_errors.py
# Copyright (c) 2013 NEC Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that provides high-level error handling. It catches all exceptions from subsequent applications in WSGI pipeline to hide internal errors from API response. """ import webob.dec import webob.exc from openstack.common.gettextutils import _ # noqa from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
# Copyright (c) 2013 NEC Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that provides high-level error handling. It catches all exceptions from subsequent applications in WSGI pipeline to hide internal errors from API response. """ import webob.dec import webob.exc from openstack.common.gettextutils import _LE from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_LE('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
Update oslo log messages with translation domains
Update oslo log messages with translation domains Update the incubator code to use different domains for log messages at different levels. Update the import exceptions setting for hacking to allow multiple functions to be imported from gettextutils on one line. bp log-messages-translation-domain Change-Id: I6ce0f4a59438612ce74c46b3ee9398bef24c0c19
Python
apache-2.0
varunarya10/oslo.middleware,openstack/oslo.middleware,chungg/oslo.middleware,JioCloud/oslo.middleware
# Copyright (c) 2013 NEC Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that provides high-level error handling. It catches all exceptions from subsequent applications in WSGI pipeline to hide internal errors from API response. """ import webob.dec import webob.exc from openstack.common.gettextutils import _LE from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_LE('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
Update oslo log messages with translation domains Update the incubator code to use different domains for log messages at different levels. Update the import exceptions setting for hacking to allow multiple functions to be imported from gettextutils on one line. bp log-messages-translation-domain Change-Id: I6ce0f4a59438612ce74c46b3ee9398bef24c0c19 # Copyright (c) 2013 NEC Corporation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Middleware that provides high-level error handling. It catches all exceptions from subsequent applications in WSGI pipeline to hide internal errors from API response. """ import webob.dec import webob.exc from openstack.common.gettextutils import _ # noqa from openstack.common import log as logging from openstack.common.middleware import base LOG = logging.getLogger(__name__) class CatchErrorsMiddleware(base.Middleware): @webob.dec.wsgify def __call__(self, req): try: response = req.get_response(self.application) except Exception: LOG.exception(_('An error occurred during ' 'processing the request: %s')) response = webob.exc.HTTPInternalServerError() return response
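Stripped of the oslo base class and logging, the middleware in this record reduces to a small catch-all WSGI wrapper. The sketch below assumes only that the webob package is available; the class name is hypothetical.

import webob.dec
import webob.exc

class CatchAll(object):
    def __init__(self, application):
        self.application = application

    @webob.dec.wsgify
    def __call__(self, req):
        try:
            return req.get_response(self.application)
        except Exception:
            # Any failure in the wrapped app becomes a plain 500.
            return webob.exc.HTTPInternalServerError()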
dfdac5764236ce9301e7997443b6de4a7a4b4473
scripts/convert_gml_to_csv.py
scripts/convert_gml_to_csv.py
import sys import os sys.path.append(os.path.abspath(os.path.curdir)) from converter import gml_to_node_edge_list if __name__ == '__main__': in_file = sys.argv[1] res = gml_to_node_edge_list(in_file, routing=True)
import sys import os sys.path.append(os.path.abspath(os.path.curdir)) from converter import gml_to_node_edge_list if __name__ == '__main__': in_file = sys.argv[1] outfile = sys.argv[2] if len(sys.argv) > 2 else None res = gml_to_node_edge_list(in_file, outfile=outfile, routing=True)
Add outfile option to conversion script
Add outfile option to conversion script
Python
mit
gaberosser/geo-network
import sys import os sys.path.append(os.path.abspath(os.path.curdir)) from converter import gml_to_node_edge_list if __name__ == '__main__': in_file = sys.argv[1] outfile = sys.argv[2] if len(sys.argv) > 2 else None res = gml_to_node_edge_list(in_file, outfile=outfile, routing=True)
Add outfile option to conversion script import sys import os sys.path.append(os.path.abspath(os.path.curdir)) from converter import gml_to_node_edge_list if __name__ == '__main__': in_file = sys.argv[1] res = gml_to_node_edge_list(in_file, routing=True)
4a5dd598f689425aa89541ce890ec15aa7592543
dragonfire/tts/__init__.py
dragonfire/tts/__init__.py
import csv class Synthesizer(): def __init__(self): self.word_map = {} filename = "../../dictionaries/VoxForgeDict" for line in csv.reader(open(filename), delimiter=' ', skipinitialspace=True): if len(line) > 2: self.word_map[line[0]] = line[2:] print len(self.word_map) def string_to_phonemes(self, string): string = string.upper() string = string.replace('.','') string = string.replace(',','') words = string.split() result = "" for word in words: print self.word_map[word]
Add the function for parsing strings to phonemes
Add the function for parsing strings to phonemes
Python
mit
DragonComputer/Dragonfire,DragonComputer/Dragonfire,DragonComputer/Dragonfire,mertyildiran/Dragonfire,mertyildiran/Dragonfire
import csv class Synthesizer(): def __init__(self): self.word_map = {} filename = "../../dictionaries/VoxForgeDict" for line in csv.reader(open(filename), delimiter=' ', skipinitialspace=True): if len(line) > 2: self.word_map[line[0]] = line[2:] print len(self.word_map) def string_to_phonemes(self, string): string = string.upper() string = string.replace('.','') string = string.replace(',','') words = string.split() result = "" for word in words: print self.word_map[word]
Add the function for parsing strings to phonemes
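The lookup this synthesizer performs is easy to demonstrate without the VoxForge dictionary: normalise the text the same way, then map each word through a phoneme table. The dictionary entry below is a toy stand-in, not real VoxForge data.

word_map = {'HELLO': ['HH', 'AH0', 'L', 'OW1']}  # toy stand-in entry

def to_phonemes(text):
    # Same normalisation as string_to_phonemes: uppercase, strip
    # periods and commas, then look up each word.
    text = text.upper().replace('.', '').replace(',', '')
    return [word_map[w] for w in text.split()]

print(to_phonemes('hello'))  # prints [['HH', 'AH0', 'L', 'OW1']]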
a2572d38eeaa7c004142a194b18fd6fdfff99f9a
test/test_translate.py
test/test_translate.py
from Bio import SeqIO import logging import unittest from select_taxa import select_genomes_by_ids import translate class Test(unittest.TestCase): def setUp(self): self.longMessage = True logging.root.setLevel(logging.DEBUG) def test_translate_genomes(self): # Select genomes genomes = select_genomes_by_ids(['13305.1']).values() # Call translate dnafiles, aafiles = translate.translate_genomes(genomes) # Verify expected output first_header = '13305.1|NC_008253.1|YP_667942.1|None|thr' first = next(SeqIO.parse(dnafiles[0], 'fasta')) self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id)
from Bio import SeqIO import logging import unittest from select_taxa import select_genomes_by_ids import translate class Test(unittest.TestCase): def setUp(self): self.longMessage = True logging.root.setLevel(logging.DEBUG) def test_translate_genomes(self): # Select genomes genomes = select_genomes_by_ids(['13305.1']).values() # Call translate dnafiles, aafiles = translate.translate_genomes(genomes) # Verify expected output first_header = '13305.1|NC_008253.1|YP_667942.1|None|thr' first = next(SeqIO.parse(dnafiles[0], 'fasta')) self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id) # Verify no header appears twice headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] self.assertEqual(len(headers), len(set(headers))) def test_translate_93125_2(self): # Select genomes genomes = select_genomes_by_ids(['93125.2']).values() # Call translate aafiles = translate.translate_genomes(genomes)[1] # Verify no header appears twice headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] self.assertEqual(len(headers), len(set(headers)))
Verify no header appears twice when translating 93125.2
Verify no header appears twice when translating 93125.2
Python
mit
ODoSE/odose.nl
from Bio import SeqIO import logging import unittest from select_taxa import select_genomes_by_ids import translate class Test(unittest.TestCase): def setUp(self): self.longMessage = True logging.root.setLevel(logging.DEBUG) def test_translate_genomes(self): # Select genomes genomes = select_genomes_by_ids(['13305.1']).values() # Call translate dnafiles, aafiles = translate.translate_genomes(genomes) # Verify expected output first_header = '13305.1|NC_008253.1|YP_667942.1|None|thr' first = next(SeqIO.parse(dnafiles[0], 'fasta')) self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id) # Verify no header appears twice headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] self.assertEqual(len(headers), len(set(headers))) def test_translate_93125_2(self): # Select genomes genomes = select_genomes_by_ids(['93125.2']).values() # Call translate aafiles = translate.translate_genomes(genomes)[1] # Verify no header appears twice headers = [record.id for record in SeqIO.parse(aafiles[0], 'fasta')] self.assertEqual(len(headers), len(set(headers)))
Verify no header appears twice when translating 93125.2 from Bio import SeqIO import logging import unittest from select_taxa import select_genomes_by_ids import translate class Test(unittest.TestCase): def setUp(self): self.longMessage = True logging.root.setLevel(logging.DEBUG) def test_translate_genomes(self): # Select genomes genomes = select_genomes_by_ids(['13305.1']).values() # Call translate dnafiles, aafiles = translate.translate_genomes(genomes) # Verify expected output first_header = '13305.1|NC_008253.1|YP_667942.1|None|thr' first = next(SeqIO.parse(dnafiles[0], 'fasta')) self.assertEqual(first_header, first.id) first = next(SeqIO.parse(aafiles[0], 'fasta')) self.assertEqual(first_header, first.id)
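The duplicate-header assertion added in this record is a one-liner worth calling out: a set drops duplicates, so comparing lengths detects any repeat.

headers = ['rec1', 'rec2', 'rec1']  # illustrative values
assert len(headers) != len(set(headers))  # 'rec1' repeats, lengths differ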
d18919060fde86baaa1bd6fed561872dfe4cc37f
oam_base/urls.py
oam_base/urls.py
from django.conf.urls import patterns, include, url from django.core.urlresolvers import reverse_lazy from MyInfo import views as my_info_views from django_cas import views as cas_views from oam_base import views as base_views # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', my_info_views.index, name='index'), url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')), url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')), url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')), url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'), url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'), url(r'^error/denied$', base_views.rate_limited, name='rate_limited'), url(r'^ajax/', include('ajax.urls')), url(r'^admin/', include(admin.site.urls)), ) handler500 = 'oam_base.views.custom_error'
from django.conf.urls import patterns, include, url from django.core.urlresolvers import reverse_lazy from MyInfo import views as my_info_views from django_cas import views as cas_views from oam_base import views as base_views # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', my_info_views.index, name='index'), url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')), url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')), url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')), url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'), url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'), url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'), url(r'^ajax/', include('ajax.urls')), url(r'^admin/', include(admin.site.urls)), ) handler500 = 'oam_base.views.custom_error'
Make the ratelimited error URL follow established conventions.
Make the ratelimited error URL follow established conventions.
Python
mit
hhauer/myinfo,hhauer/myinfo,hhauer/myinfo,hhauer/myinfo
from django.conf.urls import patterns, include, url from django.core.urlresolvers import reverse_lazy from MyInfo import views as my_info_views from django_cas import views as cas_views from oam_base import views as base_views # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', my_info_views.index, name='index'), url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')), url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')), url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')), url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'), url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'), url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'), url(r'^ajax/', include('ajax.urls')), url(r'^admin/', include(admin.site.urls)), ) handler500 = 'oam_base.views.custom_error'
Make the ratelimited error URL follow established conventions. from django.conf.urls import patterns, include, url from django.core.urlresolvers import reverse_lazy from MyInfo import views as my_info_views from django_cas import views as cas_views from oam_base import views as base_views # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'^$', my_info_views.index, name='index'), url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')), url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')), url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')), url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'), url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'), url(r'^error/denied$', base_views.rate_limited, name='rate_limited'), url(r'^ajax/', include('ajax.urls')), url(r'^admin/', include(admin.site.urls)), ) handler500 = 'oam_base.views.custom_error'
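The one-character change in this record, adding the trailing slash, is easy to verify against the raw regex (Django strips the leading '/' before matching, so the pattern is tested against 'error/denied/').

import re

assert re.match(r'^error/denied/$', 'error/denied/')
assert not re.match(r'^error/denied/$', 'error/denied')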
0fd68b4ac82bf867365c9cc5e0a129dbb51d8247
teamstats/migrations/0002_auto_20180828_1937.py
teamstats/migrations/0002_auto_20180828_1937.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-08-28 16:37
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('teamstats', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='video',
            name='mp4',
            field=models.FilePathField(blank=True, match='.*\\.mp4$', null=True, path='videos', recursive=True),
        ),
        migrations.AlterField(
            model_name='video',
            name='ogg',
            field=models.FilePathField(blank=True, match='.*\\.ogv$', null=True, path='videos', recursive=True),
        ),
        migrations.AlterField(
            model_name='video',
            name='webm',
            field=models.FilePathField(blank=True, match='.*\\.webm$', null=True, path='videos', recursive=True),
        ),
    ]
Add migrations for media files
Add migrations for media files
Python
agpl-3.0
jluttine/django-sportsteam,jluttine/django-sportsteam,jluttine/django-sportsteam,jluttine/django-sportsteam
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-08-28 16:37
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('teamstats', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='video',
            name='mp4',
            field=models.FilePathField(blank=True, match='.*\\.mp4$', null=True, path='videos', recursive=True),
        ),
        migrations.AlterField(
            model_name='video',
            name='ogg',
            field=models.FilePathField(blank=True, match='.*\\.ogv$', null=True, path='videos', recursive=True),
        ),
        migrations.AlterField(
            model_name='video',
            name='webm',
            field=models.FilePathField(blank=True, match='.*\\.webm$', null=True, path='videos', recursive=True),
        ),
    ]
Add migrations for media files
38496eddbb214ee856b588e5b1cda62d5e353ab7
system_maintenance/tests/functional/tests.py
system_maintenance/tests/functional/tests.py
from selenium import webdriver
import unittest


class FunctionalTest(unittest.TestCase):

    def setUp(self):
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)

    def tearDown(self):
        self.browser.quit()

    def test_app_home_title(self):
        self.browser.get('http://localhost:8000/system_maintenance')
        self.assertIn('System Maintenance', self.browser.title)


if __name__ == '__main__':
    unittest.main(warnings='ignore')
Add simple functional test to test the title of the app's home page
Add simple functional test to test the title of the app's home page
Python
bsd-3-clause
mfcovington/django-system-maintenance,mfcovington/django-system-maintenance,mfcovington/django-system-maintenance
from selenium import webdriver
import unittest


class FunctionalTest(unittest.TestCase):

    def setUp(self):
        self.browser = webdriver.Firefox()
        self.browser.implicitly_wait(3)

    def tearDown(self):
        self.browser.quit()

    def test_app_home_title(self):
        self.browser.get('http://localhost:8000/system_maintenance')
        self.assertIn('System Maintenance', self.browser.title)


if __name__ == '__main__':
    unittest.main(warnings='ignore')
Add simple functional test to test the title of the app's home page
aec8653089f37d53d13e1526ce2379a05e66604d
Utilities/Maintenance/GeneratePythonDownloadsPage.py
Utilities/Maintenance/GeneratePythonDownloadsPage.py
#!/usr/bin/env python
#=========================================================================
#
#  Copyright Insight Software Consortium
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0.txt
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
#=========================================================================

import hashlib
import argparse
import re
import os

parser = argparse.ArgumentParser(
    description="Given a list of python wheels, generate a list of hyperlinks to GitHub with sha512 fragment identifier"
)
parser.add_argument('--hash', choices=['md5','sha256', 'sha512'], default='sha512')
parser.add_argument('-f', '--format', choices=['html','md'], default='html')
parser.add_argument('files', metavar="python.whl", type=argparse.FileType(mode='rb'), nargs='+')
args = parser.parse_args()

for f in args.files:
    name = os.path.basename(f.name)

    #version="1.1.0"
    version = re.match(r'SimpleITK-([0-9]+\.[0-9]+(\.[0-9]+)?(rc[0-9]+)?)', name).group(1)
    print("version:{0}".format(version))

    if args.hash == "md5":
        hash_value = hashlib.md5(f.read()).hexdigest()
    elif args.hash == "sha256":
        hash_value = hashlib.sha256(f.read()).hexdigest()
    elif args.hash == "sha512":
        hash_value = hashlib.sha512(f.read()).hexdigest()

    tag = "v{0}".format(version)

    #host="SourceForge"
    #url = "https://sourceforge.net/projects/simpleitk/files/SimpleITK/{0}/Python/{1}#{2}={3}".format(version,name,args.hash,hash_value)
    host = "GitHub"
    url = "https://github.com/SimpleITK/SimpleITK/releases/download/{0}/{1}#{2}={3}".format(tag,name,args.hash,hash_value)

    if args.format == 'html':
        print "<li><a href=\"{0}\" title=\"Click to download {1}\">{1} (hosted at {2})</a></li>".format(url,name,host)
    elif args.format == 'md':
        print "[{1}]({0})".format(url,name)

    f.close()
Add script to generate download links used on simpleitk.org
Add script to generate download links used on simpleitk.org
Python
apache-2.0
InsightSoftwareConsortium/SimpleITK,blowekamp/SimpleITK,richardbeare/SimpleITK,richardbeare/SimpleITK,SimpleITK/SimpleITK,InsightSoftwareConsortium/SimpleITK,InsightSoftwareConsortium/SimpleITK,blowekamp/SimpleITK,SimpleITK/SimpleITK,blowekamp/SimpleITK,SimpleITK/SimpleITK,richardbeare/SimpleITK,blowekamp/SimpleITK,SimpleITK/SimpleITK,richardbeare/SimpleITK,blowekamp/SimpleITK,SimpleITK/SimpleITK,richardbeare/SimpleITK,SimpleITK/SimpleITK,blowekamp/SimpleITK,InsightSoftwareConsortium/SimpleITK,InsightSoftwareConsortium/SimpleITK,blowekamp/SimpleITK,richardbeare/SimpleITK,blowekamp/SimpleITK,richardbeare/SimpleITK,SimpleITK/SimpleITK,InsightSoftwareConsortium/SimpleITK,richardbeare/SimpleITK,InsightSoftwareConsortium/SimpleITK,InsightSoftwareConsortium/SimpleITK,SimpleITK/SimpleITK
#!/usr/bin/env python
#=========================================================================
#
#  Copyright Insight Software Consortium
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0.txt
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
#=========================================================================

import hashlib
import argparse
import re
import os

parser = argparse.ArgumentParser(
    description="Given a list of python wheels, generate a list of hyperlinks to GitHub with sha512 fragment identifier"
)
parser.add_argument('--hash', choices=['md5','sha256', 'sha512'], default='sha512')
parser.add_argument('-f', '--format', choices=['html','md'], default='html')
parser.add_argument('files', metavar="python.whl", type=argparse.FileType(mode='rb'), nargs='+')
args = parser.parse_args()

for f in args.files:
    name = os.path.basename(f.name)

    #version="1.1.0"
    version = re.match(r'SimpleITK-([0-9]+\.[0-9]+(\.[0-9]+)?(rc[0-9]+)?)', name).group(1)
    print("version:{0}".format(version))

    if args.hash == "md5":
        hash_value = hashlib.md5(f.read()).hexdigest()
    elif args.hash == "sha256":
        hash_value = hashlib.sha256(f.read()).hexdigest()
    elif args.hash == "sha512":
        hash_value = hashlib.sha512(f.read()).hexdigest()

    tag = "v{0}".format(version)

    #host="SourceForge"
    #url = "https://sourceforge.net/projects/simpleitk/files/SimpleITK/{0}/Python/{1}#{2}={3}".format(version,name,args.hash,hash_value)
    host = "GitHub"
    url = "https://github.com/SimpleITK/SimpleITK/releases/download/{0}/{1}#{2}={3}".format(tag,name,args.hash,hash_value)

    if args.format == 'html':
        print "<li><a href=\"{0}\" title=\"Click to download {1}\">{1} (hosted at {2})</a></li>".format(url,name,host)
    elif args.format == 'md':
        print "[{1}]({0})".format(url,name)

    f.close()
Add script to generate download links used on simpleitk.org
c22528df06e821936590431db5ba1a424e16f6a0
debug_toolbar/management/commands/debugsqlshell.py
debug_toolbar/management/commands/debugsqlshell.py
from datetime import datetime

from django.db.backends import util
import sqlparse

from debug_toolbar.utils import ms_from_timedelta


class PrintQueryWrapper(util.CursorDebugWrapper):
    def execute(self, sql, params=()):
        starttime = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            execution_time = datetime.now() - starttime
            print sqlparse.format(raw_sql, reindent=True),
            print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
            print

util.CursorDebugWrapper = PrintQueryWrapper

from __future__ import print_function

from datetime import datetime

from django.db.backends import util
import sqlparse

from debug_toolbar.utils import ms_from_timedelta


class PrintQueryWrapper(util.CursorDebugWrapper):
    def execute(self, sql, params=()):
        starttime = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            execution_time = ms_from_timedelta(datetime.now() - starttime)
            formatted_sql = sqlparse.format(raw_sql, reindent=True)
            print('%s [%.2fms]' % (formatted_sql, execution_time))

util.CursorDebugWrapper = PrintQueryWrapper
Replace print statement by print function.
Replace print statement by print function.
Python
bsd-3-clause
seperman/django-debug-toolbar,guilhermetavares/django-debug-toolbar,stored/django-debug-toolbar,sidja/django-debug-toolbar,Endika/django-debug-toolbar,guilhermetavares/django-debug-toolbar,megcunningham/django-debug-toolbar,pevzi/django-debug-toolbar,peap/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,seperman/django-debug-toolbar,spookylukey/django-debug-toolbar,tim-schilling/django-debug-toolbar,tim-schilling/django-debug-toolbar,barseghyanartur/django-debug-toolbar,megcunningham/django-debug-toolbar,megcunningham/django-debug-toolbar,spookylukey/django-debug-toolbar,barseghyanartur/django-debug-toolbar,ivelum/django-debug-toolbar,seperman/django-debug-toolbar,calvinpy/django-debug-toolbar,spookylukey/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,sidja/django-debug-toolbar,barseghyanartur/django-debug-toolbar,guilhermetavares/django-debug-toolbar,stored/django-debug-toolbar,jazzband/django-debug-toolbar,stored/django-debug-toolbar,pevzi/django-debug-toolbar,peap/django-debug-toolbar,tim-schilling/django-debug-toolbar,Endika/django-debug-toolbar,pevzi/django-debug-toolbar,ivelum/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,peap/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,jazzband/django-debug-toolbar,jazzband/django-debug-toolbar,ivelum/django-debug-toolbar,calvinpy/django-debug-toolbar,Endika/django-debug-toolbar,sidja/django-debug-toolbar,calvinpy/django-debug-toolbar
from __future__ import print_function

from datetime import datetime

from django.db.backends import util
import sqlparse

from debug_toolbar.utils import ms_from_timedelta


class PrintQueryWrapper(util.CursorDebugWrapper):
    def execute(self, sql, params=()):
        starttime = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            execution_time = ms_from_timedelta(datetime.now() - starttime)
            formatted_sql = sqlparse.format(raw_sql, reindent=True)
            print('%s [%.2fms]' % (formatted_sql, execution_time))

util.CursorDebugWrapper = PrintQueryWrapper

Replace print statement by print function.

from datetime import datetime

from django.db.backends import util
import sqlparse

from debug_toolbar.utils import ms_from_timedelta


class PrintQueryWrapper(util.CursorDebugWrapper):
    def execute(self, sql, params=()):
        starttime = datetime.now()
        try:
            return self.cursor.execute(sql, params)
        finally:
            raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            execution_time = datetime.now() - starttime
            print sqlparse.format(raw_sql, reindent=True),
            print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
            print

util.CursorDebugWrapper = PrintQueryWrapper
dd739126181b29493c9d1d90a7e40eac09c23666
app/models.py
app/models.py
# -*- coding: utf-8 -*-
"""
    app.models
    ~~~~~~~~~~

    Provides the SQLAlchemy models
"""
from __future__ import (
    absolute_import, division, print_function, with_statement,
    unicode_literals)

import savalidation.validators as val

from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin


class HDRO(db.Model, ValidationMixin):
    # auto keys
    id = db.Column(db.Integer, primary_key=True)
    utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
    utc_updated = db.Column(
        db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())

    # other keys
    rid = db.Column(db.String(16), nullable=False, index=True)
    country = db.Column(db.String(32), nullable=False)
    indicator = db.Column(db.String(128), nullable=False)
    value = db.Column(db.Numeric, nullable=False)
    year = db.Column(db.Integer, nullable=False)

    # validation
    val.validates_constraints()

    def __repr__(self):
        return ('<HDRO(%r, %r)>' % (self.country, self.indicator))

# -*- coding: utf-8 -*-
"""
    app.models
    ~~~~~~~~~~

    Provides the SQLAlchemy models
"""
from __future__ import (
    absolute_import, division, print_function, with_statement,
    unicode_literals)

import savalidation.validators as val

from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin


class HDRO(db.Model, ValidationMixin):
    # auto keys
    id = db.Column(db.Integer, primary_key=True)
    utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
    utc_updated = db.Column(
        db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())

    # other keys
    rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
    country = db.Column(db.String(32), nullable=False)
    indicator = db.Column(db.String(128), nullable=False)
    value = db.Column(db.Numeric, nullable=False)
    year = db.Column(db.Integer, nullable=False)

    # validation
    val.validates_constraints()

    def __repr__(self):
        return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
Add unique constraint to rid
Add unique constraint to rid
Python
mit
reubano/hdxscraper-hdro,reubano/hdxscraper-hdro,reubano/hdxscraper-hdro
# -*- coding: utf-8 -*-
"""
    app.models
    ~~~~~~~~~~

    Provides the SQLAlchemy models
"""
from __future__ import (
    absolute_import, division, print_function, with_statement,
    unicode_literals)

import savalidation.validators as val

from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin


class HDRO(db.Model, ValidationMixin):
    # auto keys
    id = db.Column(db.Integer, primary_key=True)
    utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
    utc_updated = db.Column(
        db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())

    # other keys
    rid = db.Column(db.String(16), nullable=False, index=True, unique=True)
    country = db.Column(db.String(32), nullable=False)
    indicator = db.Column(db.String(128), nullable=False)
    value = db.Column(db.Numeric, nullable=False)
    year = db.Column(db.Integer, nullable=False)

    # validation
    val.validates_constraints()

    def __repr__(self):
        return ('<HDRO(%r, %r)>' % (self.country, self.indicator))

Add unique constraint to rid

# -*- coding: utf-8 -*-
"""
    app.models
    ~~~~~~~~~~

    Provides the SQLAlchemy models
"""
from __future__ import (
    absolute_import, division, print_function, with_statement,
    unicode_literals)

import savalidation.validators as val

from datetime import datetime as dt
from app import db
from savalidation import ValidationMixin


class HDRO(db.Model, ValidationMixin):
    # auto keys
    id = db.Column(db.Integer, primary_key=True)
    utc_created = db.Column(db.DateTime, nullable=False, default=dt.utcnow())
    utc_updated = db.Column(
        db.DateTime, nullable=False, default=dt.utcnow(), onupdate=dt.utcnow())

    # other keys
    rid = db.Column(db.String(16), nullable=False, index=True)
    country = db.Column(db.String(32), nullable=False)
    indicator = db.Column(db.String(128), nullable=False)
    value = db.Column(db.Numeric, nullable=False)
    year = db.Column(db.Integer, nullable=False)

    # validation
    val.validates_constraints()

    def __repr__(self):
        return ('<HDRO(%r, %r)>' % (self.country, self.indicator))
40ae95e87e439645d35376942f8c48ce9e62b2ad
test/test_pluginmount.py
test/test_pluginmount.py
from JsonStats.FetchStats.Plugins import *

from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher


class TestPluginMount(TestCase):
    def setUp(self):
        # Do stuff that has to happen on every test in this instance
        self.fetcher = Fetcher

    def test_get_plugins(self):
        """
        Verify that after loading plugins we can see them attached
        to the Mount.
        """
        discovered = len(self.fetcher.get_plugins())
        expected = len(JsonStats.FetchStats.Plugins.__all__)
        self.assertEqual(discovered, expected)

from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher


class TestPluginMount(TestCase):
    def setUp(self):
        # Do stuff that has to happen on every test in this instance
        self.fetcher = Fetcher

        class _example_plugin(Fetcher):
            def __init__(self):
                self.context = 'testplugin'
                self._load_data()

            def _load_data(self):
                self._loaded(True)

            def dump(self):
                return {}

            def dump_json(self):
                return self.json.dumps(self.dump())

        self.example_plugin = _example_plugin

    def test_get_plugins(self):
        """
        Verify that after loading plugins we can see them attached
        to the Mount.
        """
        example_plugin = self.example_plugin()
        discovered = len(self.fetcher.get_plugins())
        assert discovered == 1
Fix the plugin mount text. And make it way more intelligent.
Fix the plugin mount text. And make it way more intelligent.
Python
mit
RHInception/jsonstats,pombredanne/jsonstats,pombredanne/jsonstats,RHInception/jsonstats
from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher


class TestPluginMount(TestCase):
    def setUp(self):
        # Do stuff that has to happen on every test in this instance
        self.fetcher = Fetcher

        class _example_plugin(Fetcher):
            def __init__(self):
                self.context = 'testplugin'
                self._load_data()

            def _load_data(self):
                self._loaded(True)

            def dump(self):
                return {}

            def dump_json(self):
                return self.json.dumps(self.dump())

        self.example_plugin = _example_plugin

    def test_get_plugins(self):
        """
        Verify that after loading plugins we can see them attached
        to the Mount.
        """
        example_plugin = self.example_plugin()
        discovered = len(self.fetcher.get_plugins())
        assert discovered == 1

Fix the plugin mount text. And make it way more intelligent.

from JsonStats.FetchStats.Plugins import *

from . import TestCase
import JsonStats.FetchStats.Plugins
from JsonStats.FetchStats import Fetcher


class TestPluginMount(TestCase):
    def setUp(self):
        # Do stuff that has to happen on every test in this instance
        self.fetcher = Fetcher

    def test_get_plugins(self):
        """
        Verify that after loading plugins we can see them attached
        to the Mount.
        """
        discovered = len(self.fetcher.get_plugins())
        expected = len(JsonStats.FetchStats.Plugins.__all__)
        self.assertEqual(discovered, expected)
4b855e62bd4f92c7aa9b2614cb6eb57e112d7db6
reclass/__init__.py
reclass/__init__.py
#
# -*- coding: utf-8 -*-
#
# This file is part of reclass (http://github.com/madduck/reclass)
#
# Copyright © 2007–13 martin f. krafft <[email protected]>
# Released under the terms of the Artistic Licence 2.0
#
from output import OutputLoader
from storage import StorageBackendLoader

def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node):
    storage_class = StorageBackendLoader(storage_type).load()
    storage = storage_class(nodes_uri, classes_uri, applications_postfix)
    if node is False:
        ret = storage.inventory()
    else:
        ret = storage.nodeinfo(node)
    return ret

def output(data, fmt, pretty_print=False):
    output_class = OutputLoader(fmt).load()
    outputter = output_class()
    return outputter.dump(data, pretty_print=pretty_print)

#
# -*- coding: utf-8 -*-
#
# This file is part of reclass (http://github.com/madduck/reclass)
#
# Copyright © 2007–13 martin f. krafft <[email protected]>
# Released under the terms of the Artistic Licence 2.0
#
from output import OutputLoader
from storage import StorageBackendLoader

def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node):
    storage_class = StorageBackendLoader(storage_type).load()
    storage = storage_class(nodes_uri, classes_uri, applications_postfix)
    if not node:
        ret = storage.inventory()
    else:
        ret = storage.nodeinfo(node)
    return ret

def output(data, fmt, pretty_print=False):
    output_class = OutputLoader(fmt).load()
    outputter = output_class()
    return outputter.dump(data, pretty_print=pretty_print)
Allow node to be None to trigger inventory
Allow node to be None to trigger inventory

Signed-off-by: martin f. krafft <[email protected]>
Python
artistic-2.0
madduck/reclass,rmoorman/reclass,jeroen92/reclass,michaelkuty/reclass,jeroen92/reclass,rmoorman/reclass
#
# -*- coding: utf-8 -*-
#
# This file is part of reclass (http://github.com/madduck/reclass)
#
# Copyright © 2007–13 martin f. krafft <[email protected]>
# Released under the terms of the Artistic Licence 2.0
#
from output import OutputLoader
from storage import StorageBackendLoader

def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node):
    storage_class = StorageBackendLoader(storage_type).load()
    storage = storage_class(nodes_uri, classes_uri, applications_postfix)
    if not node:
        ret = storage.inventory()
    else:
        ret = storage.nodeinfo(node)
    return ret

def output(data, fmt, pretty_print=False):
    output_class = OutputLoader(fmt).load()
    outputter = output_class()
    return outputter.dump(data, pretty_print=pretty_print)

Allow node to be None to trigger inventory

Signed-off-by: martin f. krafft <[email protected]>

#
# -*- coding: utf-8 -*-
#
# This file is part of reclass (http://github.com/madduck/reclass)
#
# Copyright © 2007–13 martin f. krafft <[email protected]>
# Released under the terms of the Artistic Licence 2.0
#
from output import OutputLoader
from storage import StorageBackendLoader

def get_data(storage_type, nodes_uri, classes_uri, applications_postfix, node):
    storage_class = StorageBackendLoader(storage_type).load()
    storage = storage_class(nodes_uri, classes_uri, applications_postfix)
    if node is False:
        ret = storage.inventory()
    else:
        ret = storage.nodeinfo(node)
    return ret

def output(data, fmt, pretty_print=False):
    output_class = OutputLoader(fmt).load()
    outputter = output_class()
    return outputter.dump(data, pretty_print=pretty_print)
d978f9c54d3509a5fd8ef3b287d2c3dfa7683d77
setup.py
setup.py
#!/usr/bin/python
from setuptools import setup

setup(name="catsnap",
      version="6.0.0",
      description="catalog and store images",
      author="Erin Call",
      author_email="[email protected]",
      url="https://github.com/ErinCall/",
      packages=['catsnap',
                'catsnap.document',
                'catsnap.config',
                'catsnap.batch'],
      install_requires=[
          "Flask==0.9",
          "gunicorn==0.14.6",
          "boto==2.5.2",
          "requests==0.13.2",
          "argparse==1.2.1",
          "psycopg2==2.4.6",
          "sqlalchemy==0.8.0b2",
          "yoyo-migrations==4.1.6",
          "wand==0.3.3",
          "celery==3.1.16",
          "redis==2.10.3",
          "gevent==1.0.2",
          "Flask-Sockets==0.1",
          "PyYAML==3.11",
          "mock==1.0.1",
          "nose==1.1.2",
          "splinter==0.5.3",
          "bcrypt==1.1.1",
      ],
)

#!/usr/bin/python
from setuptools import setup

setup(name="catsnap",
      version="6.0.0",
      description="catalog and store images",
      author="Erin Call",
      author_email="[email protected]",
      url="https://github.com/ErinCall/",
      packages=['catsnap',
                'catsnap.document',
                'catsnap.config',
                'catsnap.batch'],
      install_requires=[
          "Flask==0.9",
          "gunicorn==0.14.6",
          "boto==2.5.2",
          "requests==0.13.2",
          "argparse==1.2.1",
          "psycopg2==2.4.6",
          "sqlalchemy==0.8.0b2",
          "yoyo-migrations==4.1.6",
          "wand==0.3.3",
          "celery==3.1.16",
          "redis==2.10.3",
          "gevent==1.1b5",
          "Flask-Sockets==0.1",
          "PyYAML==3.11",
          "mock==1.0.1",
          "nose==1.1.2",
          "splinter==0.5.3",
          "bcrypt==1.1.1",
      ],
)
Upgrade to a newer gevent for OSX Yosemity compat
Upgrade to a newer gevent for OSX Yosemity compat

See https://github.com/gevent/gevent/issues/656
Python
mit
ErinCall/catsnap,ErinCall/catsnap,ErinCall/catsnap
#!/usr/bin/python
from setuptools import setup

setup(name="catsnap",
      version="6.0.0",
      description="catalog and store images",
      author="Erin Call",
      author_email="[email protected]",
      url="https://github.com/ErinCall/",
      packages=['catsnap',
                'catsnap.document',
                'catsnap.config',
                'catsnap.batch'],
      install_requires=[
          "Flask==0.9",
          "gunicorn==0.14.6",
          "boto==2.5.2",
          "requests==0.13.2",
          "argparse==1.2.1",
          "psycopg2==2.4.6",
          "sqlalchemy==0.8.0b2",
          "yoyo-migrations==4.1.6",
          "wand==0.3.3",
          "celery==3.1.16",
          "redis==2.10.3",
          "gevent==1.1b5",
          "Flask-Sockets==0.1",
          "PyYAML==3.11",
          "mock==1.0.1",
          "nose==1.1.2",
          "splinter==0.5.3",
          "bcrypt==1.1.1",
      ],
)

Upgrade to a newer gevent for OSX Yosemity compat

See https://github.com/gevent/gevent/issues/656

#!/usr/bin/python
from setuptools import setup

setup(name="catsnap",
      version="6.0.0",
      description="catalog and store images",
      author="Erin Call",
      author_email="[email protected]",
      url="https://github.com/ErinCall/",
      packages=['catsnap',
                'catsnap.document',
                'catsnap.config',
                'catsnap.batch'],
      install_requires=[
          "Flask==0.9",
          "gunicorn==0.14.6",
          "boto==2.5.2",
          "requests==0.13.2",
          "argparse==1.2.1",
          "psycopg2==2.4.6",
          "sqlalchemy==0.8.0b2",
          "yoyo-migrations==4.1.6",
          "wand==0.3.3",
          "celery==3.1.16",
          "redis==2.10.3",
          "gevent==1.0.2",
          "Flask-Sockets==0.1",
          "PyYAML==3.11",
          "mock==1.0.1",
          "nose==1.1.2",
          "splinter==0.5.3",
          "bcrypt==1.1.1",
      ],
)
32f99cd7a9f20e2c8d7ebd140c23ac0e43b1284c
pulldb/users.py
pulldb/users.py
# Copyright 2013 Russell Heilling
import logging

from google.appengine.api import users

from pulldb import base
from pulldb import session
from pulldb.models.users import User


class Profile(session.SessionHandler):
    def get(self):
        app_user = users.get_current_user()
        template_values = self.base_template_values()
        template_values.update({
            'user': user_key(app_user).get(),
        })
        template = self.templates.get_template('users_profile.html')
        self.response.write(template.render(template_values))


def user_key(app_user=users.get_current_user(), create=True):
    key = None
    user = User.query(User.userid == app_user.user_id()).get()
    if user:
        key = user.key
    elif create:
        logging.info('Adding user to datastore: %s', app_user.nickname())
        user = User(userid=app_user.user_id(), nickname=app_user.nickname())
        user.put()
        key = user.key
    return user.key


app = base.create_app([
    ('/users/me', Profile),
])

# Copyright 2013 Russell Heilling
import logging

from google.appengine.api import users

from pulldb import base
from pulldb import session
from pulldb.models.users import User


class Profile(session.SessionHandler):
    def get(self):
        app_user = users.get_current_user()
        template_values = self.base_template_values()
        template_values.update({
            'user': user_key(app_user).get(),
        })
        template = self.templates.get_template('users_profile.html')
        self.response.write(template.render(template_values))


def user_key(app_user=users.get_current_user(), create=True):
    logging.debug("Looking up user key for: %r", app_user)
    key = None
    user = User.query(User.userid == app_user.user_id()).get()
    if user:
        key = user.key
    elif create:
        logging.info('Adding user to datastore: %s', app_user.nickname())
        user = User(userid=app_user.user_id(), nickname=app_user.nickname())
        user.put()
        key = user.key
    return user.key


app = base.create_app([
    ('/users/me', Profile),
])
Add logging to track down a bug
Add logging to track down a bug
Python
mit
xchewtoyx/pulldb
# Copyright 2013 Russell Heilling
import logging

from google.appengine.api import users

from pulldb import base
from pulldb import session
from pulldb.models.users import User


class Profile(session.SessionHandler):
    def get(self):
        app_user = users.get_current_user()
        template_values = self.base_template_values()
        template_values.update({
            'user': user_key(app_user).get(),
        })
        template = self.templates.get_template('users_profile.html')
        self.response.write(template.render(template_values))


def user_key(app_user=users.get_current_user(), create=True):
    logging.debug("Looking up user key for: %r", app_user)
    key = None
    user = User.query(User.userid == app_user.user_id()).get()
    if user:
        key = user.key
    elif create:
        logging.info('Adding user to datastore: %s', app_user.nickname())
        user = User(userid=app_user.user_id(), nickname=app_user.nickname())
        user.put()
        key = user.key
    return user.key


app = base.create_app([
    ('/users/me', Profile),
])

Add logging to track down a bug

# Copyright 2013 Russell Heilling
import logging

from google.appengine.api import users

from pulldb import base
from pulldb import session
from pulldb.models.users import User


class Profile(session.SessionHandler):
    def get(self):
        app_user = users.get_current_user()
        template_values = self.base_template_values()
        template_values.update({
            'user': user_key(app_user).get(),
        })
        template = self.templates.get_template('users_profile.html')
        self.response.write(template.render(template_values))


def user_key(app_user=users.get_current_user(), create=True):
    key = None
    user = User.query(User.userid == app_user.user_id()).get()
    if user:
        key = user.key
    elif create:
        logging.info('Adding user to datastore: %s', app_user.nickname())
        user = User(userid=app_user.user_id(), nickname=app_user.nickname())
        user.put()
        key = user.key
    return user.key


app = base.create_app([
    ('/users/me', Profile),
])
9c6f3e1994f686e57092a7cd947c49b4f857743e
apps/predict/urls.py
apps/predict/urls.py
"""
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url

from .views import *

def url_tree(regex, *urls):
    """Quick access to stitching url patterns"""
    return url(regex, include(patterns('', *urls)))

urlpatterns = patterns('',
    url(r'^$', Datasets.as_view(), name="view_my_datasets"),

    url_tree(r'^upload/',
        url(r'^$', UploadChoices.as_view(), name="upload"),
        url(r'^manual/$', UploadManual.as_view(), name="upload_manual"),
        url_tree(r'^(?P<type>[\w-]+)/',
            url(r'^$', UploadView.as_view(), name="upload"),
            url(r'^(?P<fastq>[\w-]+)/$', UploadView.as_view(), name="upload"),
        ),
    ),
    url_tree(r'^(?P<slug>\w{32})/',
        url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
        url(r'^callback/$', Callback.as_view(), name="callback"),
        url(r'^note/$', AddNote.as_view(), name="add_note"),
    ),
)

"""
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url

from .views import *

def url_tree(regex, *urls):
    """Quick access to stitching url patterns"""
    return url(regex, include(patterns('', *urls)))

urlpatterns = patterns('',
    url(r'^$', Datasets.as_view(), name="view_my_datasets"),

    url_tree(r'^upload/',
        url(r'^$', UploadChoices.as_view(), name="upload"),
        url(r'^(?P<type>[\w-]+)/', UploadView.as_view(), name="upload"),
    ),
    url_tree(r'^(?P<slug>\w{32})/',
        url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
        url(r'^note/$', AddNote.as_view(), name="add_note"),
    ),
)
Remove callback url and bring uploads together
Remove callback url and bring uploads together
Python
agpl-3.0
IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site,IQSS/gentb-site
"""
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url

from .views import *

def url_tree(regex, *urls):
    """Quick access to stitching url patterns"""
    return url(regex, include(patterns('', *urls)))

urlpatterns = patterns('',
    url(r'^$', Datasets.as_view(), name="view_my_datasets"),

    url_tree(r'^upload/',
        url(r'^$', UploadChoices.as_view(), name="upload"),
        url(r'^(?P<type>[\w-]+)/', UploadView.as_view(), name="upload"),
    ),
    url_tree(r'^(?P<slug>\w{32})/',
        url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
        url(r'^note/$', AddNote.as_view(), name="add_note"),
    ),
)

Remove callback url and bring uploads together

"""
Predict app's urls
"""
#
# pylint: disable=bad-whitespace
#
from django.conf.urls import patterns, include, url

from .views import *

def url_tree(regex, *urls):
    """Quick access to stitching url patterns"""
    return url(regex, include(patterns('', *urls)))

urlpatterns = patterns('',
    url(r'^$', Datasets.as_view(), name="view_my_datasets"),

    url_tree(r'^upload/',
        url(r'^$', UploadChoices.as_view(), name="upload"),
        url(r'^manual/$', UploadManual.as_view(), name="upload_manual"),
        url_tree(r'^(?P<type>[\w-]+)/',
            url(r'^$', UploadView.as_view(), name="upload"),
            url(r'^(?P<fastq>[\w-]+)/$', UploadView.as_view(), name="upload"),
        ),
    ),
    url_tree(r'^(?P<slug>\w{32})/',
        url(r'^$', DatasetView.as_view(), name="view_single_dataset"),
        url(r'^callback/$', Callback.as_view(), name="callback"),
        url(r'^note/$', AddNote.as_view(), name="add_note"),
    ),
)
bec98cca8a765743cf990f5807f5d52b95dd5d9e
setup.py
setup.py
#!/usr/bin/env python3
# encoding: utf-8

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import re
with open('glooey/__init__.py') as file:
    version_pattern = re.compile("__version__ = '(.*)'")
    version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
    readme = file.read()

setup(
    name='glooey',
    version=version,
    author='Kale Kundert',
    author_email='[email protected]',
    description='An object-oriented GUI library for pyglet.',
    long_description=readme,
    url='https://github.com/kxgames/glooey',
    packages=[
        'glooey',
    ],
    include_package_data=True,
    install_requires=[
        'pyglet',
        'more_itertools',
        'vecrec',
        'autoprop',
    ],
    license='MIT',
    zip_safe=False,
    keywords=[
        'glooey',
        'pyglet',
        'gui',
        'library',
    ],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3.5',
        'Topic :: Games/Entertainment',
        'Topic :: Software Development :: User Interfaces',
        'Topic :: Software Development :: Libraries',
    ],
)

#!/usr/bin/env python3
# encoding: utf-8

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import re
with open('glooey/__init__.py') as file:
    version_pattern = re.compile("__version__ = '(.*)'")
    version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
    readme = file.read()

setup(
    name='glooey',
    version=version,
    author='Kale Kundert',
    author_email='[email protected]',
    description='An object-oriented GUI library for pyglet.',
    long_description=readme,
    url='https://github.com/kxgames/glooey',
    packages=[
        'glooey',
    ],
    include_package_data=True,
    install_requires=[
        'pyglet',
        'more_itertools',
        'vecrec',
        'autoprop',
        'debugtools',
    ],
    license='MIT',
    zip_safe=False,
    keywords=[
        'glooey',
        'pyglet',
        'gui',
        'library',
    ],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3.5',
        'Topic :: Games/Entertainment',
        'Topic :: Software Development :: User Interfaces',
        'Topic :: Software Development :: Libraries',
    ],
)
Add debugtools as a dependency.
Add debugtools as a dependency.
Python
mit
kxgames/glooey,kxgames/glooey
#!/usr/bin/env python3
# encoding: utf-8

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import re
with open('glooey/__init__.py') as file:
    version_pattern = re.compile("__version__ = '(.*)'")
    version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
    readme = file.read()

setup(
    name='glooey',
    version=version,
    author='Kale Kundert',
    author_email='[email protected]',
    description='An object-oriented GUI library for pyglet.',
    long_description=readme,
    url='https://github.com/kxgames/glooey',
    packages=[
        'glooey',
    ],
    include_package_data=True,
    install_requires=[
        'pyglet',
        'more_itertools',
        'vecrec',
        'autoprop',
        'debugtools',
    ],
    license='MIT',
    zip_safe=False,
    keywords=[
        'glooey',
        'pyglet',
        'gui',
        'library',
    ],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3.5',
        'Topic :: Games/Entertainment',
        'Topic :: Software Development :: User Interfaces',
        'Topic :: Software Development :: Libraries',
    ],
)

Add debugtools as a dependency.

#!/usr/bin/env python3
# encoding: utf-8

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

import re
with open('glooey/__init__.py') as file:
    version_pattern = re.compile("__version__ = '(.*)'")
    version = version_pattern.search(file.read()).group(1)
with open('README.rst') as file:
    readme = file.read()

setup(
    name='glooey',
    version=version,
    author='Kale Kundert',
    author_email='[email protected]',
    description='An object-oriented GUI library for pyglet.',
    long_description=readme,
    url='https://github.com/kxgames/glooey',
    packages=[
        'glooey',
    ],
    include_package_data=True,
    install_requires=[
        'pyglet',
        'more_itertools',
        'vecrec',
        'autoprop',
    ],
    license='MIT',
    zip_safe=False,
    keywords=[
        'glooey',
        'pyglet',
        'gui',
        'library',
    ],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3.5',
        'Topic :: Games/Entertainment',
        'Topic :: Software Development :: User Interfaces',
        'Topic :: Software Development :: Libraries',
    ],
)
12cac5280ab5c74b3497055c4104f23e52cdd5f1
scripts/generate_posts.py
scripts/generate_posts.py
import os
import ast
import datetime
import re

grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
checks_dir = os.path.join(grandparent, "proselint", "checks")
listing = os.listdir(checks_dir)


def is_check(fn):
    return fn[-3:] == ".py" and not fn == "__init__.py"

for fn in listing:
    if is_check(fn):
        M = ast.parse(''.join(open(os.path.join(checks_dir, fn))))
        docstring = ast.get_docstring(M)

        error_code = re.search("error_code: (.*)\n", docstring).group(1)
        head, sep, tail = docstring.partition("title: ")
        docstring = head + sep + " " + error_code + ":" + tail[4:]

        post_filename = os.path.join(
            os.path.join(grandparent, "site", "_posts"),
            str(datetime.date.today()) + "-" + docstring[0:6] + ".md")

        # Chop off the first two lines
        for i in xrange(2):
            docstring = '\n'.join(docstring.split('\n')[1:])

        # Create a new post in the blog.
        with open(post_filename, 'w') as f:
            f.write(docstring)

import os
import ast
import datetime
import re

grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
checks_dir = os.path.join(grandparent, "proselint", "checks")
listing = os.listdir(checks_dir)


def is_check(fn):
    return fn[-3:] == ".py" and not fn == "__init__.py"

for fn in listing:
    if is_check(fn):
        M = ast.parse(''.join(open(os.path.join(checks_dir, fn))))
        docstring = ast.get_docstring(M)

        error_code = re.search("error_code: (.*)\n", docstring).group(1)
        head, sep, tail = docstring.partition("title: ")
        docstring = head + sep + " " + error_code + "&#58;" + tail[4:]

        post_filename = os.path.join(
            os.path.join(grandparent, "site", "_posts"),
            str(datetime.date.today()) + "-" + docstring[0:6] + ".md")

        # Chop off the first two lines
        for i in xrange(2):
            docstring = '\n'.join(docstring.split('\n')[1:])

        # Create a new post in the blog.
        with open(post_filename, 'w') as f:
            f.write(docstring)
Use HTML entity for colon
Use HTML entity for colon
Python
bsd-3-clause
amperser/proselint,jstewmon/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint
import os
import ast
import datetime
import re

grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
checks_dir = os.path.join(grandparent, "proselint", "checks")
listing = os.listdir(checks_dir)


def is_check(fn):
    return fn[-3:] == ".py" and not fn == "__init__.py"

for fn in listing:
    if is_check(fn):
        M = ast.parse(''.join(open(os.path.join(checks_dir, fn))))
        docstring = ast.get_docstring(M)

        error_code = re.search("error_code: (.*)\n", docstring).group(1)
        head, sep, tail = docstring.partition("title: ")
        docstring = head + sep + " " + error_code + "&#58;" + tail[4:]

        post_filename = os.path.join(
            os.path.join(grandparent, "site", "_posts"),
            str(datetime.date.today()) + "-" + docstring[0:6] + ".md")

        # Chop off the first two lines
        for i in xrange(2):
            docstring = '\n'.join(docstring.split('\n')[1:])

        # Create a new post in the blog.
        with open(post_filename, 'w') as f:
            f.write(docstring)

Use HTML entity for colon

import os
import ast
import datetime
import re

grandparent = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
checks_dir = os.path.join(grandparent, "proselint", "checks")
listing = os.listdir(checks_dir)


def is_check(fn):
    return fn[-3:] == ".py" and not fn == "__init__.py"

for fn in listing:
    if is_check(fn):
        M = ast.parse(''.join(open(os.path.join(checks_dir, fn))))
        docstring = ast.get_docstring(M)

        error_code = re.search("error_code: (.*)\n", docstring).group(1)
        head, sep, tail = docstring.partition("title: ")
        docstring = head + sep + " " + error_code + ":" + tail[4:]

        post_filename = os.path.join(
            os.path.join(grandparent, "site", "_posts"),
            str(datetime.date.today()) + "-" + docstring[0:6] + ".md")

        # Chop off the first two lines
        for i in xrange(2):
            docstring = '\n'.join(docstring.split('\n')[1:])

        # Create a new post in the blog.
        with open(post_filename, 'w') as f:
            f.write(docstring)
72e69f3535c7e2cd82cdda62636eabd7421ebddf
generative/tests/compare_test/concat_first/dump_hiddens.py
generative/tests/compare_test/concat_first/dump_hiddens.py
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import

import os
import subprocess


if __name__ == "__main__":
    for hiddens_dim in [512, 256, 128, 64, 32, 16]:
        print('Dumping files for (%d)' % hiddens_dim)
        model_path = '/mnt/visual_communication_dataset/trained_models_5_30_18/hiddens_fc6/%d/model_best.pth.tar' % hiddens_dim
        out_dir = './dump_hiddens_outputs/%d/' % hiddens_dim
        if not os.path.isdir(out_dir):
            os.makedirs(out_dir)
        command = 'CUDA_VISIBLE_DEVICES=7 python dump.py {model} --train-test-split-dir ./train_test_split/1 --out-dir {outdir} --average-labels --overwrite-layer fc6 --cuda'.format(model=model_path, outdir=out_dir)
        subprocess.call(command, shell=True)
Add dump script for all hiddens
Add dump script for all hiddens
Python
mit
judithfan/pix2svg
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import

import os
import subprocess


if __name__ == "__main__":
    for hiddens_dim in [512, 256, 128, 64, 32, 16]:
        print('Dumping files for (%d)' % hiddens_dim)
        model_path = '/mnt/visual_communication_dataset/trained_models_5_30_18/hiddens_fc6/%d/model_best.pth.tar' % hiddens_dim
        out_dir = './dump_hiddens_outputs/%d/' % hiddens_dim
        if not os.path.isdir(out_dir):
            os.makedirs(out_dir)
        command = 'CUDA_VISIBLE_DEVICES=7 python dump.py {model} --train-test-split-dir ./train_test_split/1 --out-dir {outdir} --average-labels --overwrite-layer fc6 --cuda'.format(model=model_path, outdir=out_dir)
        subprocess.call(command, shell=True)
Add dump script for all hiddens
aa9829567f65c36c5c7356aa5e7d6ac1762f62aa
setup.py
setup.py
#!/usr/bin/env python3

from setuptools import setup

setup(
    name='todoman',
    description='A simple CalDav-based todo manager.',
    author='Hugo Osvaldo Barrera',
    author_email='[email protected]',
    url='https://git.barrera.io/hobarrera/todoman',
    license='MIT',
    packages=['todoman'],
    entry_points={
        'console_scripts': [
            'todo = todoman.cli:run',
        ]
    },
    install_requires=[
        'click',
        'icalendar',
        'urwid',
        'pyxdg',
        'atomicwrites',
        # https://github.com/tehmaze/ansi/pull/7
        'ansi>=0.1.3',
        'parsedatetime',
        'setuptools_scm',
    ],
    use_scm_version={'version_scheme': 'post-release'},
    setup_requires=['setuptools_scm'],
    # TODO: classifiers
)

#!/usr/bin/env python3

from setuptools import setup

setup(
    name='todoman',
    description='A simple CalDav-based todo manager.',
    author='Hugo Osvaldo Barrera',
    author_email='[email protected]',
    url='https://gitlab.com/hobarrera/todoman',
    license='MIT',
    packages=['todoman'],
    entry_points={
        'console_scripts': [
            'todo = todoman.cli:run',
        ]
    },
    install_requires=[
        'click',
        'icalendar',
        'urwid',
        'pyxdg',
        'atomicwrites',
        # https://github.com/tehmaze/ansi/pull/7
        'ansi>=0.1.3',
        'parsedatetime',
        'setuptools_scm',
    ],
    use_scm_version={'version_scheme': 'post-release'},
    setup_requires=['setuptools_scm'],
    # TODO: classifiers
)
Update URL to current point to gitlab.com
Update URL to current point to gitlab.com
Python
isc
AnubhaAgrawal/todoman,asalminen/todoman,hobarrera/todoman,pimutils/todoman,Sakshisaraswat/todoman,rimshaakhan/todoman
#!/usr/bin/env python3

from setuptools import setup

setup(
    name='todoman',
    description='A simple CalDav-based todo manager.',
    author='Hugo Osvaldo Barrera',
    author_email='[email protected]',
    url='https://gitlab.com/hobarrera/todoman',
    license='MIT',
    packages=['todoman'],
    entry_points={
        'console_scripts': [
            'todo = todoman.cli:run',
        ]
    },
    install_requires=[
        'click',
        'icalendar',
        'urwid',
        'pyxdg',
        'atomicwrites',
        # https://github.com/tehmaze/ansi/pull/7
        'ansi>=0.1.3',
        'parsedatetime',
        'setuptools_scm',
    ],
    use_scm_version={'version_scheme': 'post-release'},
    setup_requires=['setuptools_scm'],
    # TODO: classifiers
)

Update URL to current point to gitlab.com

#!/usr/bin/env python3

from setuptools import setup

setup(
    name='todoman',
    description='A simple CalDav-based todo manager.',
    author='Hugo Osvaldo Barrera',
    author_email='[email protected]',
    url='https://git.barrera.io/hobarrera/todoman',
    license='MIT',
    packages=['todoman'],
    entry_points={
        'console_scripts': [
            'todo = todoman.cli:run',
        ]
    },
    install_requires=[
        'click',
        'icalendar',
        'urwid',
        'pyxdg',
        'atomicwrites',
        # https://github.com/tehmaze/ansi/pull/7
        'ansi>=0.1.3',
        'parsedatetime',
        'setuptools_scm',
    ],
    use_scm_version={'version_scheme': 'post-release'},
    setup_requires=['setuptools_scm'],
    # TODO: classifiers
)
9ee332f6f0af3d632860581971446f9edf4f74be
changetext/WIKIXML2TW.py
changetext/WIKIXML2TW.py
def WIKIXML2TW(inputfilename, outputfilename):
    "Convert Wikimedia XML dump to TiddlyWiki import file"
    inputfile = open(inputfilename, "r")
    xmlinput = unicode(inputfile.read(), errors='ignore')
    outputfilemenu = open(outputfilename + '.menu', "w")
    outputfile = open(outputfilename, "w")
    outputfile.write('<html><head></head><body><div id="storeArea">'.encode('utf-8'))
    startpos = 0
    while startpos > -1:
        titleString = ''
        textString = ''
        startpos = xmlinput.find('<title>', startpos)
        if startpos > -1:
            titleString = xmlinput[startpos+len('<title>'):xmlinput.find('</title>', startpos)]
        if startpos > -1:
            startpos = xmlinput.find('</title>', startpos)
        if startpos > -1:
            startpos = xmlinput.find('<text xml:space="preserve">', startpos)
        if startpos > -1:
            textString = xmlinput[startpos+len('<text xml:space="preserve">'):xmlinput.find('</text>', startpos)]
        if startpos > -1:
            startpos = xmlinput.find('</text>', startpos)
        if titleString != '':
            originalTitleString = titleString
            titleString = titleString.replace(' ', '_')
            outputfile.write('<div title="'.encode('utf-8'))
            outputfile.write(titleString.encode('utf-8'))
            outputfilemenu.write('[[' + originalTitleString + '|' + titleString.encode('utf-8') + ']]\n')
            outputfile.write('" creator="YourName" modifier="YourName" created="201309161317" modified="201309161321" tags="MediaWikiFormat" changecount="1"><pre>'.encode('utf-8'))
            outputfile.write(textString.encode('utf-8'))
            outputfile.write('</pre></div>\n'.encode('utf-8'))
    outputfile.write('</div></body></html>'.encode('utf-8'))
    outputfile.close()
    outputfilemenu.close()
    return;
Convert wiki XML to Tiddlywiki import format
Convert wiki XML to Tiddlywiki import format
Python
mit
cottley/moruga
def WIKIXML2TW(inputfilename, outputfilename):
    "Convert Wikimedia XML dump to TiddlyWiki import file"
    inputfile = open(inputfilename, "r")
    xmlinput = unicode(inputfile.read(), errors='ignore')
    outputfilemenu = open(outputfilename + '.menu', "w")
    outputfile = open(outputfilename, "w")
    outputfile.write('<html><head></head><body><div id="storeArea">'.encode('utf-8'))
    startpos = 0
    while startpos > -1:
        titleString = ''
        textString = ''
        startpos = xmlinput.find('<title>', startpos)
        if startpos > -1:
            titleString = xmlinput[startpos+len('<title>'):xmlinput.find('</title>', startpos)]
        if startpos > -1:
            startpos = xmlinput.find('</title>', startpos)
        if startpos > -1:
            startpos = xmlinput.find('<text xml:space="preserve">', startpos)
        if startpos > -1:
            textString = xmlinput[startpos+len('<text xml:space="preserve">'):xmlinput.find('</text>', startpos)]
        if startpos > -1:
            startpos = xmlinput.find('</text>', startpos)
        if titleString != '':
            originalTitleString = titleString
            titleString = titleString.replace(' ', '_')
            outputfile.write('<div title="'.encode('utf-8'))
            outputfile.write(titleString.encode('utf-8'))
            outputfilemenu.write('[[' + originalTitleString + '|' + titleString.encode('utf-8') + ']]\n')
            outputfile.write('" creator="YourName" modifier="YourName" created="201309161317" modified="201309161321" tags="MediaWikiFormat" changecount="1"><pre>'.encode('utf-8'))
            outputfile.write(textString.encode('utf-8'))
            outputfile.write('</pre></div>\n'.encode('utf-8'))
    outputfile.write('</div></body></html>'.encode('utf-8'))
    outputfile.close()
    outputfilemenu.close()
    return;
Convert wiki XML to Tiddlywiki import format
fe9f2b7e76088afb6f4d905c0c4188df88b81516
pollirio/modules/__init__.py
pollirio/modules/__init__.py
# -*- coding: utf-8 -*-

from functools import wraps

from pollirio import commands

def old_expose(cmd):
    def inner(fn):
        def wrapped(*args, **kwargs):
            commands[cmd] = fn
            fn(*args)
        return wraps(fn)(wrapped)
    return inner

def expose(cmd, args=None):
    def decorator(fn):
        commands[cmd] = {"func":fn, "args":args}
        return fn
    return decorator

def plugin_run(name, *args):
    if name in commands:
        return commands.get(name)["func"](*args)

def check_args(name, bot, ievent):
    if name in commands:
        if commands.get(name)["args"]:
            # TODO: check if we have all the arguments
            print len(ievent.args), commands.get(name)["args"]
            if len(ievent.args) < commands.get(name)["args"]:
                bot.msg(ievent.channel, "%s: %s" % (ievent.nick, commands.get(name)["func"].__doc__))
                return False
            else:
                return True
        else:
            return True
    return False

from lart import *
from polygen import *
from bts import *
from misc import *

# -*- coding: utf-8 -*-

from functools import wraps

from pollirio import commands

def old_expose(cmd):
    def inner(fn):
        def wrapped(*args, **kwargs):
            commands[cmd] = fn
            fn(*args)
        return wraps(fn)(wrapped)
    return inner

def expose(cmd, args=None):
    def decorator(fn):
        commands[cmd] = {"func":fn, "args":args}
        return fn
    return decorator

def plugin_run(name, *args):
    if name in commands:
        return commands.get(name)["func"](*args)

def check_args(name, bot, ievent):
    if name in commands:
        if commands.get(name)["args"]:
            # TODO: check if we have all the arguments
            print len(ievent.args), commands.get(name)["args"]
            if len(ievent.args) < commands.get(name)["args"]:
                bot.msg(ievent.channel, "%s: %s" % (ievent.nick, commands.get(name)["func"].__doc__))
                return False
            else:
                return True
        else:
            return True
    return False

from lart import *
from polygen import *
#from bts import *
from misc import *
Disable bts plugin for general usage
Disable bts plugin for general usage
Python
mit
dpaleino/pollirio,dpaleino/pollirio
# -*- coding: utf-8 -*-

from functools import wraps

from pollirio import commands

def old_expose(cmd):
    def inner(fn):
        def wrapped(*args, **kwargs):
            commands[cmd] = fn
            fn(*args)
        return wraps(fn)(wrapped)
    return inner

def expose(cmd, args=None):
    def decorator(fn):
        commands[cmd] = {"func":fn, "args":args}
        return fn
    return decorator

def plugin_run(name, *args):
    if name in commands:
        return commands.get(name)["func"](*args)

def check_args(name, bot, ievent):
    if name in commands:
        if commands.get(name)["args"]:
            # TODO: check if we have all the arguments
            print len(ievent.args), commands.get(name)["args"]
            if len(ievent.args) < commands.get(name)["args"]:
                bot.msg(ievent.channel, "%s: %s" % (ievent.nick, commands.get(name)["func"].__doc__))
                return False
            else:
                return True
        else:
            return True
    return False

from lart import *
from polygen import *
#from bts import *
from misc import *

Disable bts plugin for general usage

# -*- coding: utf-8 -*-

from functools import wraps

from pollirio import commands

def old_expose(cmd):
    def inner(fn):
        def wrapped(*args, **kwargs):
            commands[cmd] = fn
            fn(*args)
        return wraps(fn)(wrapped)
    return inner

def expose(cmd, args=None):
    def decorator(fn):
        commands[cmd] = {"func":fn, "args":args}
        return fn
    return decorator

def plugin_run(name, *args):
    if name in commands:
        return commands.get(name)["func"](*args)

def check_args(name, bot, ievent):
    if name in commands:
        if commands.get(name)["args"]:
            # TODO: check if we have all the arguments
            print len(ievent.args), commands.get(name)["args"]
            if len(ievent.args) < commands.get(name)["args"]:
                bot.msg(ievent.channel, "%s: %s" % (ievent.nick, commands.get(name)["func"].__doc__))
                return False
            else:
                return True
        else:
            return True
    return False

from lart import *
from polygen import *
from bts import *
from misc import *
8223d62c22d4c4f7a66e1e468de53556796a03a9
src/functions/exercise7.py
src/functions/exercise7.py
"""Module docstring.

This serves as a long usage message.

"""
import sys
import getopt

def main():
    # parse command line options
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h", ["help"])
    except getopt.error, msg:
        print msg
        print "for help use --help"
        sys.exit(2)
    # process options
    for o, a in opts:
        if o in ("-h", "--help"):
            print __doc__
            sys.exit(0)
    # process arguments
    for arg in args:
        process(arg) # process() is defined elsewhere

if __name__ == "__main__":
    main()
Write a function that print something n times including relatives spaces
Write a function that print something n times including relatives spaces
Python
mit
let42/python-course
"""Module docstring.

This serves as a long usage message.

"""
import sys
import getopt

def main():
    # parse command line options
    try:
        opts, args = getopt.getopt(sys.argv[1:], "h", ["help"])
    except getopt.error, msg:
        print msg
        print "for help use --help"
        sys.exit(2)
    # process options
    for o, a in opts:
        if o in ("-h", "--help"):
            print __doc__
            sys.exit(0)
    # process arguments
    for arg in args:
        process(arg) # process() is defined elsewhere

if __name__ == "__main__":
    main()
Write a function that print something n times including relatives spaces
556e6ba4d9bc32384526501acbbc4c0c2b6f983e
mopidy/frontends/mpd/__init__.py
mopidy/frontends/mpd/__init__.py
import logging

from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection

logger = logging.getLogger('mopidy.frontends.mpd')

class MpdFrontend(object):
    """
    The MPD frontend.

    **Settings:**

    - :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
    - :attr:`mopidy.settings.MPD_SERVER_PORT`
    """

    def __init__(self, core_queue, backend):
        self.core_queue = core_queue
        self.process = None
        self.dispatcher = MpdDispatcher(backend)

    def start(self):
        """Starts the MPD server."""
        self.process = MpdProcess(self.core_queue)
        self.process.start()

    def process_message(self, message):
        """
        Processes messages with the MPD frontend as destination.

        :param message: the message
        :type message: dict
        """
        assert message['to'] == 'frontend', \
            u'Message recipient must be "frontend".'
        if message['command'] == 'mpd_request':
            response = self.dispatcher.handle_request(message['request'])
            connection = unpickle_connection(message['reply_to'])
            connection.send(response)
        else:
            logger.warning(u'Cannot handle message: %s', message)

import logging

from mopidy.frontends.base import BaseFrontend
from mopidy.frontends.mpd.dispatcher import MpdDispatcher
from mopidy.frontends.mpd.process import MpdProcess
from mopidy.utils.process import unpickle_connection

logger = logging.getLogger('mopidy.frontends.mpd')

class MpdFrontend(BaseFrontend):
    """
    The MPD frontend.

    **Settings:**

    - :attr:`mopidy.settings.MPD_SERVER_HOSTNAME`
    - :attr:`mopidy.settings.MPD_SERVER_PORT`
    """

    def __init__(self, *args, **kwargs):
        super(MpdFrontend, self).__init__(*args, **kwargs)
        self.process = None
        self.dispatcher = MpdDispatcher(self.backend)

    def start(self):
        """Starts the MPD server."""
        self.process = MpdProcess(self.core_queue)
        self.process.start()

    def destroy(self):
        """Destroys the MPD server."""
        self.process.destroy()

    def process_message(self, message):
        """
        Processes messages with the MPD frontend as destination.

        :param message: the message
        :type message: dict
        """
        assert message['to'] == 'frontend', \
            u'Message recipient must be "frontend".'
        if message['command'] == 'mpd_request':
            response = self.dispatcher.handle_request(message['request'])
            connection = unpickle_connection(message['reply_to'])
            connection.send(response)
        else:
            logger.warning(u'Cannot handle message: %s', message)
Make MpdFrontend a subclass of BaseFrontend
Make MpdFrontend a subclass of BaseFrontend
Python
apache-2.0
kingosticks/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,tkem/mopidy,rawdlite/mopidy,diandiankan/mopidy,ZenithDK/mopidy,jcass77/mopidy,jodal/mopidy,SuperStarPL/mopidy,hkariti/mopidy,mokieyue/mopidy,rawdlite/mopidy,bacontext/mopidy,quartz55/mopidy,diandiankan/mopidy,tkem/mopidy,woutervanwijk/mopidy,adamcik/mopidy,swak/mopidy,quartz55/mopidy,mokieyue/mopidy,rawdlite/mopidy,abarisain/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,dbrgn/mopidy,bacontext/mopidy,diandiankan/mopidy,bacontext/mopidy,vrs01/mopidy,hkariti/mopidy,ZenithDK/mopidy,bacontext/mopidy,priestd09/mopidy,abarisain/mopidy,woutervanwijk/mopidy,jmarsik/mopidy,ali/mopidy,ali/mopidy,jmarsik/mopidy,mopidy/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,quartz55/mopidy,dbrgn/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,bencevans/mopidy,mopidy/mopidy,jodal/mopidy,swak/mopidy,adamcik/mopidy,pacificIT/mopidy,bencevans/mopidy,diandiankan/mopidy,quartz55/mopidy,jcass77/mopidy,bencevans/mopidy,liamw9534/mopidy,SuperStarPL/mopidy,swak/mopidy,tkem/mopidy,pacificIT/mopidy,ZenithDK/mopidy,kingosticks/mopidy,priestd09/mopidy,glogiotatidis/mopidy,swak/mopidy,ali/mopidy,kingosticks/mopidy,hkariti/mopidy,jcass77/mopidy,priestd09/mopidy,pacificIT/mopidy,jmarsik/mopidy,vrs01/mopidy,bencevans/mopidy,vrs01/mopidy,pacificIT/mopidy,hkariti/mopidy,vrs01/mopidy,dbrgn/mopidy,tkem/mopidy,ali/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,mopidy/mopidy,liamw9534/mopidy,jodal/mopidy,adamcik/mopidy
b0cf9904023c5ee20c5f29b3e88899420405550b
examples/puttiff.py
examples/puttiff.py
# Copyright 2014 Open Connectome Project (http://openconnecto.me)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import numpy as np
import urllib, urllib2
import cStringIO
import sys
import zlib
import libtiff

def main():
    parser = argparse.ArgumentParser(description='Post a file as a tiff')
    parser.add_argument('baseurl', action="store" )
    parser.add_argument('token', action="store" )
    parser.add_argument('channel', action="store" )
    parser.add_argument('filename', action="store" )
    parser.add_argument('resolution', action="store", type=int, default=0 )
    parser.add_argument('xoffset', action="store", type=int, default=0 )
    parser.add_argument('yoffset', action="store", type=int, default=0)
    parser.add_argument('zoffset', action="store", type=int, default=0)

    result = parser.parse_args()

    url = 'http://%s/ca/%s/%s/tiff/%s/%s/%s/%s/' % ( result.baseurl, result.token, result.channel, result.resolution, result.xoffset, result.yoffset, result.zoffset )

    # open the file name as a tiff file
    fh = open ( result.filename )

    # Get cube in question
    try:
        f = urllib2.urlopen ( url, fh.read() )
    except urllib2.URLError, e:
        print "Failed %s. Exception %s." % (url,e)
        sys.exit(-1)

if __name__ == "__main__":
    main()
Migrate this version to new workstation.
Migrate this version to new workstation.
Python
apache-2.0
neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore,openconnectome/open-connectome,neurodata/ndstore,openconnectome/open-connectome,openconnectome/open-connectome,neurodata/ndstore
874816497e7a9bd0e091a62a9e9b33ae832eb130
pyjsonts/time_series_json.py
pyjsonts/time_series_json.py
import json
import ijson

class TimeSeriesJSON:
    def __init__(self, f=None, fn=None, tag='item'):
        """
        :param f: file object (_io.TextIOWrapper)
        :param fn: file name as a string
        :param tag: tag for dividing json items
            default value is 'item' because this value is default in ijson
        """
        if f is not None:
            self.__type = 'file'
            self.__file = f
        elif fn is not None:
            self.__type = 'file_name'
            self.__file_name = fn
            self.__file = open(fn)

        self.__items = self.parse_json_items(tag)

    def parse_json_items(self, tag, limit=0):
        self.__items = []
        self.__file.seek(0)

        cnt = 0
        objs = ijson.items(self.__file, tag)
        for obj in objs:
            item = json.dumps(obj, \
                              sort_keys=True, \
                              indent=4, \
                              ensure_ascii=True)
            self.__items.append(item)

            cnt += 1
            if limit != 0 and cnt >= limit:
                break

        return self.__items
import json
import ijson

class TimeSeriesJSON:
    def __init__(self, f=None, fn=None, tag='item'):
        """
        :param f: file object (_io.TextIOWrapper)
        :param fn: file name as a string
        :param tag: tag for dividing json items
            default value is 'item' because this value is default in ijson
        """
        if f is not None:
            self.__type = 'file'
            self.__file = f
        elif fn is not None:
            self.__type = 'file_name'
            self.__file_name = fn
            self.__file = open(fn)

        self.__items = self.parse_json_items(tag)

    def parse_json_items(self, tag, limit=0):
        self.__items = []
        self.__file.seek(0)

        cnt = 0
        objs = ijson.items(self.__file, tag)
        for obj in objs:
            item = json.dumps(obj, sort_keys=True, indent=4, ensure_ascii=True)
            self.__items.append(item)

            cnt += 1
            if limit != 0 and cnt >= limit:
                break

        return self.__items
Remove unnecessary backslashes in parse_json_items
Remove unnecessary backslashes in parse_json_items
Python
apache-2.0
jeongmincha/pyjsonts
d66355e4758b37be39d17d681ede1dbbd6b9b311
setmagic/admin.py
setmagic/admin.py
from django import forms
from django.contrib import admin

from setmagic import settings
from setmagic.models import Setting

_denied = lambda *args: False

class SetMagicAdmin(admin.ModelAdmin):
    list_display = 'label', 'current_value',
    list_editable = 'current_value',
    list_display_links = None
    has_add_permission = _denied
    has_delete_permission = _denied

    # Make all fields read-only at the change form
    def get_readonly_fields(self, *args, **kwargs):
        return self.opts.get_all_field_names()

    def changelist_view(self, *args, **kwargs):
        settings._sync()
        return super(SetMagicAdmin, self).changelist_view(*args, **kwargs)

    def get_queryset(self, request):
        return Setting.objects.filter(name__in=settings.defs)

    def get_changelist_form(self, *args, **kwargs):
        class Form(forms.ModelForm):
            class Meta:
                fields = self.list_editable

            def __init__(self, *args, **kwargs):
                super(Form, self).__init__(*args, **kwargs)

                # Do nothing for empty forms
                if not self.instance.pk:
                    return

                # Set a custom field
                custom_field = settings.defs[self.instance.name].get('field')
                if custom_field:
                    self.fields['current_value'] = custom_field

        return Form

admin.site.register(Setting, SetMagicAdmin)
from django import forms
from django.contrib import admin
from django.utils.importlib import import_module

from setmagic import settings
from setmagic.models import Setting

_denied = lambda *args: False

class SetMagicAdmin(admin.ModelAdmin):
    list_display = 'label', 'current_value',
    list_editable = 'current_value',
    list_display_links = None
    has_add_permission = _denied
    has_delete_permission = _denied

    # Make all fields read-only at the change form
    def get_readonly_fields(self, *args, **kwargs):
        return self.opts.get_all_field_names()

    def changelist_view(self, *args, **kwargs):
        settings._sync()
        return super(SetMagicAdmin, self).changelist_view(*args, **kwargs)

    def get_queryset(self, request):
        return Setting.objects.filter(name__in=settings.defs)

    def get_changelist_form(self, *args, **kwargs):
        class Form(forms.ModelForm):
            class Meta:
                fields = self.list_editable

            def __init__(self, *args, **kwargs):
                super(Form, self).__init__(*args, **kwargs)

                # Do nothing for empty forms
                if not self.instance.pk:
                    return

                # Set a custom field
                custom_field = settings.defs[self.instance.name].get('field')
                if custom_field:
                    if isinstance(custom_field, str):
                        module, name = custom_field.rsplit('.', 1)
                        custom_field = getattr(import_module(module), name)()
                    self.fields['current_value'] = custom_field

        return Form

admin.site.register(Setting, SetMagicAdmin)
Use importlib to load custom fields by str
Use importlib to load custom fields by str
Python
mit
7ws/django-setmagic
e4345634ea6a4c43db20ea1d3d33134b6ee6204d
alembic/versions/151b2f642877_text_to_json.py
alembic/versions/151b2f642877_text_to_json.py
"""text to JSON Revision ID: 151b2f642877 Revises: aee7291c81 Create Date: 2015-06-12 14:40:56.956657 """ # revision identifiers, used by Alembic. revision = '151b2f642877' down_revision = 'aee7291c81' from alembic import op import sqlalchemy as sa def upgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) def downgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query)
"""text to JSON Revision ID: 151b2f642877 Revises: ac115763654 Create Date: 2015-06-12 14:40:56.956657 """ # revision identifiers, used by Alembic. revision = '151b2f642877' down_revision = 'ac115763654' from alembic import op import sqlalchemy as sa def upgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) def downgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query)
Fix alembic revision after merge master
Fix alembic revision after merge master
Python
agpl-3.0
OpenNewsLabs/pybossa,PyBossa/pybossa,PyBossa/pybossa,Scifabric/pybossa,jean/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,jean/pybossa,Scifabric/pybossa,geotagx/pybossa
"""text to JSON Revision ID: 151b2f642877 Revises: ac115763654 Create Date: 2015-06-12 14:40:56.956657 """ # revision identifiers, used by Alembic. revision = '151b2f642877' down_revision = 'ac115763654' from alembic import op import sqlalchemy as sa def upgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) def downgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query)
Fix alembic revision after merge master """text to JSON Revision ID: 151b2f642877 Revises: aee7291c81 Create Date: 2015-06-12 14:40:56.956657 """ # revision identifiers, used by Alembic. revision = '151b2f642877' down_revision = 'aee7291c81' from alembic import op import sqlalchemy as sa def upgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;' op.execute(query) def downgrade(): query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query) query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;' op.execute(query)
e8c8464d36e91c9a8d61db0531a2e73dcdee88b7
utilities/tests/test_simulation_utils.py
utilities/tests/test_simulation_utils.py
from utilities.simulation_utilities import check_inputs
import pytest
import numpy as np

@pytest.mark.parametrize("input,expected", [
    (None, np.ndarray([0])),
    ([0], np.array([0])),
    (1, np.array([1])),
    (range(5), np.array([0,1,2,3,4]))
])
def test_check_inputs(input, expected):
    assert np.allclose(check_inputs(input), expected)
Add a test for check_inputs.
Add a test for check_inputs.
Python
mit
jason-neal/companion_simulations,jason-neal/companion_simulations
a47b82f7feb18da55cf402e363508141764a180f
2014/round-1/labelmaker-v2.py
2014/round-1/labelmaker-v2.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

def solve():
    L, N = input().split(' ')
    N = int(N)
    result = ''
    while N > 0:
        N -= 1
        result = L[N % len(L)] + result
        N = int(N / len(L))
    return result

def main():
    T = int(input())
    for i in range(T):
        print('Case #{i}: {answer}'.format(i=i+1, answer=solve()))

if __name__ == '__main__':
    main()
Add solution v2 for Labelmaker.
Add solution v2 for Labelmaker.
Python
mit
changyuheng/hacker-cup-solutions
bdd842f55f3a234fefee4cd2a701fa23e07c3789
scikits/umfpack/setup.py
scikits/umfpack/setup.py
#!/usr/bin/env python
# 05.12.2005, c
from __future__ import division, print_function, absolute_import

def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    umf_info = get_info('umfpack', notfound_action=1)

    ## The following addition is needed when linking against a umfpack built
    ## from the latest SparseSuite. Not (strictly) needed when linking against
    ## the version in the ubuntu repositories.
    umf_info['libraries'].insert(0, 'rt')

    umfpack_i_file = config.paths('umfpack.i')[0]

    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    config.add_extension('__umfpack',
                         sources=[umfpack_i],
                         depends=['umfpack.i'],
                         **build_info)

    return config

if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
#!/usr/bin/env python
# 05.12.2005, c
from __future__ import division, print_function, absolute_import

import sys

def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    umf_info = get_info('umfpack', notfound_action=1)

    ## The following addition is needed when linking against a umfpack built
    ## from the latest SparseSuite. Not (strictly) needed when linking against
    ## the version in the ubuntu repositories.
    if not sys.platform == 'darwin':
        umf_info['libraries'].insert(0, 'rt')

    umfpack_i_file = config.paths('umfpack.i')[0]

    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    config.add_extension('__umfpack',
                         sources=[umfpack_i],
                         depends=['umfpack.i'],
                         **build_info)

    return config

if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.
Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.
Python
bsd-3-clause
scikit-umfpack/scikit-umfpack,scikit-umfpack/scikit-umfpack,rc/scikit-umfpack-rc,rc/scikit-umfpack,rc/scikit-umfpack,rc/scikit-umfpack-rc
1473af1b50da6390e1b4475ae63d5a28f712e791
tests/test_frijoles.py
tests/test_frijoles.py
import unittest

from frijoles import app

class TamalesAPITestCase(unittest.TestCase):
    def setUp(self):
        self.app = app.test_client()

    def test_basic(self):
        res = self.app.get('/api/v1/')
        self.assertEqual(res.status_code, 200)
import unittest

from frijoles import app

class FrijolesAPITestCase(unittest.TestCase):
    def setUp(self):
        self.app = app.test_client()

    def test_basic(self):
        res = self.app.get('/api/v1/')
        self.assertEqual(res.status_code, 200)
Fix wrong test case name
Fix wrong test case name
Python
agpl-3.0
Antojitos/frijoles
c32bdff4b0ee570ed58cd869830d89e3251cf82a
pytils/test/__init__.py
pytils/test/__init__.py
# -*- coding: utf-8 -*-
"""
Unit tests for pytils
"""

__all__ = ["test_numeral", "test_dt", "test_translit", "test_utils", "test_typo"]

import unittest

def get_django_suite():
    try:
        import django
    except ImportError:
        return unittest.TestSuite()

    import pytils.test.templatetags
    return pytils.test.templatetags.get_suite()

def get_suite():
    """Return TestSuite for all unit-test of pytils"""
    suite = unittest.TestSuite()
    for module_name in __all__:
        imported_module = __import__("pytils.test."+module_name,
                                     globals(), locals(), ["pytils.test"])
        loader = unittest.defaultTestLoader
        suite.addTest(loader.loadTestsFromModule(imported_module))

    suite.addTest(get_django_suite())

    return suite

def run_tests_from_module(module, verbosity=1):
    """Run unit-tests for single module"""
    suite = unittest.TestSuite()
    loader = unittest.defaultTestLoader
    suite.addTest(loader.loadTestsFromModule(module))
    unittest.TextTestRunner(verbosity=verbosity).run(suite)

def run(verbosity=1):
    """Run all unit-test of pytils"""
    suite = get_suite()
    unittest.TextTestRunner(verbosity=verbosity).run(suite)

if __name__ == '__main__':
    run(2)
# -*- coding: utf-8 -*-
"""
Unit tests for pytils
"""

__all__ = ["test_numeral", "test_dt", "test_translit", "test_utils", "test_typo"]

import unittest
import sys

def get_django_suite():
    try:
        import django
    except ImportError:
        return unittest.TestSuite()

    import pytils.test.templatetags
    return pytils.test.templatetags.get_suite()

def get_suite():
    """Return TestSuite for all unit-test of pytils"""
    suite = unittest.TestSuite()
    for module_name in __all__:
        imported_module = __import__("pytils.test."+module_name,
                                     globals(), locals(), ["pytils.test"])
        loader = unittest.defaultTestLoader
        suite.addTest(loader.loadTestsFromModule(imported_module))

    suite.addTest(get_django_suite())

    return suite

def run_tests_from_module(module, verbosity=1):
    """Run unit-tests for single module"""
    suite = unittest.TestSuite()
    loader = unittest.defaultTestLoader
    suite.addTest(loader.loadTestsFromModule(module))
    unittest.TextTestRunner(verbosity=verbosity).run(suite)

def run(verbosity=1):
    """Run all unit-test of pytils"""
    suite = get_suite()
    res = unittest.TextTestRunner(verbosity=verbosity).run(suite)
    if res.errors or res.failures:
        sys.exit(1)

if __name__ == '__main__':
    run(2)
Exit with non-0 status if there are failed tests or errors.
Py3: Exit with non-0 status if there are failed tests or errors.
Python
mit
Forever-Young/pytils,j2a/pytils
54b3b69d152611d55ce7db66c2c34dc2b1140cc7
wellknown/models.py
wellknown/models.py
from django.db import models
from django.db.models.signals import post_save
import mimetypes
import wellknown

#
# create default host-meta handler
#

from wellknown.resources import HostMeta
wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml')

#
# resource model
#

class Resource(models.Model):
    path = models.CharField(max_length=128)
    content = models.TextField(blank=True)
    content_type = models.CharField(max_length=128, blank=True)

    class Meta:
        ordering = ('path',)

    def __unicode__(self):
        return self.path

    def save(self, **kwargs):
        self.path = self.path.strip('/')
        if not self.content_type:
            self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain'
        super(Resource, self).save(**kwargs)

#
# update resources when models are saved
#

def save_handler(sender, **kwargs):
    reg = kwargs['instance']
    wellknown.register(
        reg.path,
        content=reg.content,
        content_type=reg.content_type,
        update=True
    )

post_save.connect(save_handler, sender=Resource)

#
# cache resources
#

for res in Resource.objects.all():
    wellknown.register(res.path, content=res.content, content_type=res.content_type)
from django.db import models
from django.db.models.signals import post_save
import mimetypes
import wellknown

#
# create default host-meta handler
#

from wellknown.resources import HostMeta
wellknown.register('host-meta', handler=HostMeta(), content_type='application/xrd+xml')

#
# resource model
#

class Resource(models.Model):
    path = models.CharField(max_length=128)
    content = models.TextField(blank=True)
    content_type = models.CharField(max_length=128, blank=True)

    class Meta:
        ordering = ('path',)

    def __unicode__(self):
        return self.path

    def save(self, **kwargs):
        self.path = self.path.strip('/')
        if not self.content_type:
            self.content_type = mimetypes.guess_type(self.path)[0] or 'text/plain'
        super(Resource, self).save(**kwargs)

#
# update resources when models are saved
#

def save_handler(sender, **kwargs):
    reg = kwargs['instance']
    wellknown.register(
        reg.path,
        content=reg.content,
        content_type=reg.content_type,
        update=True
    )

post_save.connect(save_handler, sender=Resource)
Remove code that was causing a problem running syncdb. Code seems to be redundant anyway.
Remove code that was causing a problem running syncdb. Code seems to be redundant anyway.
Python
bsd-3-clause
jcarbaugh/django-wellknown
ee0d901f0eb8c098e715485efb7d43ade4a8aeb8
tests/test_nsq.py
tests/test_nsq.py
import os
import unittest

import numpy as np
import chainer
from chainer import optimizers

import q_function
import nstep_q_learning
import async
import simple_abc
import random_seed
import replay_buffer
from simple_abc import ABC

class TestNSQ(unittest.TestCase):

    def setUp(self):
        pass

    def test_abc(self):
        self._test_abc(1)
        self._test_abc(5)

    def _test_abc(self, t_max):
        nproc = 8

        def agent_func():
            n_actions = 3
            q_func = q_function.FCSIQFunction(1, n_actions, 10, 2)
            opt = optimizers.RMSprop(1e-3, eps=1e-2)
            opt.setup(q_func)
            return nstep_q_learning.NStepQLearning(q_func, opt, t_max, 0.9,
                                                   0.1, i_target=10)

        def env_func():
            return simple_abc.ABC()

        def run_func(agent, env):
            total_r = 0
            episode_r = 0

            for i in xrange(5000):
                total_r += env.reward
                episode_r += env.reward

                action = agent.act(env.state, env.reward, env.is_terminal)

                if env.is_terminal:
                    print 'i:{} episode_r:{}'.format(i, episode_r)
                    episode_r = 0
                    env.initialize()
                else:
                    env.receive_action(action)

            print 'pid:{}, total_r:{}'.format(os.getpid(), total_r)

            return agent

        # Train
        final_agent = async.run_async(nproc, agent_func, env_func, run_func)

        # Test
        env = env_func()
        total_r = env.reward
        while not env.is_terminal:
            action = final_agent.q_function.sample_greedily_with_value(
                env.state.reshape((1,) + env.state.shape))[0][0]
            print 'state:', env.state, 'action:', action
            env.receive_action(action)
            total_r += env.reward
        self.assertAlmostEqual(total_r, 1)
Add an ABC test for n-step Q-learning
Add an ABC test for n-step Q-learning
Python
mit
toslunar/chainerrl,toslunar/chainerrl
0e740b5fd924b113173b546f2dd2b8fa1e55d074
indra/sparser/sparser_api.py
indra/sparser/sparser_api.py
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor

logger = logging.getLogger('sparser')

def process_xml(xml_str):
    try:
        tree = ET.XML(xml_str, parser=UTB())
    except ET.ParseError:
        logger.error('Could not parse XML string')
        return None
    sp = _process_elementtree(tree)
    return sp

def _process_elementtree(tree):
    sp = SparserProcessor(tree)
    sp.get_modifications()
    sp.get_activations()
    return sp
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
from indra.sparser.processor import SparserProcessor

logger = logging.getLogger('sparser')

def process_xml(xml_str):
    try:
        tree = ET.XML(xml_str, parser=UTB())
    except ET.ParseError as e:
        logger.error('Could not parse XML string')
        logger.error(e)
        return None
    sp = _process_elementtree(tree)
    return sp

def _process_elementtree(tree):
    sp = SparserProcessor(tree)
    sp.get_modifications()
    sp.get_activations()
    return sp
Print XML parse errors in Sparser API
Print XML parse errors in Sparser API
Python
bsd-2-clause
sorgerlab/belpy,bgyori/indra,johnbachman/belpy,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,sorgerlab/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/indra
3a711d6005b16fcc6faf19c80f292ad6ef25455c
sqlserver_ado/__init__.py
sqlserver_ado/__init__.py
import os.path

VERSION = (1, 0, 0, 'stable')

def get_version():
    """
    Return the version as a string. If this is flagged as a development
    release and mercurial can be loaded the specifics about the changeset
    will be appended to the version string.
    """
    if 'dev' in VERSION:
        try:
            from mercurial import hg, ui
            repo_path = os.path.join(os.path.dirname(__file__), '..')
            repo = hg.repository(ui.ui(), repo_path)
            ctx = repo['tip']
            build_info = 'dev %s %s:%s' % (ctx.branch(), ctx.rev(), str(ctx))
        except:
            # mercurial module missing or repository not found
            build_info = 'dev-unknown'
        v = VERSION[:VERSION.index('dev')] + (build_info,)
    return '.'.join(map(str, v))
import os.path

VERSION = (1, 0, 1, 'stable')

def get_version():
    """
    Return the version as a string. If this is flagged as a development
    release and mercurial can be loaded the specifics about the changeset
    will be appended to the version string.
    """
    if 'dev' in VERSION:
        try:
            from mercurial import hg, ui
            repo_path = os.path.join(os.path.dirname(__file__), '..')
            repo = hg.repository(ui.ui(), repo_path)
            ctx = repo['tip']
            build_info = 'dev %s %s:%s' % (ctx.branch(), ctx.rev(), str(ctx))
        except:
            # mercurial module missing or repository not found
            build_info = 'dev-unknown'
        v = VERSION[:VERSION.index('dev')] + (build_info,)
    return '.'.join(map(str, v))
Bump version to 1.0.1 for unit test fix.
Bump version to 1.0.1 for unit test fix.
Python
mit
theoriginalgri/django-mssql,theoriginalgri/django-mssql
d5cf661b2658d7f9a0f5436444373202e514bf37
src/psd_tools2/__init__.py
src/psd_tools2/__init__.py
from __future__ import absolute_import, unicode_literals

from .api.psd_image import PSDImage
from __future__ import absolute_import, unicode_literals

from .api.psd_image import PSDImage
from .api.composer import compose
Include compose in the top level
Include compose in the top level
Python
mit
kmike/psd-tools,psd-tools/psd-tools,kmike/psd-tools
3540f827e12960b5ce48608249514051bb02cf61
setup.py
setup.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors

import os
import sys

import mithril

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

required = [
    'netaddr==0.7.6',
]

packages = [
    'mithril',
    'mithril.tests',
]

setup(
    name='django-mithril',
    version='%d.%d.%d' % mithril.__version__,
    description='IP Whitelisting for Django',
    long_description=open('README.md').read(),
    author='Chris Dickinson',
    author_email='[email protected]',
    url='http://urbanairship.github.com/django-mithril/',
    packages=packages,
    package_data={'': ['LICENSE']},
    include_package_data=True,
    install_requires=required,
    license=open("LICENSE").read(),
    zip_safe=False,
    classifiers=(
        'Development Status :: 1 - Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ),
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2012 Urban Airship and Contributors

import os
import sys

import mithril

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

required = [
    'netaddr==0.7.6',
]

packages = [
    'mithril',
    'mithril.tests',
    'mithril.migrations',
]

setup(
    name='django-mithril',
    version='%d.%d.%d' % mithril.__version__,
    description='IP Whitelisting for Django',
    long_description=open('README.md').read(),
    author='Chris Dickinson',
    author_email='[email protected]',
    url='http://urbanairship.github.com/django-mithril/',
    packages=packages,
    package_data={'': ['LICENSE']},
    include_package_data=True,
    install_requires=required,
    license=open("LICENSE").read(),
    zip_safe=False,
    classifiers=(
        'Development Status :: 1 - Alpha',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ),
)
Make sure to include migrations! :hurtrealbad:
Make sure to include migrations! :hurtrealbad:
Python
bsd-3-clause
urbanairship/django-mithril,urbanairship/django-mithril
6ec0b59c3f105f13503acaab691bccf3a6bf70b1
test/runtest/testargv.py
test/runtest/testargv.py
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

"""
Test subdir args for runtest.py, for example:

    python runtest.py test/subdir
"""

import os

import TestRuntest

test = TestRuntest.TestRuntest()

test.subdir('test', ['test', 'subdir'])

files = {}
files['pythonstring'] = TestRuntest.pythonstring
files['one'] = os.path.join('test/subdir', 'test_one.py')
files['two'] = os.path.join('test/subdir', 'two.py')
files['three'] = os.path.join('test', 'test_three.py')

test.write_passing_test(files['one'])
test.write_passing_test(files['two'])
test.write_passing_test(files['three'])

expect_stdout = """\
%(pythonstring)s -tt %(one)s
PASSING TEST STDOUT
%(pythonstring)s -tt %(two)s
PASSING TEST STDOUT
""" % files

expect_stderr = """\
PASSING TEST STDERR
PASSING TEST STDERR
"""

test.run(arguments = '--no-progress test/subdir',
         status = 0,
         stdout = expect_stdout,
         stderr = expect_stderr)

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
Add test for "runtest test/somedir" case
Add test for "runtest test/somedir" case
Python
mit
andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons,andrewyoung1991/scons
2539f8adbe2b7deed2974c4245fd8087a8f05e65
wluopensource/osl_comments/models.py
wluopensource/osl_comments/models.py
from django.contrib.comments.models import Comment from django.db import models class OslComment(Comment): parent_comment = models.ForeignKey(Comment, blank=True, null=True, related_name='parent_comment') inline_to_object = models.BooleanField() edit_timestamp = models.DateTimeField(auto_now=True)
from django.contrib.comments.models import Comment from django.contrib.comments.signals import comment_was_posted from django.db import models class OslComment(Comment): parent_comment = models.ForeignKey(Comment, blank=True, null=True, related_name='parent_comment') inline_to_object = models.BooleanField() edit_timestamp = models.DateTimeField(auto_now=True) def comment_user_url_injection_handler(sender, **kwargs): if 'request' in kwargs and kwargs['request'].user.is_authenticated() and \ 'comment' in kwargs: comment = kwargs['comment'] comment.url = comment.user.get_profile().url comment.save() comment_was_posted.connect(comment_user_url_injection_handler)
Use signals to add authenticated user URL to comment when posted
Use signals to add authenticated user URL to comment when posted
Python
bsd-3-clause
jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website
from django.contrib.comments.models import Comment from django.contrib.comments.signals import comment_was_posted from django.db import models class OslComment(Comment): parent_comment = models.ForeignKey(Comment, blank=True, null=True, related_name='parent_comment') inline_to_object = models.BooleanField() edit_timestamp = models.DateTimeField(auto_now=True) def comment_user_url_injection_handler(sender, **kwargs): if 'request' in kwargs and kwargs['request'].user.is_authenticated() and \ 'comment' in kwargs: comment = kwargs['comment'] comment.url = comment.user.get_profile().url comment.save() comment_was_posted.connect(comment_user_url_injection_handler)
Use signals to add authenticated user URL to comment when posted from django.contrib.comments.models import Comment from django.db import models class OslComment(Comment): parent_comment = models.ForeignKey(Comment, blank=True, null=True, related_name='parent_comment') inline_to_object = models.BooleanField() edit_timestamp = models.DateTimeField(auto_now=True)
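The handler in this commit relies on Django's comment_was_posted signal, which passes the saved comment and the request as keyword arguments. A minimal standalone sketch of the same wiring, with a dispatch_uid added so repeated imports cannot connect the handler twice (the uid string is hypothetical):

from django.contrib.comments.signals import comment_was_posted

def inject_user_url(sender, comment=None, request=None, **kwargs):
    # Only touch comments posted by logged-in users; anonymous posters
    # keep whatever URL they typed into the form.
    if request is not None and request.user.is_authenticated() and comment:
        comment.url = comment.user.get_profile().url
        comment.save()

comment_was_posted.connect(inject_user_url,
                           dispatch_uid='osl-comment-url-injection')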
d9330450854e5fd7b7e9d038283c8fb80058cc2e
scripts/ensure_tilesize.py
scripts/ensure_tilesize.py
#!/usr/bin/python # # This is a helper script to ensure an image has the correct tile size. # It uses pgmagick[1] to read and (if needed) correct the image. To use # it on a number of files one could use e.g. the find command: # # find <data-folder> -name *.jpg -exec scripts/ensure_tilesize.py {} 256 \; # # [1] http://pypi.python.org/pypi/pgmagick/ import sys import os from pgmagick import Image, Geometry, Color, CompositeOperator as co # Make sure we got the arguments we expect if len(sys.argv) != 3: print >> sys.stderr, "Usage: ensure_tilesize.py <FILENAME> <TILESIZE>" sys.exit(1) image_path = sys.argv[1] tile_size = int(sys.argv[2]) # Make sure the file actually exists if not os.path.exists(image_path): print >> sys.stderr, "Could not find file!" sys.exit(1) # Get properties of image image = Image(image_path) image_width = image.size().width() image_height = image.size().height() image_name = image.fileName() # If the image has the correct size, just exit if image_width == tile_size and image_height == tile_size: sys.exit(0) # A new image with the correct size is needed, create it geometry = Geometry(tile_size, tile_size) color = Color("black") new_image = Image(geometry, color) # Copy original image to position 0,0 of new image new_image.composite(image, 0, 0, co.OverCompositeOp) # Override original image new_image.write(image_name) print >> sys.stdout, "Corrected " + image_name + " from " + str(image_width) + "x" + str(image_height) + " to " + str(tile_size) + "x" + str(tile_size)
Add script to ensure the correct tile size of a file
Add script to ensure the correct tile size of a file
Python
agpl-3.0
htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID,htem/CATMAID,htem/CATMAID,htem/CATMAID,fzadow/CATMAID,fzadow/CATMAID
#!/usr/bin/python # # This is a helper script to ensure an image has the correct tile size. # It uses pgmagick[1] to read and (if needed) correct the image. To use # it on a number of files one could use e.g. the find command: # # find <data-folder> -name *.jpg -exec scripts/ensure_tilesize.py {} 256 \; # # [1] http://pypi.python.org/pypi/pgmagick/ import sys import os from pgmagick import Image, Geometry, Color, CompositeOperator as co # Make sure we got the arguments we expect if len(sys.argv) != 3: print >> sys.stderr, "Usage: ensure_tilesize.py <FILENAME> <TILESIZE>" sys.exit(1) image_path = sys.argv[1] tile_size = int(sys.argv[2]) # Make sure the file actually exists if not os.path.exists(image_path): print >> sys.stderr, "Could not find file!" sys.exit(1) # Get properties of image image = Image(image_path) image_width = image.size().width() image_height = image.size().height() image_name = image.fileName() # If the image has the correct size, just exit if image_width == tile_size and image_height == tile_size: sys.exit(0) # A new image with the correct size is needed, create it geometry = Geometry(tile_size, tile_size) color = Color("black") new_image = Image(geometry, color) # Copy original image to position 0,0 of new image new_image.composite(image, 0, 0, co.OverCompositeOp) # Override original image new_image.write(image_name) print >> sys.stdout, "Corrected " + image_name + " from " + str(image_width) + "x" + str(image_height) + " to " + str(tile_size) + "x" + str(tile_size)
Add script to ensure the correct tile size of a file
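The script's docstring drives it with find; the same batch run can be written in Python. A sketch under the assumption that the script lives at scripts/ensure_tilesize.py relative to the working directory:

import os
import subprocess

def pad_all_tiles(data_folder, tile_size=256):
    # Mirrors: find <data-folder> -name *.jpg -exec scripts/ensure_tilesize.py {} 256 \;
    for root, _, names in os.walk(data_folder):
        for name in names:
            if name.endswith('.jpg'):
                subprocess.call(['python', 'scripts/ensure_tilesize.py',
                                 os.path.join(root, name), str(tile_size)])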
e1111ad6e8802b3c90df55e05eb695d6db9005e4
import_script/create_users.py
import_script/create_users.py
#!/usr/bin/python import django.contrib.auth.models as auth_models import django.contrib.contenttypes as contenttypes def main(): # Read only user: # auth_models.User.objects.create_user('cube', 'toolkit_admin_readonly@localhost', '***REMOVED***') # Read/write user: user_rw = auth_models.User.objects.create_user('admin', 'toolkit_admin@localhost', '***REMOVED***') # Create dummy ContentType: ct = contenttypes.models.ContentType.objects.get_or_create( model='', app_label='toolkit' )[0] # Create 'write' permission: write_permission = auth_models.Permission.objects.get_or_create( name='Write access to all toolkit content', content_type=ct, codename='write' )[0] # Give "admin" user the write permission: user_rw.user_permissions.add(write_permission) if __name__ == "__main__": main()
#!/usr/bin/python import django.contrib.auth.models as auth_models import django.contrib.contenttypes as contenttypes def get_password(): print "*" * 80 password = raw_input("Please enter string to use as admin password: ") check_password = None while check_password != password: print check_password = raw_input("Please re-enter for confirmation: ") return password def main(): # Read only user: # auth_models.User.objects.create_user('cube', 'toolkit_admin_readonly@localhost', '********') # Read/write user: cube_password = get_password() user_rw = auth_models.User.objects.create_user('admin', 'toolkit_admin@localhost', cube_password) # Create dummy ContentType: ct = contenttypes.models.ContentType.objects.get_or_create( model='', app_label='toolkit' )[0] # Create 'write' permission: write_permission = auth_models.Permission.objects.get_or_create( name='Write access to all toolkit content', content_type=ct, codename='write' )[0] # Give "admin" user the write permission: user_rw.user_permissions.add(write_permission) if __name__ == "__main__": main()
Remove cube credentials from import script
Remove cube credentials from import script
Python
agpl-3.0
BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit
#!/usr/bin/python import django.contrib.auth.models as auth_models import django.contrib.contenttypes as contenttypes def get_password(): print "*" * 80 password = raw_input("Please enter string to use as admin password: ") check_password = None while check_password != password: print check_password = raw_input("Please re-enter for confirmation: ") return password def main(): # Read only user: # auth_models.User.objects.create_user('cube', 'toolkit_admin_readonly@localhost', '********') # Read/write user: cube_password = get_password() user_rw = auth_models.User.objects.create_user('admin', 'toolkit_admin@localhost', cube_password) # Create dummy ContentType: ct = contenttypes.models.ContentType.objects.get_or_create( model='', app_label='toolkit' )[0] # Create 'write' permission: write_permission = auth_models.Permission.objects.get_or_create( name='Write access to all toolkit content', content_type=ct, codename='write' )[0] # Give "admin" user the write permission: user_rw.user_permissions.add(write_permission) if __name__ == "__main__": main()
Remove cube credentials from import script #!/usr/bin/python import django.contrib.auth.models as auth_models import django.contrib.contenttypes as contenttypes def main(): # Read only user: # auth_models.User.objects.create_user('cube', 'toolkit_admin_readonly@localhost', '***REMOVED***') # Read/write user: user_rw = auth_models.User.objects.create_user('admin', 'toolkit_admin@localhost', '***REMOVED***') # Create dummy ContentType: ct = contenttypes.models.ContentType.objects.get_or_create( model='', app_label='toolkit' )[0] # Create 'write' permission: write_permission = auth_models.Permission.objects.get_or_create( name='Write access to all toolkit content', content_type=ct, codename='write' )[0] # Give "admin" user the write permission: user_rw.user_permissions.add(write_permission) if __name__ == "__main__": main()
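Once the script has run, the custom permission is checked like any built-in one, using the app_label.codename form. A sketch for a configured Django shell (Python 2, matching the script):

import django.contrib.auth.models as auth_models

admin_user = auth_models.User.objects.get(username='admin')
# Custom permissions are queried as '<app_label>.<codename>'.
print admin_user.has_perm('toolkit.write')  # True for the read/write user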
2f63f134d2c9aa67044eb176a3f81857279f107d
troposphere/utils.py
troposphere/utils.py
import time def get_events(conn, stackname): """Get the events in batches and return in chronological order""" next = None event_list = [] while 1: events = conn.describe_stack_events(stackname, next) event_list.append(events) if events.next_token is None: break next = events.next_token time.sleep(1) return reversed(sum(event_list, [])) def tail(conn, stack_name): """Show and then tail the event log""" def tail_print(e): print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) # First dump the full list of events in chronological order and keep # track of the events we've seen already seen = set() initial_events = get_events(conn, stack_name) for e in initial_events: tail_print(e) seen.add(e.event_id) # Now keep looping through and dump the new events while 1: events = get_events(conn, stack_name) for e in events: if e.event_id not in seen: tail_print(e) seen.add(e.event_id) time.sleep(5)
import time def _tail_print(e): print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) def get_events(conn, stackname): """Get the events in batches and return in chronological order""" next = None event_list = [] while 1: events = conn.describe_stack_events(stackname, next) event_list.append(events) if events.next_token is None: break next = events.next_token time.sleep(1) return reversed(sum(event_list, [])) def tail(conn, stack_name, log_func=_tail_print, sleep_time=5): """Show and then tail the event log""" # First dump the full list of events in chronological order and keep # track of the events we've seen already seen = set() initial_events = get_events(conn, stack_name) for e in initial_events: log_func(e) seen.add(e.event_id) # Now keep looping through and dump the new events while 1: events = get_events(conn, stack_name) for e in events: if e.event_id not in seen: log_func(e) seen.add(e.event_id) time.sleep(sleep_time)
Support a custom logging function and sleep time within tail
Support a custom logging function and sleep time within tail
Python
bsd-2-clause
mhahn/troposphere
import time def _tail_print(e): print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) def get_events(conn, stackname): """Get the events in batches and return in chronological order""" next = None event_list = [] while 1: events = conn.describe_stack_events(stackname, next) event_list.append(events) if events.next_token is None: break next = events.next_token time.sleep(1) return reversed(sum(event_list, [])) def tail(conn, stack_name, log_func=_tail_print, sleep_time=5): """Show and then tail the event log""" # First dump the full list of events in chronological order and keep # track of the events we've seen already seen = set() initial_events = get_events(conn, stack_name) for e in initial_events: log_func(e) seen.add(e.event_id) # Now keep looping through and dump the new events while 1: events = get_events(conn, stack_name) for e in events: if e.event_id not in seen: log_func(e) seen.add(e.event_id) time.sleep(sleep_time)
Support a custom logging function and sleep time within tail import time def get_events(conn, stackname): """Get the events in batches and return in chronological order""" next = None event_list = [] while 1: events = conn.describe_stack_events(stackname, next) event_list.append(events) if events.next_token is None: break next = events.next_token time.sleep(1) return reversed(sum(event_list, [])) def tail(conn, stack_name): """Show and then tail the event log""" def tail_print(e): print("%s %s %s" % (e.resource_status, e.resource_type, e.event_id)) # First dump the full list of events in chronological order and keep # track of the events we've seen already seen = set() initial_events = get_events(conn, stack_name) for e in initial_events: tail_print(e) seen.add(e.event_id) # Now keep looping through and dump the new events while 1: events = get_events(conn, stack_name) for e in events: if e.event_id not in seen: tail_print(e) seen.add(e.event_id) time.sleep(5)
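A usage sketch for the two new hooks, assuming a boto 2 CloudFormation connection and that tail is importable from troposphere.utils; the region, stack name and logger name are placeholders:

import logging

import boto.cloudformation
from troposphere.utils import tail

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('stack-events')

def log_event(e):
    # Same fields the default _tail_print uses, routed through logging.
    log.info('%s %s %s', e.resource_status, e.resource_type, e.event_id)

conn = boto.cloudformation.connect_to_region('us-east-1')
tail(conn, 'my-stack', log_func=log_event, sleep_time=10)  # loops until killed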
9982e62981a7ec0fc7f05dcc8b5eabe11c65d2b3
anthology/representations.py
anthology/representations.py
"""Representation filters for API""" from flask import make_response, current_app from bson.json_util import dumps def output_bson(data, code, headers=None): """Makes Flask response with a BSON encoded body Copied from module `flask_restful.representations.json` """ settings = current_app.config.get('RESTFUL_JSON', {}) # If we're in debug mode, and the indent is not set, we set it to a # reasonable value here. Note that this won't override any existing value # that was set. We also set the "sort_keys" value. if current_app.debug: settings.setdefault('indent', 4) settings.setdefault('sort_keys', True) # always end the json dumps with a new line # see https://github.com/mitsuhiko/flask/pull/1262 dumped = dumps(data, **settings) + "\n" resp = make_response(dumped, code) resp.headers.extend(headers or {}) return resp
"""Representation filters for API""" from flask import make_response, current_app from bson.json_util import dumps def output_bson(data, code, headers=None): """Makes Flask response with a JSON encoded body. Response items are serialized from MongoDB BSON objects to JSON compatible format. Modified from module `flask_restful.representations.json` """ settings = current_app.config.get('RESTFUL_JSON', {}) # If we're in debug mode, and the indent is not set, we set it to a # reasonable value here. Note that this won't override any existing value # that was set. We also set the "sort_keys" value. if current_app.debug: settings.setdefault('indent', 4) settings.setdefault('sort_keys', True) # always end the json dumps with a new line # see https://github.com/mitsuhiko/flask/pull/1262 dumped = dumps(data, **settings) + "\n" resp = make_response(dumped, code) resp.headers.extend(headers or {}) return resp
Correct JSON/BSON terminology in docstrings
Correct JSON/BSON terminology in docstrings
Python
mit
surfmikko/anthology
"""Representation filters for API""" from flask import make_response, current_app from bson.json_util import dumps def output_bson(data, code, headers=None): """Makes Flask response with a JSON encoded body. Response items are serialized from MongoDB BSON objects to JSON compatible format. Modified from module `flask_restful.representations.json` """ settings = current_app.config.get('RESTFUL_JSON', {}) # If we're in debug mode, and the indent is not set, we set it to a # reasonable value here. Note that this won't override any existing value # that was set. We also set the "sort_keys" value. if current_app.debug: settings.setdefault('indent', 4) settings.setdefault('sort_keys', True) # always end the json dumps with a new line # see https://github.com/mitsuhiko/flask/pull/1262 dumped = dumps(data, **settings) + "\n" resp = make_response(dumped, code) resp.headers.extend(headers or {}) return resp
Correct JSON/BSON terminology in docstrings """Representation filters for API""" from flask import make_response, current_app from bson.json_util import dumps def output_bson(data, code, headers=None): """Makes Flask response with a BSON encoded body Copied from module `flask_restful.representations.json` """ settings = current_app.config.get('RESTFUL_JSON', {}) # If we're in debug mode, and the indent is not set, we set it to a # reasonable value here. Note that this won't override any existing value # that was set. We also set the "sort_keys" value. if current_app.debug: settings.setdefault('indent', 4) settings.setdefault('sort_keys', True) # always end the json dumps with a new line # see https://github.com/mitsuhiko/flask/pull/1262 dumped = dumps(data, **settings) + "\n" resp = make_response(dumped, code) resp.headers.extend(headers or {}) return resp
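The wording matters because the response bodies hold BSON types that the standard json module cannot encode. A small demonstration of bson.json_util.dumps (the ObjectId value is arbitrary):

from bson import ObjectId
from bson.json_util import dumps

doc = {'_id': ObjectId('5f43a1e2c9d4b1a2c3d4e5f6'), 'title': 'demo'}
# json.dumps(doc) would raise TypeError; json_util serializes ObjectId
# into MongoDB Extended JSON instead:
print(dumps(doc))  # {"_id": {"$oid": "5f43a1e2c9d4b1a2c3d4e5f6"}, "title": "demo"}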
f7e85968a3256485276858ebfa9ef9cc538e2ee2
blimp/urls.py
blimp/urls.py
from django.conf.urls import patterns, include from django.views.generic import TemplateView from django.contrib import admin admin.autodiscover() urlpatterns = patterns( # Prefix '', (r'^admin/', include(admin.site.urls)), (r'^api/', include('blimp.router')), (r'', include('blimp.users.urls')), # Catch all URL (r'^', TemplateView.as_view(template_name='index.html')) )
from django.conf.urls import patterns, include from django.views.generic import TemplateView from django.contrib import admin admin.autodiscover() urlpatterns = patterns( # Prefix '', (r'^admin/', include(admin.site.urls)), (r'^api/', include('blimp.router')), (r'', include('blimp.users.urls')), # Catch all URL (r'^.*/$', TemplateView.as_view(template_name='index.html')) )
Fix catch all URL to allow APPEND_SLASH to work
Fix catch all URL to allow APPEND_SLASH to work
Python
agpl-3.0
jessamynsmith/boards-backend,jessamynsmith/boards-backend,GetBlimp/boards-backend
from django.conf.urls import patterns, include from django.views.generic import TemplateView from django.contrib import admin admin.autodiscover() urlpatterns = patterns( # Prefix '', (r'^admin/', include(admin.site.urls)), (r'^api/', include('blimp.router')), (r'', include('blimp.users.urls')), # Catch all URL (r'^.*/$', TemplateView.as_view(template_name='index.html')) )
Fix catch all URL to allow APPEND_SLASH to work from django.conf.urls import patterns, include from django.views.generic import TemplateView from django.contrib import admin admin.autodiscover() urlpatterns = patterns( # Prefix '', (r'^admin/', include(admin.site.urls)), (r'^api/', include('blimp.router')), (r'', include('blimp.users.urls')), # Catch all URL (r'^', TemplateView.as_view(template_name='index.html')) )
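The fix works because Django's resolver matches paths without their leading slash, and r'^.*/$' only claims paths that already end in '/'; anything else falls through, letting CommonMiddleware's APPEND_SLASH redirect fire. A quick check of the pattern by itself:

import re

catch_all = re.compile(r'^.*/$')
print(bool(catch_all.match('boards/')))  # True  -> served by the template view
print(bool(catch_all.match('boards')))   # False -> APPEND_SLASH redirects first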
aa6e5e93406cc614d1935f0ee61f28dbc955c2c0
forms.py
forms.py
from flask_wtf import Form from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(Form): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('quotes', 'quotes'), ('numbers', 'numbers')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(Form): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('quotes', 'quotes'), ('numbers', 'numbers')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')]) limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
from flask_wtf import Form from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(Form): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(Form): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')]) limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
Make numbers the default comments style
Make numbers the default comments style
Python
mit
JamieMagee/reddit2kindle,JamieMagee/reddit2kindle
from flask_wtf import Form from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(Form): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(Form): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('numbers', 'numbers'), ('quotes', 'quotes')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')]) limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
Make numbers the default comments style from flask_wtf import Form from flask_wtf.csrf import CsrfProtect from wtforms import StringField, IntegerField, SelectField, BooleanField csrf = CsrfProtect() class Submission(Form): submission = StringField('Submission URL') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('quotes', 'quotes'), ('numbers', 'numbers')]) email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')]) class Subreddit(Form): subreddit = StringField('Subreddit') comments = BooleanField('Include comments') comments_style = SelectField('Comments style', choices=[('quotes', 'quotes'), ('numbers', 'numbers')]) time = SelectField('Time period', choices=[('all', 'all'), ('year', 'year'), ('month', 'month'), ('week', 'week'), ('day', 'day'), ('hour', 'hour')]) limit = IntegerField('Number of posts') email = StringField('Kindle email address') kindle_address = SelectField('Kindle address', choices=[('normal', '@kindle.com'), ('free', '@free.kindle.com')])
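Swapping the tuples is enough because no explicit default is set on the field, so the first rendered <option> is the one browsers pre-select. A minimal check with plain WTForms (no Flask request context needed):

from wtforms import Form, SelectField

class Demo(Form):
    comments_style = SelectField(
        'Comments style',
        choices=[('numbers', 'numbers'), ('quotes', 'quotes')])

form = Demo()  # unbound form, as on a fresh GET
print(form.comments_style.choices[0])  # ('numbers', 'numbers') comes first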
7ceba1f2b83628a2b89ffbdd30e435970e8c5e91
tests/test_kafka_streams.py
tests/test_kafka_streams.py
""" Test the top-level Kafka Streams class """ import pytest from winton_kafka_streams import kafka_config from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError from winton_kafka_streams.kafka_streams import KafkaStreams from winton_kafka_streams.processor.processor import BaseProcessor from winton_kafka_streams.processor.topology import TopologyBuilder class MyTestProcessor(BaseProcessor): pass def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError(): kafka_config.NUM_STREAM_THREADS = 0 topology_builder = TopologyBuilder() topology_builder.source('my-source', ['my-input-topic-1']) topology_builder.processor('my-processor', MyTestProcessor, 'my-source') topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor') topology = topology_builder.build() kafka_streams = KafkaStreams(topology, kafka_config) kafka_streams.start() with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'): kafka_streams.start()
""" Test the top-level Kafka Streams class """ import pytest from winton_kafka_streams import kafka_config from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError from winton_kafka_streams.kafka_streams import KafkaStreams from winton_kafka_streams.processor.processor import BaseProcessor from winton_kafka_streams.processor.topology import TopologyBuilder class MyTestProcessor(BaseProcessor): pass def test__given__stream_already_started__when__call_start_again__then__raise_error(): kafka_config.NUM_STREAM_THREADS = 0 topology_builder = TopologyBuilder() topology_builder.source('my-source', ['my-input-topic-1']) topology_builder.processor('my-processor', MyTestProcessor, 'my-source') topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor') topology = topology_builder.build() kafka_streams = KafkaStreams(topology, kafka_config) kafka_streams.start() with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'): kafka_streams.start()
Use more Pythonic name for test.
Use more Pythonic name for test.
Python
apache-2.0
wintoncode/winton-kafka-streams
""" Test the top-level Kafka Streams class """ import pytest from winton_kafka_streams import kafka_config from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError from winton_kafka_streams.kafka_streams import KafkaStreams from winton_kafka_streams.processor.processor import BaseProcessor from winton_kafka_streams.processor.topology import TopologyBuilder class MyTestProcessor(BaseProcessor): pass def test__given__stream_already_started__when__call_start_again__then__raise_error(): kafka_config.NUM_STREAM_THREADS = 0 topology_builder = TopologyBuilder() topology_builder.source('my-source', ['my-input-topic-1']) topology_builder.processor('my-processor', MyTestProcessor, 'my-source') topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor') topology = topology_builder.build() kafka_streams = KafkaStreams(topology, kafka_config) kafka_streams.start() with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'): kafka_streams.start()
Use more Pythonic name for test. """ Test the top-level Kafka Streams class """ import pytest from winton_kafka_streams import kafka_config from winton_kafka_streams.errors.kafka_streams_error import KafkaStreamsError from winton_kafka_streams.kafka_streams import KafkaStreams from winton_kafka_streams.processor.processor import BaseProcessor from winton_kafka_streams.processor.topology import TopologyBuilder class MyTestProcessor(BaseProcessor): pass def test_Given_StreamAlreadyStarted_When_CallStartAgain_Then_RaiseError(): kafka_config.NUM_STREAM_THREADS = 0 topology_builder = TopologyBuilder() topology_builder.source('my-source', ['my-input-topic-1']) topology_builder.processor('my-processor', MyTestProcessor, 'my-source') topology_builder.sink('my-sink', 'my-output-topic-1', 'my-processor') topology = topology_builder.build() kafka_streams = KafkaStreams(topology, kafka_config) kafka_streams.start() with pytest.raises(KafkaStreamsError, message='KafkaStreams already started.'): kafka_streams.start()
da8efb34fe00f4c625c6ab7d3cf5651193d972d0
mopidy/backends/__init__.py
mopidy/backends/__init__.py
import logging import time from mopidy.exceptions import MpdNotImplemented from mopidy.models import Playlist logger = logging.getLogger('backends.base') class BaseBackend(object): current_playlist = None library = None playback = None stored_playlists = None uri_handlers = [] class BaseCurrentPlaylistController(object): def __init__(self, backend): self.backend = backend class BasePlaybackController(object): PAUSED = 'paused' PLAYING = 'playing' STOPPED = 'stopped' def __init__(self, backend): self.backend = backend self.state = self.STOPPED self.current_track = None self.playlist_position = None def play(self, id=None, position=None): raise NotImplementedError def next(self): raise NotImplementedError
import logging import time from mopidy.exceptions import MpdNotImplemented from mopidy.models import Playlist logger = logging.getLogger('backends.base') class BaseBackend(object): current_playlist = None library = None playback = None stored_playlists = None uri_handlers = [] class BaseCurrentPlaylistController(object): def __init__(self, backend): self.backend = backend def add(self, track, at_position=None): raise NotImplementedError class BasePlaybackController(object): PAUSED = 'paused' PLAYING = 'playing' STOPPED = 'stopped' def __init__(self, backend): self.backend = backend self.state = self.STOPPED self.current_track = None self.playlist_position = None def play(self, id=None, position=None): raise NotImplementedError def next(self): raise NotImplementedError
Add add method to BaseCurrentPlaylistController
Add add method to BaseCurrentPlaylistController
Python
apache-2.0
priestd09/mopidy,jcass77/mopidy,mokieyue/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,mopidy/mopidy,bencevans/mopidy,tkem/mopidy,quartz55/mopidy,rawdlite/mopidy,tkem/mopidy,quartz55/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,adamcik/mopidy,woutervanwijk/mopidy,bencevans/mopidy,pacificIT/mopidy,hkariti/mopidy,bacontext/mopidy,abarisain/mopidy,SuperStarPL/mopidy,adamcik/mopidy,vrs01/mopidy,jcass77/mopidy,priestd09/mopidy,diandiankan/mopidy,jmarsik/mopidy,ZenithDK/mopidy,swak/mopidy,hkariti/mopidy,ZenithDK/mopidy,jmarsik/mopidy,ali/mopidy,quartz55/mopidy,SuperStarPL/mopidy,jodal/mopidy,ali/mopidy,rawdlite/mopidy,diandiankan/mopidy,rawdlite/mopidy,dbrgn/mopidy,quartz55/mopidy,tkem/mopidy,mopidy/mopidy,mokieyue/mopidy,swak/mopidy,tkem/mopidy,mokieyue/mopidy,liamw9534/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,kingosticks/mopidy,hkariti/mopidy,jodal/mopidy,swak/mopidy,bencevans/mopidy,jmarsik/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,pacificIT/mopidy,bacontext/mopidy,dbrgn/mopidy,rawdlite/mopidy,priestd09/mopidy,ali/mopidy,hkariti/mopidy,liamw9534/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,dbrgn/mopidy,adamcik/mopidy,glogiotatidis/mopidy,abarisain/mopidy,diandiankan/mopidy,dbrgn/mopidy,pacificIT/mopidy,ali/mopidy,bacontext/mopidy,mopidy/mopidy,pacificIT/mopidy,bacontext/mopidy,swak/mopidy,vrs01/mopidy,vrs01/mopidy,bencevans/mopidy,ZenithDK/mopidy,vrs01/mopidy,jcass77/mopidy,kingosticks/mopidy,jodal/mopidy,kingosticks/mopidy
import logging import time from mopidy.exceptions import MpdNotImplemented from mopidy.models import Playlist logger = logging.getLogger('backends.base') class BaseBackend(object): current_playlist = None library = None playback = None stored_playlists = None uri_handlers = [] class BaseCurrentPlaylistController(object): def __init__(self, backend): self.backend = backend def add(self, track, at_position=None): raise NotImplementedError class BasePlaybackController(object): PAUSED = 'paused' PLAYING = 'playing' STOPPED = 'stopped' def __init__(self, backend): self.backend = backend self.state = self.STOPPED self.current_track = None self.playlist_position = None def play(self, id=None, position=None): raise NotImplementedError def next(self): raise NotImplementedError
Add add method to BaseCurrentPlaylistController import logging import time from mopidy.exceptions import MpdNotImplemented from mopidy.models import Playlist logger = logging.getLogger('backends.base') class BaseBackend(object): current_playlist = None library = None playback = None stored_playlists = None uri_handlers = [] class BaseCurrentPlaylistController(object): def __init__(self, backend): self.backend = backend class BasePlaybackController(object): PAUSED = 'paused' PLAYING = 'playing' STOPPED = 'stopped' def __init__(self, backend): self.backend = backend self.state = self.STOPPED self.current_track = None self.playlist_position = None def play(self, id=None, position=None): raise NotImplementedError def next(self): raise NotImplementedError
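A sketch of a concrete subclass satisfying the new abstract method, keeping tracks in a plain list; the import path matches this module (mopidy/backends/__init__.py):

from mopidy.backends import BaseCurrentPlaylistController

class InMemoryCurrentPlaylistController(BaseCurrentPlaylistController):
    def __init__(self, backend):
        BaseCurrentPlaylistController.__init__(self, backend)
        self.tracks = []

    def add(self, track, at_position=None):
        # Append by default; honour an explicit insertion point otherwise.
        if at_position is None:
            self.tracks.append(track)
        else:
            self.tracks.insert(at_position, track)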
fa6402472e30f59e67acf45d9faba632a3efc5e8
raiden/constants.py
raiden/constants.py
# -*- coding: utf-8 -*- UINT64_MAX = 2 ** 64 - 1 UINT64_MIN = 0 INT64_MAX = 2 ** 63 - 1 INT64_MIN = -(2 ** 63) UINT256_MAX = 2 ** 256 - 1 ROPSTEN_REGISTRY_ADDRESS = 'bbc60aa23059b039407ac008bd0b7e902890d382' ROPSTEN_DISCOVERY_ADDRESS = '524b7dcacac3055bd42fc03b006e9fdcb607e2be' MINUTE_SEC = 60 MINUTE_MS = 60 * 1000 NETTINGCHANNEL_SETTLE_TIMEOUT_MIN = 6 # TODO: add this as an attribute of the transport class UDP_MAX_MESSAGE_SIZE = 1200
# -*- coding: utf-8 -*- UINT64_MAX = 2 ** 64 - 1 UINT64_MIN = 0 INT64_MAX = 2 ** 63 - 1 INT64_MIN = -(2 ** 63) UINT256_MAX = 2 ** 256 - 1 # Deployed to Ropsten revival on 2017-06-19 from commit 2677298a69c1b1f35b9ab26beafe457acfdcc0ee ROPSTEN_REGISTRY_ADDRESS = 'aff1f958c69a6820b08a02549ff9041629ae8257' ROPSTEN_DISCOVERY_ADDRESS = 'cf56165f4f6e8ec38bb463854c1fe28a5d320f4f' MINUTE_SEC = 60 MINUTE_MS = 60 * 1000 NETTINGCHANNEL_SETTLE_TIMEOUT_MIN = 6 # TODO: add this as an attribute of the transport class UDP_MAX_MESSAGE_SIZE = 1200
Update pre-deployed Ropsten contract addresses
Update pre-deployed Ropsten contract addresses
Python
mit
hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,hackaugusto/raiden
# -*- coding: utf-8 -*- UINT64_MAX = 2 ** 64 - 1 UINT64_MIN = 0 INT64_MAX = 2 ** 63 - 1 INT64_MIN = -(2 ** 63) UINT256_MAX = 2 ** 256 - 1 # Deployed to Ropsten revival on 2017-06-19 from commit 2677298a69c1b1f35b9ab26beafe457acfdcc0ee ROPSTEN_REGISTRY_ADDRESS = 'aff1f958c69a6820b08a02549ff9041629ae8257' ROPSTEN_DISCOVERY_ADDRESS = 'cf56165f4f6e8ec38bb463854c1fe28a5d320f4f' MINUTE_SEC = 60 MINUTE_MS = 60 * 1000 NETTINGCHANNEL_SETTLE_TIMEOUT_MIN = 6 # TODO: add this as an attribute of the transport class UDP_MAX_MESSAGE_SIZE = 1200
Update pre-deployed Ropsten contract addresses # -*- coding: utf-8 -*- UINT64_MAX = 2 ** 64 - 1 UINT64_MIN = 0 INT64_MAX = 2 ** 63 - 1 INT64_MIN = -(2 ** 63) UINT256_MAX = 2 ** 256 - 1 ROPSTEN_REGISTRY_ADDRESS = 'bbc60aa23059b039407ac008bd0b7e902890d382' ROPSTEN_DISCOVERY_ADDRESS = '524b7dcacac3055bd42fc03b006e9fdcb607e2be' MINUTE_SEC = 60 MINUTE_MS = 60 * 1000 NETTINGCHANNEL_SETTLE_TIMEOUT_MIN = 6 # TODO: add this as an attribute of the transport class UDP_MAX_MESSAGE_SIZE = 1200
3d7707d20c299358476cca01babf14c7cacddb50
smaug/tests/fullstack/test_providers.py
smaug/tests/fullstack/test_providers.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from smaug.tests.fullstack import smaug_base class ProvidersTest(smaug_base.SmaugBaseTest): """Test Providers operation""" provider_id = u"cf56bd3e-97a7-4078-b6d5-f36246333fd9" def test_providers_list(self): provider_res = self.smaug_client.providers.list() self.assertEqual(1, len(provider_res)) def test_provider_get(self): provider_res = self.smaug_client.providers.get(self.provider_id) self.assertEqual("OS Infra Provider", provider_res.name)
Add fullstack tests of the resource providers
Add fullstack tests of the resource providers Change-Id: Ie4f769de3060fdb279320637ba965d5b452e2a2d Closes-Bug: #1578889
Python
apache-2.0
openstack/smaug,openstack/smaug
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from smaug.tests.fullstack import smaug_base class ProvidersTest(smaug_base.SmaugBaseTest): """Test Providers operation""" provider_id = u"cf56bd3e-97a7-4078-b6d5-f36246333fd9" def test_providers_list(self): provider_res = self.smaug_client.providers.list() self.assertEqual(1, len(provider_res)) def test_provider_get(self): provider_res = self.smaug_client.providers.get(self.provider_id) self.assertEqual("OS Infra Provider", provider_res.name)
Add fullstack tests of the resource providers Change-Id: Ie4f769de3060fdb279320637ba965d5b452e2a2d Closes-Bug: #1578889
9c2075f13e2aa8ff7a5c4644208e8de17ebefbab
finding-geodesic-basins-with-scipy.py
finding-geodesic-basins-with-scipy.py
# IPython log file import numpy as np from scipy import sparse from skimage import graph from skimage.graph import _mcp image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]]) mcp = graph.MCP_Geometric(image) destinations = [[0, 0], [3, 3]] costs, traceback = mcp.find_costs(destinations) offsets = _mcp.make_offsets(2, True) indices = np.indices(traceback.shape) offsets.append([0, 0]) offsets_arr = np.array(offsets) offset_to_neighbor = offsets_arr[traceback] neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1)) ids = np.arange(traceback.size).reshape(image.shape) neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape) g = sparse.coo_matrix(( np.ones(traceback.size), (ids.flat, neighbor_ids.flat), )).tocsr() basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4)) print(basins)
# IPython log file # See https://stackoverflow.com/questions/62135639/mcp-geometrics-for-calculating-marketsheds/62144556 import numpy as np from scipy import sparse from skimage import graph from skimage.graph import _mcp image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]]) mcp = graph.MCP_Geometric(image) destinations = [[0, 0], [3, 3]] costs, traceback = mcp.find_costs(destinations) offsets = _mcp.make_offsets(2, True) indices = np.indices(traceback.shape) offsets.append([0, 0]) offsets_arr = np.array(offsets) offset_to_neighbor = offsets_arr[traceback] neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1)) ids = np.arange(traceback.size).reshape(image.shape) neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape) g = sparse.coo_matrix(( np.ones(traceback.size), (ids.flat, neighbor_ids.flat), )).tocsr() basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4)) print(basins)
Add link to SO question
Add link to SO question
Python
bsd-3-clause
jni/useful-histories
# IPython log file # See https://stackoverflow.com/questions/62135639/mcp-geometrics-for-calculating-marketsheds/62144556 import numpy as np from scipy import sparse from skimage import graph from skimage.graph import _mcp image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]]) mcp = graph.MCP_Geometric(image) destinations = [[0, 0], [3, 3]] costs, traceback = mcp.find_costs(destinations) offsets = _mcp.make_offsets(2, True) indices = np.indices(traceback.shape) offsets.append([0, 0]) offsets_arr = np.array(offsets) offset_to_neighbor = offsets_arr[traceback] neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1)) ids = np.arange(traceback.size).reshape(image.shape) neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape) g = sparse.coo_matrix(( np.ones(traceback.size), (ids.flat, neighbor_ids.flat), )).tocsr() basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4)) print(basins)
Add link to SO question # IPython log file import numpy as np from scipy import sparse from skimage import graph from skimage.graph import _mcp image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]]) mcp = graph.MCP_Geometric(image) destinations = [[0, 0], [3, 3]] costs, traceback = mcp.find_costs(destinations) offsets = _mcp.make_offsets(2, True) indices = np.indices(traceback.shape) offsets.append([0, 0]) offsets_arr = np.array(offsets) offset_to_neighbor = offsets_arr[traceback] neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1)) ids = np.arange(traceback.size).reshape(image.shape) neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape) g = sparse.coo_matrix(( np.ones(traceback.size), (ids.flat, neighbor_ids.flat), )).tocsr() basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4)) print(basins)
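The basin labelling reduces to connected components on the traceback graph. A self-contained miniature of that final step, on a four-node graph made of two chains:

import numpy as np
from scipy import sparse
from scipy.sparse.csgraph import connected_components

# Edges 0->1 and 2->3: two separate chains, hence two basins.
g = sparse.coo_matrix((np.ones(2), ([0, 2], [1, 3])), shape=(4, 4)).tocsr()
n_basins, labels = connected_components(g, directed=False)
print(n_basins, labels)  # 2 [0 0 1 1]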
dca8802b77a4682d9f6a09e68cdc807736e830a8
fmn/rules/buidsys.py
fmn/rules/buidsys.py
def buildsys_build_state_change(config, message): """ Buildsys: build changed state (started, failed, finished) TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.build.state.change') def buildsys_package_list_change(config, message): """ Buildsys: Package list changed TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.package.list.change') def buildsys_repo_done(config, message): """ Buildsys: Building a repo has finished TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.repo.done') def buildsys_repo_init(config, message): """ Buildsys: Building a repo has started TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.repo.init') def buildsys_tag(config, message): """ Buildsys: A package has been tagged TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.tag') def buildsys_untag(config, message): """ Buildsys: A package has been untagged TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.untag')
Add filters for the buildsystem messages
Add filters for the buildsystem messages
Python
lgpl-2.1
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
def buildsys_build_state_change(config, message): """ Buildsys: build changed state (started, failed, finished) TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.build.state.change') def buildsys_package_list_change(config, message): """ Buildsys: Package list changed TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.package.list.change') def buildsys_repo_done(config, message): """ Buildsys: Building a repo has finished TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.repo.done') def buildsys_repo_init(config, message): """ Buildsys: Building a repo has started TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.repo.init') def buildsys_tag(config, message): """ Buildsys: A package has been tagged TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.tag') def buildsys_untag(config, message): """ Buildsys: A package has been untagged TODO description for the web interface goes here """ return message['topic'].endswith('buildsys.untag')
Add filters for the buildsystem messages
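Each rule only inspects the message topic, so exercising one takes nothing more than a dict; the rule body below repeats the one from the commit, and the topics are hypothetical fedmsg-style examples:

def buildsys_tag(config, message):
    return message['topic'].endswith('buildsys.tag')

print(buildsys_tag({}, {'topic': 'org.fedoraproject.prod.buildsys.tag'}))    # True
print(buildsys_tag({}, {'topic': 'org.fedoraproject.prod.buildsys.untag'}))  # False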
3595bffb71f415999847f323af36737a41ce4b56
main.py
main.py
from flask import Flask, request from pprint import pprint import json app = Flask(__name__) lastCommit = "No recorded commits!" @app.route("/") def hello(): return "IntegralGit: continuous integration via GitHub" @app.route("/update", methods=["POST"]) def update(): print json.dumps(request.form['payload']) return if __name__=="__main__": app.run(host="0.0.0.0", debug=True)
from flask import Flask, request from pprint import pprint import json app = Flask(__name__) lastCommit = "No recorded commits!" @app.route("/") def hello(): return "IntegralGit: continuous integration via GitHub" @app.route("/latest") def latest(): return lastCommit @app.route("/update", methods=["POST"]) def update(): payload = json.dumps(request.form['payload']) lastCommit = payload['commits'][0]['message'] return "" if __name__=="__main__": app.run(host="0.0.0.0", debug=True)
Add code to show last commit message
Add code to show last commit message
Python
mit
LinuxMercedes/IntegralGit,LinuxMercedes/IntegralGit
from flask import Flask, request from pprint import pprint import json app = Flask(__name__) lastCommit = "No recorded commits!" @app.route("/") def hello(): return "IntegralGit: continuous integration via GitHub" @app.route("/latest") def latest(): return lastCommit @app.route("/update", methods=["POST"]) def update(): payload = json.dumps(request.form['payload']) lastCommit = payload['commits'][0]['message'] return "" if __name__=="__main__": app.run(host="0.0.0.0", debug=True)
Add code to show last commit message from flask import Flask, request from pprint import pprint import json app = Flask(__name__) lastCommit = "No recorded commits!" @app.route("/") def hello(): return "IntegralGit: continuous integration via GitHub" @app.route("/update", methods=["POST"]) def update(): print json.dumps(request.form['payload']) return if __name__=="__main__": app.run(host="0.0.0.0", debug=True)
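As committed, update() re-binds lastCommit locally (the module-level value never changes) and calls json.dumps where a parse is needed. A sketch of the presumably intended logic, pulled out of Flask for clarity and assuming GitHub's push-webhook payload shape:

import json

lastCommit = "No recorded commits!"

def update(raw_payload):
    global lastCommit  # without this, assignment creates a new local name
    payload = json.loads(raw_payload)  # parse the JSON string, don't re-serialize it
    lastCommit = payload['commits'][0]['message']
    return ""

update('{"commits": [{"message": "initial commit"}]}')
print(lastCommit)  # initial commit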
6072022e2debeb4dcd75e4969bd2beb16bac8827
source/sqlserver_ado/fields.py
source/sqlserver_ado/fields.py
"""This module provides SQL Server specific fields for Django models.""" from django.db.models import AutoField, ForeignKey, IntegerField class BigAutoField(AutoField): """A bigint IDENTITY field""" def get_internal_type(self): return "BigAutoField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise exceptions.ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value) class BigForeignKey(ForeignKey): """A ForeignKey field that points to a BigAutoField or BigIntegerField""" def db_type(self): return BigIntegerField().db_type() class BigIntegerField(IntegerField): """A BigInteger field, until Django ticket #399 lands (if ever.)""" def get_internal_type(self): return "BigIntegerField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise exceptions.ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value)
"""This module provides SQL Server specific fields for Django models.""" from django.db.models import AutoField, ForeignKey, IntegerField from django.forms import ValidationError class BigAutoField(AutoField): """A bigint IDENTITY field""" def get_internal_type(self): return "BigAutoField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value) class BigForeignKey(ForeignKey): """A ForeignKey field that points to a BigAutoField or BigIntegerField""" def db_type(self): return BigIntegerField().db_type() class BigIntegerField(IntegerField): """A BigInteger field, until Django ticket #399 lands (if ever.)""" def get_internal_type(self): return "BigIntegerField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value)
Fix import error for custom Field validation
Fix import error for custom Field validation
Python
mit
theoriginalgri/django-mssql,theoriginalgri/django-mssql
"""This module provides SQL Server specific fields for Django models.""" from django.db.models import AutoField, ForeignKey, IntegerField from django.forms import ValidationError class BigAutoField(AutoField): """A bigint IDENTITY field""" def get_internal_type(self): return "BigAutoField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value) class BigForeignKey(ForeignKey): """A ForeignKey field that points to a BigAutoField or BigIntegerField""" def db_type(self): return BigIntegerField().db_type() class BigIntegerField(IntegerField): """A BigInteger field, until Django ticket #399 lands (if ever.)""" def get_internal_type(self): return "BigIntegerField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value)
Fix import error for custom Field validation """This module provides SQL Server specific fields for Django models.""" from django.db.models import AutoField, ForeignKey, IntegerField class BigAutoField(AutoField): """A bigint IDENTITY field""" def get_internal_type(self): return "BigAutoField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise exceptions.ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value) class BigForeignKey(ForeignKey): """A ForeignKey field that points to a BigAutoField or BigIntegerField""" def db_type(self): return BigIntegerField().db_type() class BigIntegerField(IntegerField): """A BigInteger field, until Django ticket #399 lands (if ever.)""" def get_internal_type(self): return "BigIntegerField" def to_python(self, value): if value is None: return value try: return long(value) except (TypeError, ValueError): raise exceptions.ValidationError( _("This value must be an long.")) def get_db_prep_value(self, value): if value is None: return None return long(value)
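One loose end survives the fix: both to_python methods still call _() without importing it, so reaching those branches would raise a NameError. The conventional Django import would be the following (an assumption; this commit only repaired the exceptions reference):

from django.utils.translation import ugettext_lazy as _

message = _("This value must be an long.")  # source string kept verbatim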
a27b03a89af6442dc8e1be3d310a8fc046a98ed4
foampy/tests.py
foampy/tests.py
""" Tests for foamPy. """ from .core import * from .dictionaries import * from .types import * from .foil import *
"""Tests for foamPy.""" from .core import * from .dictionaries import * from .types import * from .foil import * def test_load_all_torque_drag(): """Test the `load_all_torque_drag` function.""" t, torque, drag = load_all_torque_drag(casedir="test") assert t.max() == 4.0
Add test for loading all torque and drag data
Add test for loading all torque and drag data
Python
mit
petebachant/foamPy,petebachant/foamPy,petebachant/foamPy
"""Tests for foamPy.""" from .core import * from .dictionaries import * from .types import * from .foil import * def test_load_all_torque_drag(): """Test the `load_all_torque_drag` function.""" t, torque, drag = load_all_torque_drag(casedir="test") assert t.max() == 4.0
Add test for loading all torque and drag data """ Tests for foamPy. """ from .core import * from .dictionaries import * from .types import * from .foil import *
3295b30ba3e243801a520adff332663dbe490cf9
tools/mini_spectrum.py
tools/mini_spectrum.py
# -*- encoding: utf-8 -*- # JN 2016-02-16 """ Plot a spectrum from the first 1000 records of data """ import sys import scipy.signal as sig import matplotlib.pyplot as mpl from combinato import NcsFile, DefaultFilter def plot_spectrum(fname): fid = NcsFile(fname) rawdata = fid.read(0, 1000) data = rawdata * (1e6 * fid.header['ADBitVolts']) fs = 1/fid.timestep my_filter = DefaultFilter(fid.timestep) filt_data = my_filter.filter_extract(data) [f, p] = sig.welch(data, fs, nperseg=32768) [f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768) fig = mpl.figure() plot = fig.add_subplot(1, 1, 1) plot.plot(f, p, label='Unfiltered') plot.plot(f_filt, p_filt, label='Filtered') plot.set_yscale('log') plot.legend() plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$') plot.set_xlabel(r'$\mathrm{Hz}$') def main(): plot_spectrum(sys.argv[1]) mpl.show() if __name__ == '__main__': main()
Add small plot of power spectral density
Add small plot of power spectral density
Python
mit
jniediek/combinato
# -*- encoding: utf-8 -*- # JN 2016-02-16 """ Plot a spectrum from the first 1000 records of data """ import sys import scipy.signal as sig import matplotlib.pyplot as mpl from combinato import NcsFile, DefaultFilter def plot_spectrum(fname): fid = NcsFile(fname) rawdata = fid.read(0, 1000) data = rawdata * (1e6 * fid.header['ADBitVolts']) fs = 1/fid.timestep my_filter = DefaultFilter(fid.timestep) filt_data = my_filter.filter_extract(data) [f, p] = sig.welch(data, fs, nperseg=32768) [f_filt, p_filt] = sig.welch(filt_data, fs, nperseg=32768) fig = mpl.figure() plot = fig.add_subplot(1, 1, 1) plot.plot(f, p, label='Unfiltered') plot.plot(f_filt, p_filt, label='Filtered') plot.set_yscale('log') plot.legend() plot.set_ylabel(r'$\mu\mathrm{V}^2/\mathrm{Hz}$') plot.set_xlabel(r'$\mathrm{Hz}$') def main(): plot_spectrum(sys.argv[1]) mpl.show() if __name__ == '__main__': main()
Add small plot of power spectral density
53e1ff21bb219495f1b99f84dbb31624fdd35231
lpthw/ex33.py
lpthw/ex33.py
#i = 0 #numbers = [] #while i < 6: # print "At the top i is %d" % i # numbers.append(i) # # i += 1 # print "Numbers now: ", numbers # print "At the bottom i is %d" % i # # #print "The numbers: " # #for num in numbers: # print num #Study Drills print "What's the limit of the list?" a = raw_input("> ") def list_numbers(a): """This function might add numbers to a list?""" i = 0 numbers = [] while i < a: print "At the top i is %d" % i numbers.append(i) i += 1 print "Numbers now: ", numbers print "At the bottom i is %d" % i print "The numbers: " for num in numbers: print num return list_numbers(a)
#i = 0 #numbers = [] #while i < 6: # print "At the top i is %d" % i # numbers.append(i) # # i += 1 # print "Numbers now: ", numbers # print "At the bottom i is %d" % i # # #print "The numbers: " # #for num in numbers: # print num #Study Drills print "What's the limit of the list?" a = int(raw_input("> ")) def list_numbers(a): """This function might add numbers to a list?""" i = 0 numbers = [] while i < a: print "At the top i is %d" % i numbers.append(i) i += 1 print "Numbers now: ", numbers print "At the bottom i is %d" % i print "The numbers: " for num in numbers: print num return list_numbers(a)
Fix that crazy error that would cause endless looping...
Fix that crazy error that would cause endless looping...
Python
mit
jaredmanning/learning,jaredmanning/learning
#i = 0 #numbers = [] #while i < 6: # print "At the top i is %d" % i # numbers.append(i) # # i += 1 # print "Numbers now: ", numbers # print "At the bottom i is %d" % i # # #print "The numbers: " # #for num in numbers: # print num #Study Drills print "What's the limit of the list?" a = int(raw_input("> ")) def list_numbers(a): """This function might add numbers to a list?""" i = 0 numbers = [] while i < a: print "At the top i is %d" % i numbers.append(i) i += 1 print "Numbers now: ", numbers print "At the bottom i is %d" % i print "The numbers: " for num in numbers: print num return list_numbers(a)
Fix that crazy error that would cause endless looping... #i = 0 #numbers = [] #while i < 6: # print "At the top i is %d" % i # numbers.append(i) # # i += 1 # print "Numbers now: ", numbers # print "At the bottom i is %d" % i # # #print "The numbers: " # #for num in numbers: # print num #Study Drills print "What's the limit of the list?" a = raw_input("> ") def list_numbers(a): """This function might add numbers to a list?""" i = 0 numbers = [] while i < a: print "At the top i is %d" % i numbers.append(i) i += 1 print "Numbers now: ", numbers print "At the bottom i is %d" % i print "The numbers: " for num in numbers: print num return list_numbers(a)
31921ce5ca7ccbaa2db8b8fa11b2b9a6caa14aeb
daisyproducer/settings.py
daisyproducer/settings.py
from settings_common import * PACKAGE_VERSION = "0.5" DEBUG = TEMPLATE_DEBUG = True DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline') EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp') SERVE_STATIC_FILES = True # the following is an idea from https://code.djangoproject.com/wiki/SplitSettings # We have both local settings and common settings. They are used as follows: # - common settings are shared data between normal settings and unit test settings # - local settings are used on productive servers to keep the local # settings such as db passwords, etc out of version control try: from settings_local import * except ImportError: pass
from settings_common import * PACKAGE_VERSION = "0.5" DEBUG = TEMPLATE_DEBUG = True DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline') EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp') SERVE_STATIC_FILES = True # the following is an idea from https://code.djangoproject.com/wiki/SplitSettings # We have both local settings and common settings. They are used as follows: # - common settings are shared data between normal settings and unit test settings # - local settings are used on productive servers to keep the local # settings such as db passwords, etc out of version control try: from settings_local import * except ImportError: pass if SERVE_STATIC_FILES: INSTALLED_APPS += ('django.contrib.staticfiles',)
Use django.contrib.staticfiles when running locally
Use django.contrib.staticfiles when running locally so that the admin interface uses the proper CSS when running on the dev machine
Python
agpl-3.0
sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer
from settings_common import * PACKAGE_VERSION = "0.5" DEBUG = TEMPLATE_DEBUG = True DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline') EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp') SERVE_STATIC_FILES = True # the following is an idea from https://code.djangoproject.com/wiki/SplitSettings # We have both local settings and common settings. They are used as follows: # - common settings are shared data between normal settings and unit test settings # - local settings are used on productive servers to keep the local # settings such as db passwords, etc out of version control try: from settings_local import * except ImportError: pass if SERVE_STATIC_FILES: INSTALLED_APPS += ('django.contrib.staticfiles',)
Use django.contrib.staticfiles when running locally so that the admin interface uses the proper CSS when running on the dev machine from settings_common import * PACKAGE_VERSION = "0.5" DEBUG = TEMPLATE_DEBUG = True DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline') EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp') SERVE_STATIC_FILES = True # the following is an idea from https://code.djangoproject.com/wiki/SplitSettings # We have both local settings and common settings. They are used as follows: # - common settings are shared data between normal settings and unit test settings # - local settings are used on productive servers to keep the local # settings such as db passwords, etc out of version control try: from settings_local import * except ImportError: pass