Dataset schema (twelve string columns):

commit          stringlengths   40 – 40
old_file        stringlengths   5 – 117
new_file        stringlengths   5 – 117
old_contents    stringlengths   0 – 1.93k
new_contents    stringlengths   19 – 3.3k
subject         stringlengths   17 – 320
message         stringlengths   18 – 3.28k
lang            stringclasses   1 value
license         stringclasses   13 values
repos           stringlengths   7 – 42.4k
completion      stringlengths   19 – 3.3k
prompt          stringlengths   21 – 3.65k
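For orientation, here is a minimal sketch of how records in this schema might be consumed in Python. The field names come from the schema above; the JSON-lines file name `commits.jsonl` and the `load_records` helper are hypothetical stand-ins, not part of the dataset itself:

import json

# Column names from the schema above; every field is a string.
FIELDS = [
    "commit", "old_file", "new_file", "old_contents", "new_contents",
    "subject", "message", "lang", "license", "repos",
    "completion", "prompt",
]


def load_records(path="commits.jsonl"):
    """Yield one record dict per line of a JSON-lines dump (hypothetical file)."""
    with open(path) as fh:
        for line in fh:
            record = json.loads(line)
            # Sanity-check each record against the declared schema.
            missing = [field for field in FIELDS if field not in record]
            if missing:
                raise ValueError("record is missing fields: %s" % missing)
            yield record


if __name__ == "__main__":
    for record in load_records():
        # As the rows below show, `prompt` is the commit message followed by
        # the pre-change file, and `completion` equals `new_contents`.
        print(record["commit"][:8], record["subject"])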
e1e7189bbe859d6dfa6f883d2ff46ff1faed4842
scrape.py
scrape.py
import scholarly
import requests

_SEARCH = '/scholar?q=\"{}\"&as_ylo={}&as_yhi={}'


def search(query, start_year, end_year):
    """Search by scholar query and return a generator of Publication objects"""
    soup = scholarly._get_soup(
        _SEARCH.format(requests.utils.quote(query),
                       str(start_year), str(end_year)))
    return scholarly._search_scholar_soup(soup)


if __name__ == '__main__':
    s = search("Cure Alzheimer's Fund", 2015, 2015)
    num = 0
    for x in s:
        x.fill()
        stuff = ['title', 'author', 'journal', 'volume', 'issue']
        for thing in stuff:
            if thing in x.bib:
                print("{}: {}".format(thing, x.bib[thing]))
        num += 1
    print("Number of results:", num)
import scholarly
import requests

_EXACT_SEARCH = '/scholar?q="{}"'
_START_YEAR = '&as_ylo={}'
_END_YEAR = '&as_yhi={}'


def search(query, exact=True, start_year=None, end_year=None):
    """Search by scholar query and return a generator of Publication objects"""
    url = _EXACT_SEARCH.format(requests.utils.quote(query))
    if start_year:
        url += _START_YEAR.format(start_year)
    if end_year:
        url += _END_YEAR.format(end_year)
    soup = scholarly._get_soup(url)
    return scholarly._search_scholar_soup(soup)


if __name__ == '__main__':
    s = search("Cure Alzheimer's Fund", start_year=2015, end_year=2015)
    num = 0
    for x in s:
        x.fill()
        stuff = ['title', 'author', 'journal', 'volume', 'issue']
        for thing in stuff:
            if thing in x.bib:
                print("{}: {}".format(thing, x.bib[thing]))
        num += 1
    print("Number of results:", num)
Make year range arguments optional in search
Make year range arguments optional in search
Python
mit
Spferical/cure-alzheimers-fund-tracker,Spferical/cure-alzheimers-fund-tracker,Spferical/cure-alzheimers-fund-tracker
import scholarly
import requests

_EXACT_SEARCH = '/scholar?q="{}"'
_START_YEAR = '&as_ylo={}'
_END_YEAR = '&as_yhi={}'


def search(query, exact=True, start_year=None, end_year=None):
    """Search by scholar query and return a generator of Publication objects"""
    url = _EXACT_SEARCH.format(requests.utils.quote(query))
    if start_year:
        url += _START_YEAR.format(start_year)
    if end_year:
        url += _END_YEAR.format(end_year)
    soup = scholarly._get_soup(url)
    return scholarly._search_scholar_soup(soup)


if __name__ == '__main__':
    s = search("Cure Alzheimer's Fund", start_year=2015, end_year=2015)
    num = 0
    for x in s:
        x.fill()
        stuff = ['title', 'author', 'journal', 'volume', 'issue']
        for thing in stuff:
            if thing in x.bib:
                print("{}: {}".format(thing, x.bib[thing]))
        num += 1
    print("Number of results:", num)
Make year range arguments optional in search

import scholarly
import requests

_SEARCH = '/scholar?q=\"{}\"&as_ylo={}&as_yhi={}'


def search(query, start_year, end_year):
    """Search by scholar query and return a generator of Publication objects"""
    soup = scholarly._get_soup(
        _SEARCH.format(requests.utils.quote(query),
                       str(start_year), str(end_year)))
    return scholarly._search_scholar_soup(soup)


if __name__ == '__main__':
    s = search("Cure Alzheimer's Fund", 2015, 2015)
    num = 0
    for x in s:
        x.fill()
        stuff = ['title', 'author', 'journal', 'volume', 'issue']
        for thing in stuff:
            if thing in x.bib:
                print("{}: {}".format(thing, x.bib[thing]))
        num += 1
    print("Number of results:", num)
9139a2efc445887f59b99052f1ffd05c98ee2c72
tests/test_reporter.py
tests/test_reporter.py
"""Test the Reporter base class.""" import pytest @pytest.fixture def klass(): """Return the CUT.""" from agile_analytics.reporters import Reporter return Reporter def test_klass(klass): """Ensure the CUT exists.""" assert klass @pytest.fixture def instance(klass, days_ago): """Return a pre-init'd CUT.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) return k def test_init(klass, days_ago): """Verify we can init it correctly.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) assert k assert k.start_date == a_month_ago assert k.end_date == now def test_valid_start_date(klass, days_ago): """Verify valid_start_date returns whatever is passed.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) assert now == k.valid_start_date(now) def test_valid_end_date(klass, days_ago): """Verify valid_end_date returns whatever is passed.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) assert a_month_ago == k.valid_end_date(a_month_ago) def test_filter_issues(instance): """Verify that filter_issues raises NotImplementedError.""" with pytest.raises(NotImplementedError): instance.filter_issues([]) def test_report_on(instance): """Verify that report_on raises NotImplementedError.""" with pytest.raises(NotImplementedError): instance.report_on([])
Add tests for base class.
Add tests for base class.
Python
mit
cmheisel/agile-analytics
"""Test the Reporter base class.""" import pytest @pytest.fixture def klass(): """Return the CUT.""" from agile_analytics.reporters import Reporter return Reporter def test_klass(klass): """Ensure the CUT exists.""" assert klass @pytest.fixture def instance(klass, days_ago): """Return a pre-init'd CUT.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) return k def test_init(klass, days_ago): """Verify we can init it correctly.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) assert k assert k.start_date == a_month_ago assert k.end_date == now def test_valid_start_date(klass, days_ago): """Verify valid_start_date returns whatever is passed.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) assert now == k.valid_start_date(now) def test_valid_end_date(klass, days_ago): """Verify valid_end_date returns whatever is passed.""" now = days_ago(0) a_month_ago = days_ago(30) k = klass(title="Foo", start_date=a_month_ago, end_date=now) assert a_month_ago == k.valid_end_date(a_month_ago) def test_filter_issues(instance): """Verify that filter_issues raises NotImplementedError.""" with pytest.raises(NotImplementedError): instance.filter_issues([]) def test_report_on(instance): """Verify that report_on raises NotImplementedError.""" with pytest.raises(NotImplementedError): instance.report_on([])
Add tests for base class.
fa77d7d83ed9150670ac374f1494b38f2338217a
migrations/versions/0028_add_default_permissions.py
migrations/versions/0028_add_default_permissions.py
"""empty message Revision ID: 0028_add_default_permissions Revises: 0027_add_service_permission Create Date: 2016-02-26 10:33:20.536362 """ # revision identifiers, used by Alembic. revision = '0028_add_default_permissions' down_revision = '0027_add_service_permission' import uuid from datetime import datetime from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() user_services = conn.execute("SELECT * FROM user_to_service").fetchall() for entry in user_services: id_ = uuid.uuid4() created_at = datetime.now().isoformat().replace('T', ' ') conn.execute(( "INSERT INTO permissions (id, user_id, service_id, permission, created_at)" " VALUES ('{}', '{}', '{}', 'manage_service', '{}')").format(id_, entry[0], entry[1], created_at)) id_ = uuid.uuid4() conn.execute(( "INSERT INTO permissions (id, user_id, service_id, permission, created_at)" " VALUES ('{}', '{}', '{}', 'send_messages', '{}')").format(id_, entry[0], entry[1], created_at)) id_ = uuid.uuid4() conn.execute(( "INSERT INTO permissions (id, user_id, service_id, permission, created_at)" " VALUES ('{}', '{}', '{}', 'manage_api_keys', '{}')").format(id_, entry[0], entry[1], created_at)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() conn.execute("DELETE FROM permissions") ### end Alembic commands ###
Add default permissions for existing services.
Add default permissions for existing services.
Python
mit
alphagov/notifications-api,alphagov/notifications-api
"""empty message Revision ID: 0028_add_default_permissions Revises: 0027_add_service_permission Create Date: 2016-02-26 10:33:20.536362 """ # revision identifiers, used by Alembic. revision = '0028_add_default_permissions' down_revision = '0027_add_service_permission' import uuid from datetime import datetime from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() user_services = conn.execute("SELECT * FROM user_to_service").fetchall() for entry in user_services: id_ = uuid.uuid4() created_at = datetime.now().isoformat().replace('T', ' ') conn.execute(( "INSERT INTO permissions (id, user_id, service_id, permission, created_at)" " VALUES ('{}', '{}', '{}', 'manage_service', '{}')").format(id_, entry[0], entry[1], created_at)) id_ = uuid.uuid4() conn.execute(( "INSERT INTO permissions (id, user_id, service_id, permission, created_at)" " VALUES ('{}', '{}', '{}', 'send_messages', '{}')").format(id_, entry[0], entry[1], created_at)) id_ = uuid.uuid4() conn.execute(( "INSERT INTO permissions (id, user_id, service_id, permission, created_at)" " VALUES ('{}', '{}', '{}', 'manage_api_keys', '{}')").format(id_, entry[0], entry[1], created_at)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### conn = op.get_bind() conn.execute("DELETE FROM permissions") ### end Alembic commands ###
Add default permissions for existing services.
f40dd24af6788e7de7d06254850b83edb179b423
bootcamp/lesson4.py
bootcamp/lesson4.py
import datetime
import math

from core import test_helper


# Question 1
# ----------
# Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1
def playing_with_dt():
    return datetime.datetime(2015, 06, 01)


# Question 2
# ----------
# Using the math module return pi
def playing_with_math():
    return math.pi


def main():
    print "\nRunning playing_with_dt_one function..."
    test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01))

    print "\nRunning playing_with_dt_one function..."
    test_helper(playing_with_math(), math.pi)


if __name__ == '__main__':
    main()
import datetime
import math

from core import test_helper


# Question 1
# ----------
# Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1
def playing_with_dt():
    # Write code here
    pass


# Question 2
# ----------
# Using the math module return pi
def playing_with_math():
    # Write code here
    pass


def main():
    print "\nRunning playing_with_dt_one function..."
    test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01))

    print "\nRunning playing_with_dt_one function..."
    test_helper(playing_with_math(), math.pi)


if __name__ == '__main__':
    main()
Revert "Added solutions for lesson 4"
Revert "Added solutions for lesson 4" This reverts commit 58d049c78b16ec5b61f9681b605dc4e937ea7e3e.
Python
mit
infoscout/python-bootcamp-pv
import datetime
import math

from core import test_helper


# Question 1
# ----------
# Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1
def playing_with_dt():
    # Write code here
    pass


# Question 2
# ----------
# Using the math module return pi
def playing_with_math():
    # Write code here
    pass


def main():
    print "\nRunning playing_with_dt_one function..."
    test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01))

    print "\nRunning playing_with_dt_one function..."
    test_helper(playing_with_math(), math.pi)


if __name__ == '__main__':
    main()
Revert "Added solutions for lesson 4" This reverts commit 58d049c78b16ec5b61f9681b605dc4e937ea7e3e. import datetime import math from core import test_helper # Question 1 # ---------- # Using the datetime module return a datetime object with the year of 2015, the month of June, and the day of 1 def playing_with_dt(): return datetime.datetime(2015, 06, 01) # Question 2 # ---------- # Using the math module return pi def playing_with_math(): return math.pi def main(): print "\nRunning playing_with_dt_one function..." test_helper(playing_with_dt(), datetime.datetime(2015, 06, 01)) print "\nRunning playing_with_dt_one function..." test_helper(playing_with_math(), math.pi) if __name__ == '__main__': main()
9201e9c433930da8fd0bfb13eadbc249469e4d84
fireplace/cards/tourney/mage.py
fireplace/cards/tourney/mage.py
from ..utils import *


##
# Secrets

# Effigy
class AT_002:
    events = Death(FRIENDLY + MINION).on(
        lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost))
    )
from ..utils import *


##
# Minions

# Dalaran Aspirant
class AT_006:
    inspire = Buff(SELF, "AT_006e")


# Spellslinger
class AT_007:
    play = Give(ALL_PLAYERS, RandomSpell())


# Rhonin
class AT_009:
    deathrattle = Give(CONTROLLER, "EX1_277") * 3


##
# Spells

# Flame Lance
class AT_001:
    play = Hit(TARGET, 8)


# Arcane Blast
class AT_004:
    play = Hit(TARGET, 2)


# Polymorph: Boar
class AT_005:
    play = Morph(TARGET, "AT_005t")


##
# Secrets

# Effigy
class AT_002:
    events = Death(FRIENDLY + MINION).on(
        lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost))
    )
Implement Mage cards for The Grand Tournament
Implement Mage cards for The Grand Tournament
Python
agpl-3.0
Meerkov/fireplace,amw2104/fireplace,liujimj/fireplace,Ragowit/fireplace,smallnamespace/fireplace,jleclanche/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace,NightKev/fireplace,liujimj/fireplace,Meerkov/fireplace,amw2104/fireplace,oftc-ftw/fireplace
from ..utils import *


##
# Minions

# Dalaran Aspirant
class AT_006:
    inspire = Buff(SELF, "AT_006e")


# Spellslinger
class AT_007:
    play = Give(ALL_PLAYERS, RandomSpell())


# Rhonin
class AT_009:
    deathrattle = Give(CONTROLLER, "EX1_277") * 3


##
# Spells

# Flame Lance
class AT_001:
    play = Hit(TARGET, 8)


# Arcane Blast
class AT_004:
    play = Hit(TARGET, 2)


# Polymorph: Boar
class AT_005:
    play = Morph(TARGET, "AT_005t")


##
# Secrets

# Effigy
class AT_002:
    events = Death(FRIENDLY + MINION).on(
        lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost))
    )
Implement Mage cards for The Grand Tournament

from ..utils import *


##
# Secrets

# Effigy
class AT_002:
    events = Death(FRIENDLY + MINION).on(
        lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost))
    )
c12d70090b47765a658a98c29fd332ca6ec057d7
bin/migrate-tips.py
bin/migrate-tips.py
from gratipay.wireup import db, env
from gratipay.models.team import Team, AlreadyMigrated

db = db(env())

slugs = db.all("""
    SELECT slug
      FROM teams
     WHERE is_approved IS TRUE
""")

for slug in slugs:
    team = Team.from_slug(slug)
    try:
        team.migrate_tips()
        print("Migrated tips for '%s'" % slug)
    except AlreadyMigrated:
        print("'%s' already migrated." % slug)

print("Done.")
Add script for migrating tips to new teams
Add script for migrating tips to new teams
Python
mit
studio666/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com
from gratipay.wireup import db, env
from gratipay.models.team import Team, AlreadyMigrated

db = db(env())

slugs = db.all("""
    SELECT slug
      FROM teams
     WHERE is_approved IS TRUE
""")

for slug in slugs:
    team = Team.from_slug(slug)
    try:
        team.migrate_tips()
        print("Migrated tips for '%s'" % slug)
    except AlreadyMigrated:
        print("'%s' already migrated." % slug)

print("Done.")
Add script for migrating tips to new teams
23f59f95ea3e7d6504e03949a1400be452166d17
buildPy2app.py
buildPy2app.py
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
Update py2app script for Qt 5.11
Update py2app script for Qt 5.11
Python
apache-2.0
NeverDecaf/syncplay,alby128/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay,NeverDecaf/syncplay
""" This is a setup.py script generated by py2applet Usage: python setup.py py2app """ from setuptools import setup from glob import glob import syncplay APP = ['syncplayClient.py'] DATA_FILES = [ ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')), ] OPTIONS = { 'iconfile':'resources/icon.icns', 'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'}, 'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'}, 'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib', 'styles/libqmacstyle.dylib'], 'plist': { 'CFBundleName':'Syncplay', 'CFBundleShortVersionString':syncplay.version, 'CFBundleIdentifier':'pl.syncplay.Syncplay', 'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved' } } setup( app=APP, name='Syncplay', data_files=DATA_FILES, options={'py2app': OPTIONS}, setup_requires=['py2app'], )
Update py2app script for Qt 5.11

"""
This is a setup.py script generated by py2applet

Usage:
    python setup.py py2app
"""

from setuptools import setup
from glob import glob
import syncplay

APP = ['syncplayClient.py']
DATA_FILES = [
    ('resources', glob('resources/*.png') + glob('resources/*.rtf') + glob('resources/*.lua')),
]
OPTIONS = {
    'iconfile':'resources/icon.icns',
    'includes': {'PySide2.QtCore', 'PySide2.QtUiTools', 'PySide2.QtGui','PySide2.QtWidgets', 'certifi'},
    'excludes': {'PySide', 'PySide.QtCore', 'PySide.QtUiTools', 'PySide.QtGui'},
    'qt_plugins': ['platforms/libqcocoa.dylib', 'platforms/libqminimal.dylib','platforms/libqoffscreen.dylib'],
    'plist': {
        'CFBundleName':'Syncplay',
        'CFBundleShortVersionString':syncplay.version,
        'CFBundleIdentifier':'pl.syncplay.Syncplay',
        'NSHumanReadableCopyright': '@ 2017 Syncplay All Rights Reserved'
    }
}

setup(
    app=APP,
    name='Syncplay',
    data_files=DATA_FILES,
    options={'py2app': OPTIONS},
    setup_requires=['py2app'],
)
497f5085143322d4b9d3ad23d35d30cdf852d1f6
test/unit/sorting/test_heap_sort.py
test/unit/sorting/test_heap_sort.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import unittest

from helper.read_data_file import read_int_array
from sorting.heap_sort import sort

BASE_DIR = os.path.dirname(os.path.abspath(__file__))


class InsertionSortTester(unittest.TestCase):
    # Test sort in default order, i.e., in ascending order.
    def test_sort_default(self):
        array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
        array = sort(array)
        expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
        self.assertEqual(expect, array)

    # Test sort in ascending order.
    def test_sort_ascending(self):
        array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
        array = sort(array, 'asc')
        expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
        self.assertEqual(expect, array)

    # Test sort in descending order.
    def test_sort_descending(self):
        array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
        array = sort(array, 'desc')
        expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
        self.assertEqual(expect, array)


if __name__ == '__main__':
    unittest.main()
Add unit test for heap sort implementation.
Add unit test for heap sort implementation.
Python
mit
weichen2046/algorithm-study,weichen2046/algorithm-study
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import unittest

from helper.read_data_file import read_int_array
from sorting.heap_sort import sort

BASE_DIR = os.path.dirname(os.path.abspath(__file__))


class InsertionSortTester(unittest.TestCase):
    # Test sort in default order, i.e., in ascending order.
    def test_sort_default(self):
        array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
        array = sort(array)
        expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
        self.assertEqual(expect, array)

    # Test sort in ascending order.
    def test_sort_ascending(self):
        array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
        array = sort(array, 'asc')
        expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
        self.assertEqual(expect, array)

    # Test sort in descending order.
    def test_sort_descending(self):
        array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
        array = sort(array, 'desc')
        expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
        self.assertEqual(expect, array)


if __name__ == '__main__':
    unittest.main()
Add unit test for heap sort implementation.
1cb79216f992ea0f31abb28031a74f6e703582cb
YouKnowShit/DownloadPic.py
YouKnowShit/DownloadPic.py
import requests
import bs4
import os
import urllib.request
import shutil
import re

base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"

filenames = os.listdir(srcDir)

for filename in filenames:
    preFileName = filename.split(".")[0]
    if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
        preFileName = preFileName[0:len(preFileName) - 1]
    destPicName = srcDir + os.sep + preFileName + '.jpg'
    if (os.path.isfile(destPicName)):
        print(destPicName + ' already here.\n')
    else:
        full_url = base_url + preFileName
        response = requests.get(full_url)
        soup = bs4.BeautifulSoup(response.text, "html.parser")
        try:
            imgsrc = soup.find(id = filterWord)['src']
            print(preFileName + "\n" + imgsrc)
            print(destPicName + "\n")
            if not (os.path.isfile(destPicName)):
                urllib.request.urlretrieve(imgsrc, destPicName)
        except:
            print('Can not find picture of ' + filename + '\n')
import requests
import bs4
import os
import urllib.request
import shutil
import re

base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"

filenames = os.listdir(srcDir)

for filename in filenames:
    preFileName = filename.split(".")[0]
    if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
        preFileName = preFileName[0:len(preFileName) - 1]
    destPicName = srcDir + os.sep + preFileName + '.jpg'
    if (os.path.isfile(destPicName)):
        print(destPicName + ' already here.\n')
    else:
        full_url = base_url + preFileName
        response = requests.get(full_url)
        soup = bs4.BeautifulSoup(response.text, "html.parser")
        try:
            imgsrc = soup.find(id = filterWord)['src']
            print(preFileName + "\n" + imgsrc)
            print(destPicName + "\n")
            if not (os.path.isfile(destPicName)):
                urllib.request.urlretrieve(imgsrc, destPicName)
        except:
            print('Can not find picture of ' + filename + '\n')
Update the pic download base url.
Update the pic download base url.
Python
mit
jiangtianyu2009/PiSoftCake
import requests
import bs4
import os
import urllib.request
import shutil
import re

base_url = 'http://www.jav11b.com/cn/vl_searchbyid.php?keyword='
srcDir = 'H:\\temp'
filterWord = "video_jacket_img"

filenames = os.listdir(srcDir)

for filename in filenames:
    preFileName = filename.split(".")[0]
    if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
        preFileName = preFileName[0:len(preFileName) - 1]
    destPicName = srcDir + os.sep + preFileName + '.jpg'
    if (os.path.isfile(destPicName)):
        print(destPicName + ' already here.\n')
    else:
        full_url = base_url + preFileName
        response = requests.get(full_url)
        soup = bs4.BeautifulSoup(response.text, "html.parser")
        try:
            imgsrc = soup.find(id = filterWord)['src']
            print(preFileName + "\n" + imgsrc)
            print(destPicName + "\n")
            if not (os.path.isfile(destPicName)):
                urllib.request.urlretrieve(imgsrc, destPicName)
        except:
            print('Can not find picture of ' + filename + '\n')
Update the pic download base url.

import requests
import bs4
import os
import urllib.request
import shutil
import re

base_url = 'http://www.j8vlib.com/cn/vl_searchbyid.php?keyword='
srcDir = 'F:\\utorrent\\WEST'
filterWord = "video_jacket_img"

filenames = os.listdir(srcDir)

for filename in filenames:
    preFileName = filename.split(".")[0]
    if (preFileName[-1] == "A" or preFileName[-1] == "B" or preFileName[-1] == "C"):
        preFileName = preFileName[0:len(preFileName) - 1]
    destPicName = srcDir + os.sep + preFileName + '.jpg'
    if (os.path.isfile(destPicName)):
        print(destPicName + ' already here.\n')
    else:
        full_url = base_url + preFileName
        response = requests.get(full_url)
        soup = bs4.BeautifulSoup(response.text, "html.parser")
        try:
            imgsrc = soup.find(id = filterWord)['src']
            print(preFileName + "\n" + imgsrc)
            print(destPicName + "\n")
            if not (os.path.isfile(destPicName)):
                urllib.request.urlretrieve(imgsrc, destPicName)
        except:
            print('Can not find picture of ' + filename + '\n')
bd717b8056a69ee7074a94b3234d840dd431dd1f
src/341_flatten_nested_list_iterator.py
src/341_flatten_nested_list_iterator.py
""" This is the interface that allows for creating nested lists. You should not implement it, or speculate about its implementation """ class NestedInteger(object): def isInteger(self): """ @return True if this NestedInteger holds a single integer, rather than a nested list. :rtype bool """ def getInteger(self): """ @return the single integer that this NestedInteger holds, if it holds a single integer Return None if this NestedInteger holds a nested list :rtype int """ def getList(self): """ @return the nested list that this NestedInteger holds, if it holds a nested list Return None if this NestedInteger holds a single integer :rtype List[NestedInteger] """ class NestedIterator(object): def __init__(self, nestedList): """ Initialize your data structure here. :type nestedList: List[NestedInteger] """ self.stack = [[nestedList, 0]] def next(self): """ :rtype: int """ nestedList, i = self.stack[-1] self.stack[-1][1] += 1 return nestedList[i].getInteger() def hasNext(self): """ :rtype: bool """ stk = self.stack while stk: nestedList, i = stk[-1] if i == len(nestedList): stk.pop() else: val = nestedList[i] if val.isInteger(): return True else: stk[-1][1] += 1 stk.append([val.getList(), 0]) return False # Your NestedIterator object will be instantiated and called as such: # i, v = NestedIterator(nestedList), [] # while i.hasNext(): v.append(i.next())
Use stack to solve the problem
Use stack to solve the problem
Python
apache-2.0
zhuxiang/LeetCode-Python
""" This is the interface that allows for creating nested lists. You should not implement it, or speculate about its implementation """ class NestedInteger(object): def isInteger(self): """ @return True if this NestedInteger holds a single integer, rather than a nested list. :rtype bool """ def getInteger(self): """ @return the single integer that this NestedInteger holds, if it holds a single integer Return None if this NestedInteger holds a nested list :rtype int """ def getList(self): """ @return the nested list that this NestedInteger holds, if it holds a nested list Return None if this NestedInteger holds a single integer :rtype List[NestedInteger] """ class NestedIterator(object): def __init__(self, nestedList): """ Initialize your data structure here. :type nestedList: List[NestedInteger] """ self.stack = [[nestedList, 0]] def next(self): """ :rtype: int """ nestedList, i = self.stack[-1] self.stack[-1][1] += 1 return nestedList[i].getInteger() def hasNext(self): """ :rtype: bool """ stk = self.stack while stk: nestedList, i = stk[-1] if i == len(nestedList): stk.pop() else: val = nestedList[i] if val.isInteger(): return True else: stk[-1][1] += 1 stk.append([val.getList(), 0]) return False # Your NestedIterator object will be instantiated and called as such: # i, v = NestedIterator(nestedList), [] # while i.hasNext(): v.append(i.next())
Use stack to solve the problem
c85f423960050fea76452818ce25f9dc287c922a
vumidash/dummy_client.py
vumidash/dummy_client.py
"""MetricSource that serves dummy data.""" import random from vumidash.base import MetricSource, UnknownMetricError class DummyClient(MetricSource): """Serve dummy data.""" def __init__(self): self.latest = None self.metric_prefix = "test" self.prev_values = {} # map of metrics to previous values def new_value(self, metric): values = self.prev_values.setdefault(metric, []) values.insert(0, random.uniform(0, 100)) return values def get_latest(self, metric, start, end, summary_size, skip_nulls=True): values = self.get_history(metric, start, end, summary_size, skip_nulls) return values[0], values[-1] def get_history(self, metric, start, end, summary_size, skip_nulls=True): if not metric.startswith(self.metric_prefix): raise UnknownMetricError("Uknown metric %r" % (metric,)) steps = (self.total_seconds((-start) - (-end)) / float(self.total_seconds(summary_size))) values = self.new_value(metric) while len(values) < steps: values = self.new_value(metric) return values[:steps]
"""MetricSource that serves dummy data.""" import random from vumidash.base import MetricSource, UnknownMetricError class DummyClient(MetricSource): """Serve dummy data.""" def __init__(self): self.latest = None self.metric_prefix = "test" self.prev_values = {} # map of metrics to previous values def new_value(self, metric): values = self.prev_values.setdefault(metric, []) values.insert(0, random.uniform(0, 100)) return values def get_latest(self, metric, start, end, summary_size, skip_nulls=True): values = self.get_history(metric, start, end, summary_size, skip_nulls) return values[0], values[-1] def get_history(self, metric, start, end, summary_size, skip_nulls=True): if not metric.startswith(self.metric_prefix): raise UnknownMetricError("Uknown metric %r" % (metric,)) steps = int(self.total_seconds((-start) - (-end)) / float(self.total_seconds(summary_size))) values = self.new_value(metric) while len(values) < steps: values = self.new_value(metric) return values[:steps]
Fix steps calculation in dummy client -- how did this work before?
Fix steps calculation in dummy client -- how did this work before?
Python
bsd-3-clause
praekelt/vumi-dashboard,praekelt/vumi-dashboard
"""MetricSource that serves dummy data.""" import random from vumidash.base import MetricSource, UnknownMetricError class DummyClient(MetricSource): """Serve dummy data.""" def __init__(self): self.latest = None self.metric_prefix = "test" self.prev_values = {} # map of metrics to previous values def new_value(self, metric): values = self.prev_values.setdefault(metric, []) values.insert(0, random.uniform(0, 100)) return values def get_latest(self, metric, start, end, summary_size, skip_nulls=True): values = self.get_history(metric, start, end, summary_size, skip_nulls) return values[0], values[-1] def get_history(self, metric, start, end, summary_size, skip_nulls=True): if not metric.startswith(self.metric_prefix): raise UnknownMetricError("Uknown metric %r" % (metric,)) steps = int(self.total_seconds((-start) - (-end)) / float(self.total_seconds(summary_size))) values = self.new_value(metric) while len(values) < steps: values = self.new_value(metric) return values[:steps]
Fix steps calculation in dummy client -- how did this work before?

"""MetricSource that serves dummy data."""

import random

from vumidash.base import MetricSource, UnknownMetricError


class DummyClient(MetricSource):
    """Serve dummy data."""

    def __init__(self):
        self.latest = None
        self.metric_prefix = "test"
        self.prev_values = {}  # map of metrics to previous values

    def new_value(self, metric):
        values = self.prev_values.setdefault(metric, [])
        values.insert(0, random.uniform(0, 100))
        return values

    def get_latest(self, metric, start, end, summary_size, skip_nulls=True):
        values = self.get_history(metric, start, end, summary_size, skip_nulls)
        return values[0], values[-1]

    def get_history(self, metric, start, end, summary_size, skip_nulls=True):
        if not metric.startswith(self.metric_prefix):
            raise UnknownMetricError("Uknown metric %r" % (metric,))
        steps = (self.total_seconds((-start) - (-end)) /
                 float(self.total_seconds(summary_size)))
        values = self.new_value(metric)
        while len(values) < steps:
            values = self.new_value(metric)
        return values[:steps]
35c264819bac12fcb3baf8a2a33d63dd916f5f86
mezzanine_fluent_pages/mezzanine_layout_page/widgets.py
mezzanine_fluent_pages/mezzanine_layout_page/widgets.py
from django.forms.widgets import Select


class LayoutSelector(Select):
    """
    Modified `Select` class to select the original value.

    This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets
    .py` in the `django-fluent-pages` app.
    """
    def render(self, name, value, attrs=None, choices=()):
        """
        Modified render to set the data original value.

        :param name: The name of the `Select` field.
        :param value: The value of the `Select` field.
        :param attrs: Additional attributes of the `Select` field.
        :param choices: Available choices for the `Select` field.
        :return: HTML select.
        """
        if attrs:
            attrs['data-original-value'] = value
        return super(LayoutSelector, self).render(name, value, attrs, choices)
from django.forms.widgets import Select


class LayoutSelector(Select):
    """
    Modified `Select` class to select the original value.

    This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets
    .py` in the `django-fluent-pages` app.
    """
    def render(self, name, value, attrs=None, *args, **kwargs):
        """
        Modified render to set the data original value.

        :param name: The name of the `Select` field.
        :param value: The value of the `Select` field.
        :param attrs: Additional attributes of the `Select` field.
        :param args: pass along any other arguments.
        :param kwargs: pass along any other keyword arguments.
        :return: HTML select.
        """
        if attrs:
            attrs['data-original-value'] = value
        return super(LayoutSelector, self).render(name, value, attrs, *args,
                                                  **kwargs)
Remove keyword argument and allow generic argument passing.
Remove keyword argument and allow generic argument passing.
Python
bsd-2-clause
sjdines/mezzanine-fluent-pages,sjdines/mezzanine-fluent-pages,sjdines/mezzanine-fluent-pages
from django.forms.widgets import Select


class LayoutSelector(Select):
    """
    Modified `Select` class to select the original value.

    This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets
    .py` in the `django-fluent-pages` app.
    """
    def render(self, name, value, attrs=None, *args, **kwargs):
        """
        Modified render to set the data original value.

        :param name: The name of the `Select` field.
        :param value: The value of the `Select` field.
        :param attrs: Additional attributes of the `Select` field.
        :param args: pass along any other arguments.
        :param kwargs: pass along any other keyword arguments.
        :return: HTML select.
        """
        if attrs:
            attrs['data-original-value'] = value
        return super(LayoutSelector, self).render(name, value, attrs, *args,
                                                  **kwargs)
Remove keyword argument and allow generic argument passing.

from django.forms.widgets import Select


class LayoutSelector(Select):
    """
    Modified `Select` class to select the original value.

    This was adapted from `fluent_pages/pagetypes/fluent_pages/widgets
    .py` in the `django-fluent-pages` app.
    """
    def render(self, name, value, attrs=None, choices=()):
        """
        Modified render to set the data original value.

        :param name: The name of the `Select` field.
        :param value: The value of the `Select` field.
        :param attrs: Additional attributes of the `Select` field.
        :param choices: Available choices for the `Select` field.
        :return: HTML select.
        """
        if attrs:
            attrs['data-original-value'] = value
        return super(LayoutSelector, self).render(name, value, attrs, choices)
e728d6ebdd101b393f3d87fdfbade2c4c52c5ef1
cdent/emitter/perl.py
cdent/emitter/perl.py
"""\ Perl code emitter for C'Dent """ from __future__ import absolute_import from cdent.emitter import Emitter as Base class Emitter(Base): LANGUAGE_ID = 'pm' def emit_includecdent(self, includecdent): self.writeln('use CDent::Run;') def emit_class(self, class_): name = class_.name self.writeln('package %s;' % name) self.writeln('use CDent::Class;') self.writeln() self.emit(class_.has) self.writeln() self.writeln('1;') def emit_method(self, method): name = method.name self.writeln('sub %s {' % name) self.writeln(' my $self = shift;') self.emit(method.has, indent=True) self.writeln('}') def emit_println(self, println): self.write('print ', indent=True) self.emit(println.args) self.writeln(', "\\n";', indent=False) def emit_return(self, ret): self.writeln('return;')
"""\ Perl code emitter for C'Dent """ from __future__ import absolute_import from cdent.emitter import Emitter as Base class Emitter(Base): LANGUAGE_ID = 'pm' def emit_includecdent(self, includecdent): self.writeln('use CDent::Run;') def emit_class(self, class_): name = class_.name self.writeln('package %s;' % name) self.writeln('use Moose;') self.writeln() self.emit(class_.has) self.writeln() self.writeln('1;') def emit_method(self, method): name = method.name self.writeln('sub %s {' % name) self.writeln(' my $self = shift;') self.emit(method.has, indent=True) self.writeln('}') def emit_println(self, println): self.write('print ', indent=True) self.emit(println.args) self.writeln(', "\\n";', indent=False) def emit_return(self, ret): self.writeln('return;')
Use Moose for Perl 5
Use Moose for Perl 5
Python
bsd-2-clause
ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py
"""\ Perl code emitter for C'Dent """ from __future__ import absolute_import from cdent.emitter import Emitter as Base class Emitter(Base): LANGUAGE_ID = 'pm' def emit_includecdent(self, includecdent): self.writeln('use CDent::Run;') def emit_class(self, class_): name = class_.name self.writeln('package %s;' % name) self.writeln('use Moose;') self.writeln() self.emit(class_.has) self.writeln() self.writeln('1;') def emit_method(self, method): name = method.name self.writeln('sub %s {' % name) self.writeln(' my $self = shift;') self.emit(method.has, indent=True) self.writeln('}') def emit_println(self, println): self.write('print ', indent=True) self.emit(println.args) self.writeln(', "\\n";', indent=False) def emit_return(self, ret): self.writeln('return;')
Use Moose for Perl 5

"""\
Perl code emitter for C'Dent
"""

from __future__ import absolute_import

from cdent.emitter import Emitter as Base


class Emitter(Base):
    LANGUAGE_ID = 'pm'

    def emit_includecdent(self, includecdent):
        self.writeln('use CDent::Run;')

    def emit_class(self, class_):
        name = class_.name
        self.writeln('package %s;' % name)
        self.writeln('use CDent::Class;')
        self.writeln()
        self.emit(class_.has)
        self.writeln()
        self.writeln('1;')

    def emit_method(self, method):
        name = method.name
        self.writeln('sub %s {' % name)
        self.writeln('    my $self = shift;')
        self.emit(method.has, indent=True)
        self.writeln('}')

    def emit_println(self, println):
        self.write('print ', indent=True)
        self.emit(println.args)
        self.writeln(', "\\n";', indent=False)

    def emit_return(self, ret):
        self.writeln('return;')
cd0426dbbfc6f1573cf5d09485b8930eb498e1c6
mbuild/tests/test_utils.py
mbuild/tests/test_utils.py
import difflib

import pytest

from mbuild.tests.base_test import BaseTest
from mbuild.utils.io import get_fn
from mbuild.utils.validation import assert_port_exists


class TestUtils(BaseTest):

    def test_assert_port_exists(self, ch2):
        assert_port_exists('up', ch2)
        with pytest.raises(ValueError):
            assert_port_exists('dog', ch2)

    def test_structure_reproducibility(self, alkane_monolayer):
        filename = 'monolayer-tmp.pdb'
        alkane_monolayer.save(filename)
        with open(get_fn('monolayer.pdb')) as file1:
            with open('monolayer-tmp.pdb') as file2:
                diff = difflib.ndiff(file1.readlines(), file2.readlines())
        changes = [l for l in diff if l.startswith('+ ') or l.startswith('- ')]
        assert not changes
import difflib

import numpy as np
import pytest

from mbuild.tests.base_test import BaseTest
from mbuild.utils.io import get_fn, import_
from mbuild.utils.validation import assert_port_exists


class TestUtils(BaseTest):

    def test_assert_port_exists(self, ch2):
        assert_port_exists('up', ch2)
        with pytest.raises(ValueError):
            assert_port_exists('dog', ch2)

    def test_structure_reproducibility(self, alkane_monolayer):
        filename = 'monolayer-tmp.pdb'
        alkane_monolayer.save(filename)
        with open(get_fn('monolayer.pdb')) as file1:
            with open('monolayer-tmp.pdb') as file2:
                diff = difflib.ndiff(file1.readlines(), file2.readlines())
        changes = [l for l in diff if l.startswith('+ ') or l.startswith('- ')]
        assert not changes

    def test_fn(self):
        get_fn('benzene.mol2')
        with pytest.raises((IOError, OSError)):
            get_fn('garbage_file_name.foo')

    def test_import(self):
        assert np == import_('numpy')
        with pytest.raises(ImportError):
            import_('garbagepackagename')
Add some unit test on utils.io
Add some unit test on utils.io
Python
mit
iModels/mbuild,iModels/mbuild
import difflib

import numpy as np
import pytest

from mbuild.tests.base_test import BaseTest
from mbuild.utils.io import get_fn, import_
from mbuild.utils.validation import assert_port_exists


class TestUtils(BaseTest):

    def test_assert_port_exists(self, ch2):
        assert_port_exists('up', ch2)
        with pytest.raises(ValueError):
            assert_port_exists('dog', ch2)

    def test_structure_reproducibility(self, alkane_monolayer):
        filename = 'monolayer-tmp.pdb'
        alkane_monolayer.save(filename)
        with open(get_fn('monolayer.pdb')) as file1:
            with open('monolayer-tmp.pdb') as file2:
                diff = difflib.ndiff(file1.readlines(), file2.readlines())
        changes = [l for l in diff if l.startswith('+ ') or l.startswith('- ')]
        assert not changes

    def test_fn(self):
        get_fn('benzene.mol2')
        with pytest.raises((IOError, OSError)):
            get_fn('garbage_file_name.foo')

    def test_import(self):
        assert np == import_('numpy')
        with pytest.raises(ImportError):
            import_('garbagepackagename')
Add some unit test on utils.io

import difflib

import pytest

from mbuild.tests.base_test import BaseTest
from mbuild.utils.io import get_fn
from mbuild.utils.validation import assert_port_exists


class TestUtils(BaseTest):

    def test_assert_port_exists(self, ch2):
        assert_port_exists('up', ch2)
        with pytest.raises(ValueError):
            assert_port_exists('dog', ch2)

    def test_structure_reproducibility(self, alkane_monolayer):
        filename = 'monolayer-tmp.pdb'
        alkane_monolayer.save(filename)
        with open(get_fn('monolayer.pdb')) as file1:
            with open('monolayer-tmp.pdb') as file2:
                diff = difflib.ndiff(file1.readlines(), file2.readlines())
        changes = [l for l in diff if l.startswith('+ ') or l.startswith('- ')]
        assert not changes
b5bb360a78eb3493a52a4f085bb7ae2ef1355cdd
scavenger/net_utils.py
scavenger/net_utils.py
import subprocess

import requests


def logged_in():
    """Check whether the device has logged in.

    Return a dictionary containing:
        username
        byte
        duration (in seconds)

    Return False if no logged in
    """
    r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login',
                      data={'action': 'check_online'})

    if r:  # status: OK
        infos = r.text.split(',')
        if len(infos) == 5:  # Decode successfully
            return dict(username=infos[1],
                        byte=infos[2],
                        duration=infos[4])

    # Failed to get infos
    return False


def arp_scan():
    """Generate (IP, MAC) pairs using arp-scan"""
    proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'],
                            stdout=subprocess.PIPE)
    out = proc.stdout

    # Skip the first two lines.
    next(out)
    next(out)

    # Parse IPs & MACs
    for line in out:
        infos = line.split()
        if not infos:  # Empty line at the end of the output
            return
        if len(infos) < 2:
            raise RuntimeError('Invalid output of arp-scan: "%s"' % line)
        yield (infos[0], infos[1])  # Generate (IP, MAC)
import subprocess

import requests


def check_online():
    """Check whether the device has logged in.

    Return a dictionary containing:
        username
        byte
        duration (in seconds)

    Return False if no logged in
    """
    r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login',
                      data={'action': 'check_online'})

    if r:  # status: OK
        infos = r.text.split(',')
        if len(infos) == 5:  # Decode successfully
            return dict(username=infos[1],
                        byte=infos[2],
                        duration=infos[4])

    # Failed to get infos
    return False


def arp_scan():
    """Generate (IP, MAC) pairs using arp-scan"""
    proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'],
                            stdout=subprocess.PIPE)
    out = proc.stdout

    # Skip the first two lines.
    next(out)
    next(out)

    # Parse IPs & MACs
    for line in out:
        infos = line.split()
        if not infos:  # Empty line at the end of the output
            return
        if len(infos) < 2:
            raise RuntimeError('Invalid output of arp-scan: "%s"' % line)
        yield (infos[0], infos[1])  # Generate (IP, MAC)
Change name: logged_in => check_online
Change name: logged_in => check_online
Python
mit
ThomasLee969/scavenger
import subprocess

import requests


def check_online():
    """Check whether the device has logged in.

    Return a dictionary containing:
        username
        byte
        duration (in seconds)

    Return False if no logged in
    """
    r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login',
                      data={'action': 'check_online'})

    if r:  # status: OK
        infos = r.text.split(',')
        if len(infos) == 5:  # Decode successfully
            return dict(username=infos[1],
                        byte=infos[2],
                        duration=infos[4])

    # Failed to get infos
    return False


def arp_scan():
    """Generate (IP, MAC) pairs using arp-scan"""
    proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'],
                            stdout=subprocess.PIPE)
    out = proc.stdout

    # Skip the first two lines.
    next(out)
    next(out)

    # Parse IPs & MACs
    for line in out:
        infos = line.split()
        if not infos:  # Empty line at the end of the output
            return
        if len(infos) < 2:
            raise RuntimeError('Invalid output of arp-scan: "%s"' % line)
        yield (infos[0], infos[1])  # Generate (IP, MAC)
Change name: logged_in => check_online

import subprocess

import requests


def logged_in():
    """Check whether the device has logged in.

    Return a dictionary containing:
        username
        byte
        duration (in seconds)

    Return False if no logged in
    """
    r = requests.post('http://net.tsinghua.edu.cn/cgi-bin/do_login',
                      data={'action': 'check_online'})

    if r:  # status: OK
        infos = r.text.split(',')
        if len(infos) == 5:  # Decode successfully
            return dict(username=infos[1],
                        byte=infos[2],
                        duration=infos[4])

    # Failed to get infos
    return False


def arp_scan():
    """Generate (IP, MAC) pairs using arp-scan"""
    proc = subprocess.Popen(['sudo', 'arp-scan', '-lq'],
                            stdout=subprocess.PIPE)
    out = proc.stdout

    # Skip the first two lines.
    next(out)
    next(out)

    # Parse IPs & MACs
    for line in out:
        infos = line.split()
        if not infos:  # Empty line at the end of the output
            return
        if len(infos) < 2:
            raise RuntimeError('Invalid output of arp-scan: "%s"' % line)
        yield (infos[0], infos[1])  # Generate (IP, MAC)
12f4f47d0f9a4a24d37e16fb1afc0841399ccadf
setup.py
setup.py
# Use the newer `setuptools.setup()`, if available.
try:
    from setuptools import setup
    kw = {
        'test_suite': 'tests',
        'tests_require': ['astunparse'],
    }
except ImportError:
    from distutils.core import setup
    kw = {}

setup(name='gast',  # gast, daou naer!
      version='0.2.0',
      packages=['gast'],
      description='Python AST that abstracts the underlying Python version',
      long_description='''
A generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).

GAST provides a compatibility layer between the AST of various Python
versions, as produced by ``ast.parse`` from the standard ``ast`` module.''',
      author='serge-sans-paille',
      author_email='[email protected]',
      license="BSD 3-Clause",
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Console',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: BSD License',
                   'Natural Language :: English',
                   'Programming Language :: Python :: 2',
                   'Programming Language :: Python :: 3'],
      **kw
      )
# Use the newer `setuptools.setup()`, if available.
try:
    from setuptools import setup
    kw = {
        'test_suite': 'tests',
        'tests_require': ['astunparse'],
    }
except ImportError:
    from distutils.core import setup
    kw = {}

setup(name='gast',  # gast, daou naer!
      version='0.2.0',
      packages=['gast'],
      description='Python AST that abstracts the underlying Python version',
      long_description='''
A generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).

GAST provides a compatibility layer between the AST of various Python
versions, as produced by ``ast.parse`` from the standard ``ast`` module.''',
      author='serge-sans-paille',
      author_email='[email protected]',
      license="BSD 3-Clause",
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Console',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: BSD License',
                   'Natural Language :: English',
                   'Programming Language :: Python :: 2',
                   'Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.4',
                   'Programming Language :: Python :: 3.5',
                   'Programming Language :: Python :: 3.6',
                   ],
      python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
      **kw
      )
Add python_requires to help pip, and Trove classifiers
Add python_requires to help pip, and Trove classifiers
Python
bsd-3-clause
serge-sans-paille/gast
# Use the newer `setuptools.setup()`, if available.
try:
    from setuptools import setup
    kw = {
        'test_suite': 'tests',
        'tests_require': ['astunparse'],
    }
except ImportError:
    from distutils.core import setup
    kw = {}

setup(name='gast',  # gast, daou naer!
      version='0.2.0',
      packages=['gast'],
      description='Python AST that abstracts the underlying Python version',
      long_description='''
A generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).

GAST provides a compatibility layer between the AST of various Python
versions, as produced by ``ast.parse`` from the standard ``ast`` module.''',
      author='serge-sans-paille',
      author_email='[email protected]',
      license="BSD 3-Clause",
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Console',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: BSD License',
                   'Natural Language :: English',
                   'Programming Language :: Python :: 2',
                   'Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3',
                   'Programming Language :: Python :: 3.4',
                   'Programming Language :: Python :: 3.5',
                   'Programming Language :: Python :: 3.6',
                   ],
      python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
      **kw
      )
Add python_requires to help pip, and Trove classifiers

# Use the newer `setuptools.setup()`, if available.
try:
    from setuptools import setup
    kw = {
        'test_suite': 'tests',
        'tests_require': ['astunparse'],
    }
except ImportError:
    from distutils.core import setup
    kw = {}

setup(name='gast',  # gast, daou naer!
      version='0.2.0',
      packages=['gast'],
      description='Python AST that abstracts the underlying Python version',
      long_description='''
A generic AST to represent Python2 and Python3's Abstract Syntax Tree(AST).

GAST provides a compatibility layer between the AST of various Python
versions, as produced by ``ast.parse`` from the standard ``ast`` module.''',
      author='serge-sans-paille',
      author_email='[email protected]',
      license="BSD 3-Clause",
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Console',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: BSD License',
                   'Natural Language :: English',
                   'Programming Language :: Python :: 2',
                   'Programming Language :: Python :: 3'],
      **kw
      )
be59230531d98dc25f806b2290a51a0f4fde1d3b
addons/survey/migrations/8.0.2.0/pre-migration.py
addons/survey/migrations/8.0.2.0/pre-migration.py
# coding: utf-8
from openupgradelib import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    openupgrade.rename_tables(cr, [('survey', 'survey_survey')])
    openupgrade.rename_models(cr, [('survey', 'survey.survey')])
Rename model to prevent crash during module upgrade in tests
[ADD] Rename model to prevent crash during module upgrade in tests
Python
agpl-3.0
grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,Endika/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade
# coding: utf-8
from openupgradelib import openupgrade


@openupgrade.migrate()
def migrate(cr, version):
    openupgrade.rename_tables(cr, [('survey', 'survey_survey')])
    openupgrade.rename_models(cr, [('survey', 'survey.survey')])
[ADD] Rename model to prevent crash during module upgrade in tests
f7dd16abcab5d5e0134083267f21672de8e3d5e1
hc/front/context_processors.py
hc/front/context_processors.py
from django.conf import settings


def branding(request):
    return {
        "site_name": settings.SITE_NAME,
        "site_root": settings.SITE_ROOT,
        "site_logo_url": settings.SITE_LOGO_URL,
    }
from django.conf import settings


def branding(request):
    return {
        "site_name": settings.SITE_NAME,
        "site_logo_url": settings.SITE_LOGO_URL,
    }
Remove site_root from template context, it's never used
Remove site_root from template context, it's never used
Python
bsd-3-clause
iphoting/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,healthchecks/healthchecks,iphoting/healthchecks
from django.conf import settings


def branding(request):
    return {
        "site_name": settings.SITE_NAME,
        "site_logo_url": settings.SITE_LOGO_URL,
    }
Remove site_root from template context, it's never used

from django.conf import settings


def branding(request):
    return {
        "site_name": settings.SITE_NAME,
        "site_root": settings.SITE_ROOT,
        "site_logo_url": settings.SITE_LOGO_URL,
    }
513d8e83dc7aea052682df2bc93cd146b6799406
client/examples/cycle-cards.py
client/examples/cycle-cards.py
#!/bin/python

import removinator
import subprocess

# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.

print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()

print('--- Cycling through cards ---')
for card in range(1, 9):
    try:
        ctl.insert_card(card)
        print('Inserted card {0}'.format(card))
        print('{0}'.format(subprocess.check_output(['pkcs15-tool',
                                                    '--list-certificates'])
                           .rstrip()))
    except removinator.SlotError:
        print('Card {0} is not inserted'.format(card))

print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
    print('Card {0} is present'.format(card))

print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)

print('--- Remove current card ---')
ctl.remove_card()
#!/usr/bin/env python

import removinator
import subprocess

# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.

print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()

print('--- Cycling through cards ---')
for card in range(1, 9):
    try:
        ctl.insert_card(card)
        print('Inserted card {0}'.format(card))
        print('{0}'.format(subprocess.check_output(['pkcs15-tool',
                                                    '--list-certificates'])
                           .rstrip()))
    except removinator.SlotError:
        print('Card {0} is not inserted'.format(card))

print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
    print('Card {0} is present'.format(card))

print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)

print('--- Remove current card ---')
ctl.remove_card()
Use python from env in example script
Use python from env in example script

This makes the cycle-cards example script use python from the env instead of hardcoding the location. This allows a virtualenv to be easily used.
Python
apache-2.0
nkinder/smart-card-removinator
#!/usr/bin/env python

import removinator
import subprocess

# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.

print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()

print('--- Cycling through cards ---')
for card in range(1, 9):
    try:
        ctl.insert_card(card)
        print('Inserted card {0}'.format(card))
        print('{0}'.format(subprocess.check_output(['pkcs15-tool',
                                                    '--list-certificates'])
                           .rstrip()))
    except removinator.SlotError:
        print('Card {0} is not inserted'.format(card))

print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
    print('Card {0} is present'.format(card))

print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)

print('--- Remove current card ---')
ctl.remove_card()
Use python from env in example script

This makes the cycle-cards example script use python from the env instead of hardcoding the location. This allows a virtualenv to be easily used.

#!/bin/python

import removinator
import subprocess

# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.

print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()

print('--- Cycling through cards ---')
for card in range(1, 9):
    try:
        ctl.insert_card(card)
        print('Inserted card {0}'.format(card))
        print('{0}'.format(subprocess.check_output(['pkcs15-tool',
                                                    '--list-certificates'])
                           .rstrip()))
    except removinator.SlotError:
        print('Card {0} is not inserted'.format(card))

print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
    print('Card {0} is present'.format(card))

print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)

print('--- Remove current card ---')
ctl.remove_card()
28bb129931e14d5681ba717f6c949e2305fd2e03
django/website/main/tests/test_merge_coverage_handling.py
django/website/main/tests/test_merge_coverage_handling.py
from mock import Mock

from main.management.commands.merge_coverage_files import Command
from main.tests.helper_methods import mock_out_unwanted_methods


def test_merge_coverage_handle_calls_parse_options():
    merge_coverage_files_command = Command()
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['prepare_packagefilters',
                               'write_filtered_data',
                               'write_merged_data'])
    merge_coverage_files_command.parse_options = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.parse_options.called


def test_merge_coverage_handle_calls_prepare_packagefilters():
    merge_coverage_files_command = Command()
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['parse_options',
                               'write_filtered_data',
                               'write_merged_data'])
    merge_coverage_files_command.prepare_packagefilters = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.prepare_packagefilters.called


def test_write_filtered_data_called_when_filteronly_is_true():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.filteronly = True
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['parse_options',
                               'prepare_packagefilters',
                               'write_merged_data'])
    merge_coverage_files_command.write_filtered_data = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.write_filtered_data.called


def test_write_merged_data_called_when_filteronly_is_false():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.filteronly = False
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['parse_options',
                               'prepare_packagefilters',
                               'write_filtered_data'])
    merge_coverage_files_command.write_merged_data = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.write_merged_data.called
Add tests to run command to merge content
Add tests to run command to merge content
Python
agpl-3.0
daniell/kashana,aptivate/alfie,daniell/kashana,aptivate/alfie,daniell/kashana,daniell/kashana,aptivate/kashana,aptivate/kashana,aptivate/kashana,aptivate/alfie,aptivate/alfie,aptivate/kashana
from mock import Mock

from main.management.commands.merge_coverage_files import Command
from main.tests.helper_methods import mock_out_unwanted_methods


def test_merge_coverage_handle_calls_parse_options():
    merge_coverage_files_command = Command()
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['prepare_packagefilters',
                               'write_filtered_data',
                               'write_merged_data'])
    merge_coverage_files_command.parse_options = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.parse_options.called


def test_merge_coverage_handle_calls_prepare_packagefilters():
    merge_coverage_files_command = Command()
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['parse_options',
                               'write_filtered_data',
                               'write_merged_data'])
    merge_coverage_files_command.prepare_packagefilters = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.prepare_packagefilters.called


def test_write_filtered_data_called_when_filteronly_is_true():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.filteronly = True
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['parse_options',
                               'prepare_packagefilters',
                               'write_merged_data'])
    merge_coverage_files_command.write_filtered_data = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.write_filtered_data.called


def test_write_merged_data_called_when_filteronly_is_false():
    merge_coverage_files_command = Command()
    merge_coverage_files_command.filteronly = False
    # We don't want these methods to run
    mock_out_unwanted_methods(merge_coverage_files_command,
                              ['parse_options',
                               'prepare_packagefilters',
                               'write_filtered_data'])
    merge_coverage_files_command.write_merged_data = Mock()
    merge_coverage_files_command.handle()
    assert merge_coverage_files_command.write_merged_data.called
Add tests to run command to merge content
97f81ddfdd78d062e5019793101926fb52b0db38
sum.py
sum.py
import sublime, sublime_plugin


class SumCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        new_view = self.view.window().new_file()
        new_view.set_name('Sum')
        new_view.insert(edit, 0, '42')
        new_view.set_scratch(True)
import sublime, sublime_plugin


class SumCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        new_view = self.view.window().new_file()
        new_view.set_name('Sum')
        new_view.insert(edit, 0, '42')
        new_view.set_read_only(True)
        new_view.set_scratch(True)
Set new file to read-only
Set new file to read-only

Since the new file does not prompt about file changes when closed, if the user were to edit the new file and close without saving, their changes would be lost forever. By setting the new file to be read-only, the user will not be able to make changes to it that may be lost.
Python
mit
jbrudvik/sublime-sum,jbrudvik/sublime-sum
import sublime, sublime_plugin


class SumCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        new_view = self.view.window().new_file()
        new_view.set_name('Sum')
        new_view.insert(edit, 0, '42')
        new_view.set_read_only(True)
        new_view.set_scratch(True)
Set new file to read-only

Since the new file does not prompt about file changes when closed, if the user were to edit the new file and close without saving, their changes would be lost forever. By setting the new file to be read-only, the user will not be able to make changes to it that may be lost.

import sublime, sublime_plugin


class SumCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        new_view = self.view.window().new_file()
        new_view.set_name('Sum')
        new_view.insert(edit, 0, '42')
        new_view.set_scratch(True)
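One consequence of the set_read_only(True) call above: the plugin itself can no longer write to the view either. A command that later needs to refresh the buffer would have to toggle the flag around its edit — a minimal sketch with a hypothetical command name:

import sublime
import sublime_plugin


class UpdateSumCommand(sublime_plugin.TextCommand):  # hypothetical command
    def run(self, edit):
        # Lift the write lock just long enough to swap the contents,
        # then restore it so the user still cannot modify the view.
        self.view.set_read_only(False)
        self.view.replace(edit, sublime.Region(0, self.view.size()), '43')
        self.view.set_read_only(True)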
2ad1c276a96a77d2088f996ebc32fa74206d1cef
osf/migrations/0036_ensure_schemas.py
osf/migrations/0036_ensure_schemas.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals

import logging

from django.db import migrations

from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS

logger = logging.getLogger(__file__)


def add_schemas(*args):
    """Import meta-data schemas from JSON to database if not already loaded
    """
    schema_count = 0
    for schema in OSF_META_SCHEMAS:
        schema_obj, created = MetaSchema.objects.get_or_create(
            name=schema['name'],
            schema_version=schema.get('version', 1)
        )
        schema_obj.schema = schema
        schema_obj.save()
        schema_count += 1
        if created:
            logger.info('Added schema {} to the database'.format(schema['name']))
    logger.info('Ensured {} schemas are in the database'.format(schema_count))


def remove_schemas(*args):
    removed_schemas = 0
    for schema in OSF_META_SCHEMAS:
        schema_obj = MetaSchema.objects.get(
            schema=schema,
            name=schema['name'],
            schema_version=schema.get('version', 1)
        )
        schema_obj.delete()
        removed_schemas += 1
    logger.info('Removed {} schemas from the database'.format(removed_schemas))


class Migration(migrations.Migration):

    dependencies = [
        ('osf', '0035_ensure_licenses'),
    ]

    operations = [
        migrations.RunPython(add_schemas, remove_schemas),
    ]
Add migration for ensure schemas
Add migration for ensure schemas
Python
apache-2.0
CenterForOpenScience/osf.io,mattclark/osf.io,pattisdr/osf.io,caseyrollins/osf.io,crcresearch/osf.io,adlius/osf.io,erinspace/osf.io,baylee-d/osf.io,cslzchen/osf.io,laurenrevere/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,binoculars/osf.io,mattclark/osf.io,pattisdr/osf.io,sloria/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,adlius/osf.io,caneruguz/osf.io,felliott/osf.io,chennan47/osf.io,TomBaxter/osf.io,aaxelb/osf.io,caneruguz/osf.io,leb2dg/osf.io,mfraezz/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,leb2dg/osf.io,chrisseto/osf.io,saradbowman/osf.io,cslzchen/osf.io,TomBaxter/osf.io,chennan47/osf.io,leb2dg/osf.io,binoculars/osf.io,caseyrollins/osf.io,icereval/osf.io,adlius/osf.io,aaxelb/osf.io,chrisseto/osf.io,erinspace/osf.io,cslzchen/osf.io,icereval/osf.io,mattclark/osf.io,binoculars/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,felliott/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,sloria/osf.io,laurenrevere/osf.io,adlius/osf.io,crcresearch/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,sloria/osf.io,erinspace/osf.io,icereval/osf.io,TomBaxter/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,felliott/osf.io,laurenrevere/osf.io,chrisseto/osf.io,crcresearch/osf.io
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals

import logging

from django.db import migrations

from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS

logger = logging.getLogger(__file__)


def add_schemas(*args):
    """Import meta-data schemas from JSON to database if not already loaded
    """
    schema_count = 0
    for schema in OSF_META_SCHEMAS:
        schema_obj, created = MetaSchema.objects.get_or_create(
            name=schema['name'],
            schema_version=schema.get('version', 1)
        )
        schema_obj.schema = schema
        schema_obj.save()
        schema_count += 1
        if created:
            logger.info('Added schema {} to the database'.format(schema['name']))
    logger.info('Ensured {} schemas are in the database'.format(schema_count))


def remove_schemas(*args):
    removed_schemas = 0
    for schema in OSF_META_SCHEMAS:
        schema_obj = MetaSchema.objects.get(
            schema=schema,
            name=schema['name'],
            schema_version=schema.get('version', 1)
        )
        schema_obj.delete()
        removed_schemas += 1
    logger.info('Removed {} schemas from the database'.format(removed_schemas))


class Migration(migrations.Migration):

    dependencies = [
        ('osf', '0035_ensure_licenses'),
    ]

    operations = [
        migrations.RunPython(add_schemas, remove_schemas),
    ]
Add migration for ensure schemas
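The migration above is safe to re-run because MetaSchema.objects.get_or_create returns an (instance, created) pair instead of failing on an existing row. A minimal sketch of that contract — the name value below is a placeholder:

# First call inserts the row and reports created=True; a second call with
# the same lookup fields finds it and reports created=False.
obj, created = MetaSchema.objects.get_or_create(name='Example', schema_version=1)
assert created
obj, created = MetaSchema.objects.get_or_create(name='Example', schema_version=1)
assert not created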
36021ba78d84dbb3aef8ea54369f88f6461eced6
history_rewrite_scripts/config.py
history_rewrite_scripts/config.py
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = '[email protected]'

BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'

# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
    ('refs/heads/master', 'refs/heads/master'),
    ('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
    ('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
    ('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]

MERGE_MSG = """Merge Chromium + Blink git repositories

Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s

BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = '[email protected]'

BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'

# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
    ('refs/heads/master', 'refs/heads/master'),
    ('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
    ('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]

MERGE_MSG = """Merge Chromium + Blink git repositories

Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s

BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
Switch to 2311 + 2357 branches
Switch to 2311 + 2357 branches
Python
bsd-3-clause
primiano/chrome-blink-automerger
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = '[email protected]'

BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'

# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
    ('refs/heads/master', 'refs/heads/master'),
    ('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
    ('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]

MERGE_MSG = """Merge Chromium + Blink git repositories

Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s

BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
Switch to 2311 + 2357 branches

# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = '[email protected]'

BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'

# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
    ('refs/heads/master', 'refs/heads/master'),
    ('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
    ('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
    ('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]

MERGE_MSG = """Merge Chromium + Blink git repositories

Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s

BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
87cca84b6750a3176b86df2786a9b78f7647c062
plugins/hello/hello_test.py
plugins/hello/hello_test.py
from p1tr.test import *


class HelloTest(PluginTestCase):

    @test
    def hello_test(self):
        for data in self.dummy_data:
            self.assertEqual(self.plugin.hello(data.server, data.channel,
                                               data.nick, data.params),
                             'Hello, %s!' % data.nick.split('!')[0])
Add test case for hello plugin
Add test case for hello plugin
Python
mit
howard/p1tr-tng,howard/p1tr-tng
from p1tr.test import *


class HelloTest(PluginTestCase):

    @test
    def hello_test(self):
        for data in self.dummy_data:
            self.assertEqual(self.plugin.hello(data.server, data.channel,
                                               data.nick, data.params),
                             'Hello, %s!' % data.nick.split('!')[0])
Add test case for hello plugin
7d862be1aba5a062eeaf54ada9587278e7e93f5b
apps/provider/urls.py
apps/provider/urls.py
from __future__ import absolute_import
from __future__ import unicode_literals

from django.conf.urls import patterns, include, url

from .views import *

urlpatterns = patterns('',
    url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
    url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
)
from __future__ import absolute_import
from __future__ import unicode_literals

from django.conf.urls import patterns, include, url

from .views import *

urlpatterns = patterns('',
    url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
    url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
    url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
Change fhir practitioner url and add organization url
Change fhir practitioner url and add organization url
Python
apache-2.0
TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client
from __future__ import absolute_import
from __future__ import unicode_literals

from django.conf.urls import patterns, include, url

from .views import *

urlpatterns = patterns('',
    url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
    url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
    url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
Change fhir practitioner url and add organization url

from __future__ import absolute_import
from __future__ import unicode_literals

from django.conf.urls import patterns, include, url

from .views import *

urlpatterns = patterns('',
    url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
    url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
)
155ab92dd2ff4340e4773e22762d52f557b300e8
dividebatur/tests/test_ticket_sort_key.py
dividebatur/tests/test_ticket_sort_key.py
from ..aecdata import ticket_sort_key


def apply_ticket_sort(items):
    return list(sorted(items, key=ticket_sort_key))


def test_a_c_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C']) == ['A', 'B', 'C'])


def test_a_c_reversed():
    assert(apply_ticket_sort(['C', 'B', 'A']) == ['A', 'B', 'C'])


def test_a_c_aa_reversed():
    assert(apply_ticket_sort(['AA', 'C', 'B', 'A']) == ['A', 'B', 'C', 'AA'])


def test_a_c_aa_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C', 'AA']) == ['A', 'B', 'C', 'AA'])
from ..aecdata.utils import ticket_sort_key


def apply_ticket_sort(items):
    return list(sorted(items, key=ticket_sort_key))


def test_a_c_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C']) == ['A', 'B', 'C'])


def test_a_c_reversed():
    assert(apply_ticket_sort(['C', 'B', 'A']) == ['A', 'B', 'C'])


def test_a_c_aa_reversed():
    assert(apply_ticket_sort(['AA', 'C', 'B', 'A']) == ['A', 'B', 'C', 'AA'])


def test_a_c_aa_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C', 'AA']) == ['A', 'B', 'C', 'AA'])
Fix import in ticket_sort_key tests.
Fix import in ticket_sort_key tests.
Python
apache-2.0
grahame/dividebatur,grahame/dividebatur,grahame/dividebatur
from ..aecdata.utils import ticket_sort_key


def apply_ticket_sort(items):
    return list(sorted(items, key=ticket_sort_key))


def test_a_c_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C']) == ['A', 'B', 'C'])


def test_a_c_reversed():
    assert(apply_ticket_sort(['C', 'B', 'A']) == ['A', 'B', 'C'])


def test_a_c_aa_reversed():
    assert(apply_ticket_sort(['AA', 'C', 'B', 'A']) == ['A', 'B', 'C', 'AA'])


def test_a_c_aa_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C', 'AA']) == ['A', 'B', 'C', 'AA'])
Fix import in ticket_sort_key tests.

from ..aecdata import ticket_sort_key


def apply_ticket_sort(items):
    return list(sorted(items, key=ticket_sort_key))


def test_a_c_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C']) == ['A', 'B', 'C'])


def test_a_c_reversed():
    assert(apply_ticket_sort(['C', 'B', 'A']) == ['A', 'B', 'C'])


def test_a_c_aa_reversed():
    assert(apply_ticket_sort(['AA', 'C', 'B', 'A']) == ['A', 'B', 'C', 'AA'])


def test_a_c_aa_already_sorted():
    assert(apply_ticket_sort(['A', 'B', 'C', 'AA']) == ['A', 'B', 'C', 'AA'])
1e6f3689a21e12104792236d88e7596cb8397ba5
mezzanine/core/management.py
mezzanine/core/management.py
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth import models as auth_app
from django.db.models.signals import post_syncdb


def create_demo_user(app, created_models, verbosity, db, **kwargs):
    if settings.DEBUG and User in created_models:
        if verbosity >= 2:
            print "Creating demo User object"
        User.objects.create_superuser("demo", "[email protected]", "demo")

post_syncdb.connect(create_demo_user, sender=auth_app)
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth import models as auth_app
from django.db.models.signals import post_syncdb


def create_demo_user(app, created_models, verbosity, **kwargs):
    if settings.DEBUG and User in created_models:
        if verbosity >= 2:
            print "Creating demo User object"
        User.objects.create_superuser("demo", "[email protected]", "demo")

post_syncdb.connect(create_demo_user, sender=auth_app)
Fix post_syncdb signal for demo user to work with Django 1.1
Fix post_syncdb signal for demo user to work with Django 1.1
Python
bsd-2-clause
nikolas/mezzanine,AlexHill/mezzanine,webounty/mezzanine,douglaskastle/mezzanine,nikolas/mezzanine,christianwgd/mezzanine,jjz/mezzanine,guibernardino/mezzanine,stbarnabas/mezzanine,promil23/mezzanine,spookylukey/mezzanine,ryneeverett/mezzanine,gradel/mezzanine,jerivas/mezzanine,fusionbox/mezzanine,guibernardino/mezzanine,mush42/mezzanine,geodesign/mezzanine,orlenko/sfpirg,tuxinhang1989/mezzanine,viaregio/mezzanine,dsanders11/mezzanine,frankier/mezzanine,vladir/mezzanine,dovydas/mezzanine,Cajoline/mezzanine,ZeroXn/mezzanine,jerivas/mezzanine,molokov/mezzanine,Skytorn86/mezzanine,wbtuomela/mezzanine,SoLoHiC/mezzanine,spookylukey/mezzanine,adrian-the-git/mezzanine,jjz/mezzanine,Cajoline/mezzanine,gbosh/mezzanine,tuxinhang1989/mezzanine,christianwgd/mezzanine,viaregio/mezzanine,ryneeverett/mezzanine,Cajoline/mezzanine,ryneeverett/mezzanine,molokov/mezzanine,ZeroXn/mezzanine,joshcartme/mezzanine,scarcry/snm-mezzanine,wyzex/mezzanine,readevalprint/mezzanine,molokov/mezzanine,biomassives/mezzanine,industrydive/mezzanine,vladir/mezzanine,SoLoHiC/mezzanine,damnfine/mezzanine,Kniyl/mezzanine,frankchin/mezzanine,dovydas/mezzanine,saintbird/mezzanine,eino-makitalo/mezzanine,agepoly/mezzanine,spookylukey/mezzanine,orlenko/sfpirg,eino-makitalo/mezzanine,adrian-the-git/mezzanine,damnfine/mezzanine,viaregio/mezzanine,wyzex/mezzanine,readevalprint/mezzanine,batpad/mezzanine,emile2016/mezzanine,dustinrb/mezzanine,frankchin/mezzanine,Kniyl/mezzanine,wbtuomela/mezzanine,dekomote/mezzanine-modeltranslation-backport,frankier/mezzanine,Kniyl/mezzanine,saintbird/mezzanine,mush42/mezzanine,wyzex/mezzanine,dsanders11/mezzanine,PegasusWang/mezzanine,orlenko/sfpirg,joshcartme/mezzanine,cccs-web/mezzanine,promil23/mezzanine,geodesign/mezzanine,wrwrwr/mezzanine,promil23/mezzanine,theclanks/mezzanine,fusionbox/mezzanine,sjdines/mezzanine,wrwrwr/mezzanine,orlenko/plei,dsanders11/mezzanine,Skytorn86/mezzanine,Cicero-Zhao/mezzanine,readevalprint/mezzanine,wbtuomela/mezzanine,sjuxax/mezzanine,christianwgd/mezzanine,orlenko/plei,tuxinhang1989/mezzanine,stbarnabas/mezzanine,vladir/mezzanine,jjz/mezzanine,batpad/mezzanine,scarcry/snm-mezzanine,stephenmcd/mezzanine,jerivas/mezzanine,sjdines/mezzanine,orlenko/plei,gradel/mezzanine,joshcartme/mezzanine,theclanks/mezzanine,dovydas/mezzanine,saintbird/mezzanine,biomassives/mezzanine,biomassives/mezzanine,dekomote/mezzanine-modeltranslation-backport,Cicero-Zhao/mezzanine,agepoly/mezzanine,adrian-the-git/mezzanine,stephenmcd/mezzanine,PegasusWang/mezzanine,SoLoHiC/mezzanine,sjuxax/mezzanine,PegasusWang/mezzanine,sjdines/mezzanine,frankier/mezzanine,webounty/mezzanine,dustinrb/mezzanine,sjuxax/mezzanine,damnfine/mezzanine,Skytorn86/mezzanine,stephenmcd/mezzanine,theclanks/mezzanine,industrydive/mezzanine,ZeroXn/mezzanine,dekomote/mezzanine-modeltranslation-backport,emile2016/mezzanine,mush42/mezzanine,emile2016/mezzanine,gbosh/mezzanine,AlexHill/mezzanine,gradel/mezzanine,scarcry/snm-mezzanine,eino-makitalo/mezzanine,cccs-web/mezzanine,dustinrb/mezzanine,douglaskastle/mezzanine,gbosh/mezzanine,geodesign/mezzanine,nikolas/mezzanine,douglaskastle/mezzanine,frankchin/mezzanine,industrydive/mezzanine,agepoly/mezzanine,webounty/mezzanine
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth import models as auth_app
from django.db.models.signals import post_syncdb


def create_demo_user(app, created_models, verbosity, **kwargs):
    if settings.DEBUG and User in created_models:
        if verbosity >= 2:
            print "Creating demo User object"
        User.objects.create_superuser("demo", "[email protected]", "demo")

post_syncdb.connect(create_demo_user, sender=auth_app)
Fix post_syncdb signal for demo user to work with Django 1.1

from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth import models as auth_app
from django.db.models.signals import post_syncdb


def create_demo_user(app, created_models, verbosity, db, **kwargs):
    if settings.DEBUG and User in created_models:
        if verbosity >= 2:
            print "Creating demo User object"
        User.objects.create_superuser("demo", "[email protected]", "demo")

post_syncdb.connect(create_demo_user, sender=auth_app)
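The fix above works because Django dispatches signal payload as keyword arguments, so a receiver that takes only sender plus **kwargs survives signature changes such as the db parameter appearing or disappearing between releases. A hedged sketch of that defensive style — the _robust name is hypothetical, not from the commit:

from django.db.models.signals import post_syncdb
from django.contrib.auth import models as auth_app


def create_demo_user_robust(sender, **kwargs):
    # Everything arrives via kwargs, so added or removed signal arguments
    # (like `db` between Django versions) cannot break the call.
    created_models = kwargs.get('created_models', [])
    verbosity = kwargs.get('verbosity', 1)

post_syncdb.connect(create_demo_user_robust, sender=auth_app)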
3bdc7250f7a40ef4b3ad5f431c6b6e3e92ccacc8
app.py
app.py
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save

app = Flask(__name__)

# @app.route('/')
# def main():
#     return redirect('/index')

@app.route('/', methods=['GET', 'POST'])
def index():
    if request.method == 'GET':
        return render_template('index.html')
    else:
        pitcher = request.form['pitcher']
        image_file = pitcher.lower()
        image_file = image_file.split()
        image_file = '_'.join(image_file) + '.png'
        return render_template('results.html', image_file = image_file)

if __name__ == '__main__':
    app.run(port=33508)
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save

app = Flask(__name__)

@app.route('/')
def main():
    return redirect('/index')

@app.route('/index', methods=['GET', 'POST'])
def index():
    if request.method == 'GET':
        return render_template('index.html')
    else:
        pitcher = request.form['pitcher']
        image_file = pitcher.lower()
        image_file = image_file.split()
        image_file = '_'.join(image_file) + '.png'
        return render_template('results.html', image_file = image_file)

if __name__ == '__main__':
    app.run(port=33508)
Revert "Remove redirect to avoid Chrome privacy error"
Revert "Remove redirect to avoid Chrome privacy error" This reverts commit e5322958f14b2428b74de726476fd98adae8c454.
Python
mit
gsganden/pitcher-reports,gsganden/pitcher-reports
from flask import Flask, render_template, request, redirect
import requests
import pandas as pd
from datetime import datetime
from bokeh.plotting import figure, output_notebook, output_file, save

app = Flask(__name__)

@app.route('/')
def main():
    return redirect('/index')

@app.route('/index', methods=['GET', 'POST'])
def index():
    if request.method == 'GET':
        return render_template('index.html')
    else:
        pitcher = request.form['pitcher']
        image_file = pitcher.lower()
        image_file = image_file.split()
        image_file = '_'.join(image_file) + '.png'
        return render_template('results.html', image_file = image_file)

if __name__ == '__main__':
    app.run(port=33508)
Revert "Remove redirect to avoid Chrome privacy error" This reverts commit e5322958f14b2428b74de726476fd98adae8c454. from flask import Flask, render_template, request, redirect import requests import pandas as pd from datetime import datetime from bokeh.plotting import figure, output_notebook, output_file, save app = Flask(__name__) # @app.route('/') # def main(): # return redirect('/index') @app.route('/', methods=['GET', 'POST']) def index(): if request.method == 'GET': return render_template('index.html') else: pitcher = request.form['pitcher'] image_file = pitcher.lower() image_file = image_file.split() image_file = '_'.join(image_file) + '.png' return render_template('results.html', image_file = image_file) if __name__ == '__main__': app.run(port=33508)
9bb1aebbfc0ca0ff893bafe99de3c32c2ba99952
tests/test_model.py
tests/test_model.py
from context import models
from models import model
import unittest


class test_logic_core(unittest.TestCase):
    def setUp(self):
        self.room = model.Room(20, 'new_room')
        self.room1 = model.Room(6, 'new_room1')
        self.livingspace = model.LivingSpace('orange')
        self.office = model.Office('manjaro')

    def test_Room_instance(self):
        self.assertIsInstance(self.room, model.Room)
        self.assertIsInstance(self.room1, model.Room)

    def test_Room_max_occupation(self):
        self.assertEqual(20, self.room.max_occupants)

    def test_Room_name(self):
        self.assertEqual('new_room1', self.room1.name)

    def test_office_ocupants(self):
        self.assertEqual(6, self.office.max_occupants)

    def test_livingspace_ocupants(self):
        self.assertEqual(4, self.livingspace.max_occupants)

    def test_sublclass_Room(self):
        self.assertTrue(issubclass(model.Office, model.Room))
        self.assertTrue(issubclass(model.LivingSpace, model.Room))
from context import models
from models import model
import unittest


class test_model(unittest.TestCase):
    def setUp(self):
        self.room = model.Room(20, 'new_room')
        self.room1 = model.Room(6, 'new_room1')
        self.livingspace = model.LivingSpace('orange')
        self.office = model.Office('manjaro')

    def test_Room_instance(self):
        self.assertIsInstance(self.room, model.Room)
        self.assertIsInstance(self.room1, model.Room)

    def test_Room_max_occupation(self):
        self.assertEqual(20, self.room.max_occupants)

    def test_Room_name(self):
        self.assertEqual('new_room1', self.room1.name)
        self.room1.name = "changedname"
        self.assertEqual('changedname', self.room1.name)

    def test_office_ocupants(self):
        self.assertEqual(6, self.office.max_occupants)

    def test_livingspace_ocupants(self):
        self.assertEqual(4, self.livingspace.max_occupants)

    def test_sublclass_Room(self):
        self.assertTrue(issubclass(model.Office, model.Room))
        self.assertTrue(issubclass(model.LivingSpace, model.Room))

    def test_room_current_population(self):
        self.assertEqual(self.room.current_population, 0)
Refactor model test to test added properties
Refactor model test to test added properties
Python
mit
georgreen/Geoogreen-Mamboleo-Dojo-Project
from context import models
from models import model
import unittest


class test_model(unittest.TestCase):
    def setUp(self):
        self.room = model.Room(20, 'new_room')
        self.room1 = model.Room(6, 'new_room1')
        self.livingspace = model.LivingSpace('orange')
        self.office = model.Office('manjaro')

    def test_Room_instance(self):
        self.assertIsInstance(self.room, model.Room)
        self.assertIsInstance(self.room1, model.Room)

    def test_Room_max_occupation(self):
        self.assertEqual(20, self.room.max_occupants)

    def test_Room_name(self):
        self.assertEqual('new_room1', self.room1.name)
        self.room1.name = "changedname"
        self.assertEqual('changedname', self.room1.name)

    def test_office_ocupants(self):
        self.assertEqual(6, self.office.max_occupants)

    def test_livingspace_ocupants(self):
        self.assertEqual(4, self.livingspace.max_occupants)

    def test_sublclass_Room(self):
        self.assertTrue(issubclass(model.Office, model.Room))
        self.assertTrue(issubclass(model.LivingSpace, model.Room))

    def test_room_current_population(self):
        self.assertEqual(self.room.current_population, 0)
Refactor model test to test added properties

from context import models
from models import model
import unittest


class test_logic_core(unittest.TestCase):
    def setUp(self):
        self.room = model.Room(20, 'new_room')
        self.room1 = model.Room(6, 'new_room1')
        self.livingspace = model.LivingSpace('orange')
        self.office = model.Office('manjaro')

    def test_Room_instance(self):
        self.assertIsInstance(self.room, model.Room)
        self.assertIsInstance(self.room1, model.Room)

    def test_Room_max_occupation(self):
        self.assertEqual(20, self.room.max_occupants)

    def test_Room_name(self):
        self.assertEqual('new_room1', self.room1.name)

    def test_office_ocupants(self):
        self.assertEqual(6, self.office.max_occupants)

    def test_livingspace_ocupants(self):
        self.assertEqual(4, self.livingspace.max_occupants)

    def test_sublclass_Room(self):
        self.assertTrue(issubclass(model.Office, model.Room))
        self.assertTrue(issubclass(model.LivingSpace, model.Room))
701238e19f4eaa6ce1f1c14e6e56d9544e402ed7
test/test_language.py
test/test_language.py
import unittest

from charset_normalizer.normalizer import CharsetNormalizerMatches as CnM

from glob import glob
from os.path import basename


class TestLanguageDetection(unittest.TestCase):

    SHOULD_BE = {
        'sample.1.ar.srt': 'Arabic',
        'sample.1.fr.srt': 'French',
        'sample.1.gr.srt': 'Greek',
        'sample.1.he.srt': 'Hebrew',
        'sample.1.hi.srt': 'English',
        'sample.1.ru.srt': 'Russian',
        'sample.1.tu.srt': 'Turkish',
        'sample.2.ar.srt': 'Arabic',
        'sample.3.ar.srt': 'Arabic',
        'sample.4.ar.srt': 'Arabic',
        'sample.5.ar.srt': 'Arabic',
        'sample-chinese.txt': 'Classical Chinese',
        'sample-greek.txt': 'Greek',
        'sample-greek-2.txt': 'Greek',
        'sample-hebrew.txt': 'English',
        'sample-hebrew-2.txt': 'Hebrew',
        'sample-hebrew-3.txt': 'Hebrew',
        'sample-russian.txt': 'Russian',
        'sample-russian-2.txt': 'Russian',
        'sample-turkish.txt': 'Turkish',
        'sample-korean.txt': 'Korean',
        'sample-spanish.txt': 'Spanish',
        'sample-bulgarian.txt': 'Bulgarian',
        'sample-english.bom.txt': 'English'
    }

    def test_language_detection(self):
        for path_name in glob('./data/*.srt') + glob('./data/*.txt'):
            with self.subTest(path_name):
                r_ = CnM.from_path(path_name).best().first()
                self.assertEqual(
                    TestLanguageDetection.SHOULD_BE[basename(path_name)],
                    r_.language
                )


if __name__ == '__main__':
    unittest.main()
Add test to verify if language was detected properly
Add test to verify if language was detected properly
Python
mit
Ousret/charset_normalizer,ousret/charset_normalizer,Ousret/charset_normalizer,ousret/charset_normalizer
import unittest

from charset_normalizer.normalizer import CharsetNormalizerMatches as CnM

from glob import glob
from os.path import basename


class TestLanguageDetection(unittest.TestCase):

    SHOULD_BE = {
        'sample.1.ar.srt': 'Arabic',
        'sample.1.fr.srt': 'French',
        'sample.1.gr.srt': 'Greek',
        'sample.1.he.srt': 'Hebrew',
        'sample.1.hi.srt': 'English',
        'sample.1.ru.srt': 'Russian',
        'sample.1.tu.srt': 'Turkish',
        'sample.2.ar.srt': 'Arabic',
        'sample.3.ar.srt': 'Arabic',
        'sample.4.ar.srt': 'Arabic',
        'sample.5.ar.srt': 'Arabic',
        'sample-chinese.txt': 'Classical Chinese',
        'sample-greek.txt': 'Greek',
        'sample-greek-2.txt': 'Greek',
        'sample-hebrew.txt': 'English',
        'sample-hebrew-2.txt': 'Hebrew',
        'sample-hebrew-3.txt': 'Hebrew',
        'sample-russian.txt': 'Russian',
        'sample-russian-2.txt': 'Russian',
        'sample-turkish.txt': 'Turkish',
        'sample-korean.txt': 'Korean',
        'sample-spanish.txt': 'Spanish',
        'sample-bulgarian.txt': 'Bulgarian',
        'sample-english.bom.txt': 'English'
    }

    def test_language_detection(self):
        for path_name in glob('./data/*.srt') + glob('./data/*.txt'):
            with self.subTest(path_name):
                r_ = CnM.from_path(path_name).best().first()
                self.assertEqual(
                    TestLanguageDetection.SHOULD_BE[basename(path_name)],
                    r_.language
                )


if __name__ == '__main__':
    unittest.main()
Add test to verify if language was detected properly
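The subTest context manager used in the test above (available since Python 3.4) is what lets a single test method cover every sample file: a failing file is reported with its parameters, but the loop keeps going. A self-contained illustration:

import unittest


class SubTestDemo(unittest.TestCase):
    def test_each_value(self):
        for value in (1, 2, 3):
            with self.subTest(value=value):
                # Each failing iteration is reported individually instead
                # of aborting the method at the first failure.
                self.assertGreater(value, 0)


if __name__ == '__main__':
    unittest.main()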
b2fbb48049abbfff7f1636059f8ad7eda07667c7
test/single_system/all.py
test/single_system/all.py
import sys, unittest

import bmc_test
import power_test
import xmlrunner

tests = []
tests.extend(bmc_test.tests)
#tests.extend(power_test.tests)

if __name__ == '__main__':
    for test in tests:
        test.system = sys.argv[1]
        suite = unittest.TestLoader().loadTestsFromTestCase(test)
        xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
import sys, unittest, os

import bmc_test
import power_test
import xmlrunner

tests = []
tests.extend(bmc_test.tests)
#tests.extend(power_test.tests)

if __name__ == '__main__':
    for test in tests:
        test.system = sys.argv[1]
        suite = unittest.TestLoader().loadTestsFromTestCase(test)
        result = xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
        if result.failures or result.errors:
            os.sys.exit(1)
Return a bad error code when a test fails
Return a bad error code when a test fails
Python
bsd-3-clause
Cynerva/pyipmi,emaadmanzoor/pyipmi
import sys, unittest, os

import bmc_test
import power_test
import xmlrunner

tests = []
tests.extend(bmc_test.tests)
#tests.extend(power_test.tests)

if __name__ == '__main__':
    for test in tests:
        test.system = sys.argv[1]
        suite = unittest.TestLoader().loadTestsFromTestCase(test)
        result = xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
        if result.failures or result.errors:
            os.sys.exit(1)
Return a bad error code when a test fails

import sys, unittest

import bmc_test
import power_test
import xmlrunner

tests = []
tests.extend(bmc_test.tests)
#tests.extend(power_test.tests)

if __name__ == '__main__':
    for test in tests:
        test.system = sys.argv[1]
        suite = unittest.TestLoader().loadTestsFromTestCase(test)
        xmlrunner.XMLTestRunner(verbose = 1, output='test-reports').run(suite)
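A more idiomatic spelling of the same exit-status check relies on TestResult.wasSuccessful(), which already folds failures and errors together. A hedged alternative to the shipped version — it reuses suite from the script above and assumes xmlrunner's result object follows the standard unittest TestResult API:

import sys

result = xmlrunner.XMLTestRunner(verbose=1, output='test-reports').run(suite)
if not result.wasSuccessful():
    sys.exit(1)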
3786d778f583f96cb4dce37a175d2c460a020724
cnxauthoring/events.py
cnxauthoring/events.py
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest


def add_cors_headers(request, response):
    settings = request.registry.settings
    acac = settings['cors.access_control_allow_credentials']
    acao = settings['cors.access_control_allow_origin'].split()
    acah = settings['cors.access_control_allow_headers']
    acam = settings['cors.access_control_allow_methods']
    if acac:
        response.headerlist.append(
            ('Access-Control-Allow-Credentials', acac))
    if acao:
        if request.host in acao:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', request.host))
        else:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', acao[0]))
    if acah:
        response.headerlist.append(
            ('Access-Control-Allow-Headers', acah))
    if acam:
        response.headerlist.append(
            ('Access-Control-Allow-Methods', acam))


def new_request_subscriber(event):
    request = event.request
    request.add_response_callback(add_cors_headers)


def main(config):
    config.add_subscriber(new_request_subscriber, NewRequest)
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest


def add_cors_headers(request, response):
    settings = request.registry.settings
    acac = settings['cors.access_control_allow_credentials']
    acao = settings['cors.access_control_allow_origin'].split()
    acah = settings['cors.access_control_allow_headers']
    acam = settings['cors.access_control_allow_methods']
    if acac:
        response.headerlist.append(
            ('Access-Control-Allow-Credentials', acac))
    if acao:
        if request.headers.get('Origin') in acao:
            response.headerlist.append(
                ('Access-Control-Allow-Origin',
                 request.headers.get('Origin')))
        else:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', acao[0]))
    if acah:
        response.headerlist.append(
            ('Access-Control-Allow-Headers', acah))
    if acam:
        response.headerlist.append(
            ('Access-Control-Allow-Methods', acam))


def new_request_subscriber(event):
    request = event.request
    request.add_response_callback(add_cors_headers)


def main(config):
    config.add_subscriber(new_request_subscriber, NewRequest)
Fix Access-Control-Allow-Origin to return the request origin
Fix Access-Control-Allow-Origin to return the request origin

request.host is the host part of the request url. For example, if webview is trying to access http://localhost:8080/users/profile, request.host is localhost:8080. It's the Origin field in the headers that we should be matching.
Python
agpl-3.0
Connexions/cnx-authoring
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest


def add_cors_headers(request, response):
    settings = request.registry.settings
    acac = settings['cors.access_control_allow_credentials']
    acao = settings['cors.access_control_allow_origin'].split()
    acah = settings['cors.access_control_allow_headers']
    acam = settings['cors.access_control_allow_methods']
    if acac:
        response.headerlist.append(
            ('Access-Control-Allow-Credentials', acac))
    if acao:
        if request.headers.get('Origin') in acao:
            response.headerlist.append(
                ('Access-Control-Allow-Origin',
                 request.headers.get('Origin')))
        else:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', acao[0]))
    if acah:
        response.headerlist.append(
            ('Access-Control-Allow-Headers', acah))
    if acam:
        response.headerlist.append(
            ('Access-Control-Allow-Methods', acam))


def new_request_subscriber(event):
    request = event.request
    request.add_response_callback(add_cors_headers)


def main(config):
    config.add_subscriber(new_request_subscriber, NewRequest)
Fix Access-Control-Allow-Origin to return the request origin

request.host is the host part of the request url. For example, if webview is trying to access http://localhost:8080/users/profile, request.host is localhost:8080. It's the Origin field in the headers that we should be matching.

# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2013, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from pyramid.events import NewRequest


def add_cors_headers(request, response):
    settings = request.registry.settings
    acac = settings['cors.access_control_allow_credentials']
    acao = settings['cors.access_control_allow_origin'].split()
    acah = settings['cors.access_control_allow_headers']
    acam = settings['cors.access_control_allow_methods']
    if acac:
        response.headerlist.append(
            ('Access-Control-Allow-Credentials', acac))
    if acao:
        if request.host in acao:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', request.host))
        else:
            response.headerlist.append(
                ('Access-Control-Allow-Origin', acao[0]))
    if acah:
        response.headerlist.append(
            ('Access-Control-Allow-Headers', acah))
    if acam:
        response.headerlist.append(
            ('Access-Control-Allow-Methods', acam))


def new_request_subscriber(event):
    request = event.request
    request.add_response_callback(add_cors_headers)


def main(config):
    config.add_subscriber(new_request_subscriber, NewRequest)
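To make the commit message's distinction concrete: on a cross-origin request, Host names the server being called while Origin names the site the page came from, and only the latter belongs in a CORS comparison. A schematic illustration with hypothetical hosts, reusing the request and acao names from the code above:

# A page served from http://localhost:8080 calls an API at localhost:8081:
#
#   GET /users/profile HTTP/1.1
#   Host: localhost:8081            -> request.host (always *this* server)
#   Origin: http://localhost:8080   -> request.headers.get('Origin')
#
origin = request.headers.get('Origin')
allowed = origin in acao  # match the caller against the whitelist, not ourselves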
de42731ab97a7d4272c44cc750891906aa5b4417
buildlet/runner/ipythonparallel.py
buildlet/runner/ipythonparallel.py
""" Task runner using IPython parallel interface. See `The IPython task interface`_ and `IPython Documentation`_ in `IPython Documentation`_. .. _The IPython task interface: http://ipython.org/ipython-doc/dev/parallel/parallel_task.html .. _DAG Dependencies: http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html .. _IPython Documentation: http://ipython.org/ipython-doc/dev/ """ import IPython.parallel from .simple import SimpleRunner from .mixinparallel import MixInParallelRunner class IPythonParallelRunner(MixInParallelRunner, SimpleRunner): def submit_tasks(self): self.client = IPython.parallel.Client() self.view = view = self.client.load_balanced_view() self.results = results = {} for node in self.sorted_nodes(): deps = [results[n] for n in self.graph.predecessors(node)] with view.temp_flags(after=deps): results[node] = view.apply_async(self.run_func, self.nodetaskmap[node]) def wait_tasks(self): self.view.wait(self.results.values())
""" Task runner using IPython parallel interface. See `The IPython task interface`_ and `IPython Documentation`_ in `IPython Documentation`_. .. _The IPython task interface: http://ipython.org/ipython-doc/dev/parallel/parallel_task.html .. _DAG Dependencies: http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html .. _IPython Documentation: http://ipython.org/ipython-doc/dev/ """ import IPython.parallel from .simple import SimpleRunner from .mixinparallel import MixInParallelRunner class IPythonParallelRunner(MixInParallelRunner, SimpleRunner): def submit_tasks(self): self.client = IPython.parallel.Client() self.view = view = self.client.load_balanced_view() self.results = results = {} for node in self.sorted_nodes(): deps = [results[n] for n in self.graph.predecessors(node)] with view.temp_flags(after=deps): results[node] = view.apply_async(self.run_func, self.nodetaskmap[node]) def wait_tasks(self): for r in self.results.values(): r.get()
Raise error if any in IPythonParallelRunner.wait_tasks
Raise error if any in IPythonParallelRunner.wait_tasks
Python
bsd-3-clause
tkf/buildlet
""" Task runner using IPython parallel interface. See `The IPython task interface`_ and `IPython Documentation`_ in `IPython Documentation`_. .. _The IPython task interface: http://ipython.org/ipython-doc/dev/parallel/parallel_task.html .. _DAG Dependencies: http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html .. _IPython Documentation: http://ipython.org/ipython-doc/dev/ """ import IPython.parallel from .simple import SimpleRunner from .mixinparallel import MixInParallelRunner class IPythonParallelRunner(MixInParallelRunner, SimpleRunner): def submit_tasks(self): self.client = IPython.parallel.Client() self.view = view = self.client.load_balanced_view() self.results = results = {} for node in self.sorted_nodes(): deps = [results[n] for n in self.graph.predecessors(node)] with view.temp_flags(after=deps): results[node] = view.apply_async(self.run_func, self.nodetaskmap[node]) def wait_tasks(self): for r in self.results.values(): r.get()
Raise error if any in IPythonParallelRunner.wait_tasks

"""
Task runner using IPython parallel interface.

See `The IPython task interface`_ and `IPython Documentation`_
in `IPython Documentation`_.

.. _The IPython task interface:
   http://ipython.org/ipython-doc/dev/parallel/parallel_task.html

.. _DAG Dependencies:
   http://ipython.org/ipython-doc/dev/parallel/dag_dependencies.html

.. _IPython Documentation:
   http://ipython.org/ipython-doc/dev/

"""

import IPython.parallel

from .simple import SimpleRunner
from .mixinparallel import MixInParallelRunner


class IPythonParallelRunner(MixInParallelRunner, SimpleRunner):

    def submit_tasks(self):
        self.client = IPython.parallel.Client()
        self.view = view = self.client.load_balanced_view()
        self.results = results = {}
        for node in self.sorted_nodes():
            deps = [results[n] for n in self.graph.predecessors(node)]
            with view.temp_flags(after=deps):
                results[node] = view.apply_async(self.run_func,
                                                 self.nodetaskmap[node])

    def wait_tasks(self):
        self.view.wait(self.results.values())
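The reason the loop over .get() improves on view.wait(): waiting only blocks until tasks finish and returns normally even when a task died remotely, whereas AsyncResult.get() blocks and re-raises the remote exception in the caller. A hedged sketch with a deliberately failing task, reusing the view name from above:

def boom():
    return 1 / 0

result = view.apply_async(boom)
view.wait([result])   # returns once the task is done, hiding the failure
result.get()          # re-raises the remote ZeroDivisionError here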
20b450c4cd0ff9c57d894fa263056ff4cd2dbf07
vim_turing_machine/machines/merge_business_hours/vim_merge_business_hours.py
vim_turing_machine/machines/merge_business_hours/vim_merge_business_hours.py
import sys

from vim_turing_machine.machines.merge_business_hours.merge_business_hours import merge_business_hours_transitions
from vim_turing_machine.vim_machine import VimTuringMachine


if __name__ == '__main__':
    merge_business_hours = VimTuringMachine(merge_business_hours_transitions(), debug=True)
    merge_business_hours.run(initial_tape=sys.argv[1], max_steps=50)
Add a vim version of merge business hours
Add a vim version of merge business hours
Python
mit
ealter/vim_turing_machine,ealter/vim_turing_machine
import sys

from vim_turing_machine.machines.merge_business_hours.merge_business_hours import merge_business_hours_transitions
from vim_turing_machine.vim_machine import VimTuringMachine


if __name__ == '__main__':
    merge_business_hours = VimTuringMachine(merge_business_hours_transitions(), debug=True)
    merge_business_hours.run(initial_tape=sys.argv[1], max_steps=50)
Add a vim version of merge business hours
35c44f0f585d11dea632e509b9eec20d4697dc9d
functions/eitu/timeedit_to_csv.py
functions/eitu/timeedit_to_csv.py
import requests
import csv

import ics_parser

URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'

def fetch_and_parse(url):
    return ics_parser.parse(requests.get(url).text)

# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities

# Remove duplicate events
events = {e['UID']: e for e in events}.values()

# Write csv
with open('timeedit.csv', 'w') as csvfile:
    fieldnames = set()
    for e in events:
        fieldnames = fieldnames | set(e.keys())
    writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
    writer.writeheader()
    for e in events:
        writer.writerow(e)
import requests
import csv
from datetime import datetime

import ics_parser

URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'

def fetch_and_parse(url):
    return ics_parser.parse(requests.get(url).text)

# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities

# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])

# Write csv
with open('timeedit.csv', 'w') as csvfile:
    fieldnames = set()
    for e in events:
        fieldnames = fieldnames | set(e.keys())
    writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
    writer.writeheader()
    for e in events:
        for key, value in e.items():
            if isinstance(value, datetime):
                e[key] = value.isoformat()
        writer.writerow(e)
Sort events by start and iso format datetimes
Sort events by start and iso format datetimes
Python
mit
christianknu/eitu,christianknu/eitu,eitu/eitu,christianknu/eitu,eitu/eitu
import requests
import csv
from datetime import datetime

import ics_parser

URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'

def fetch_and_parse(url):
    return ics_parser.parse(requests.get(url).text)

# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities

# Remove duplicates and sort
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])

# Write csv
with open('timeedit.csv', 'w') as csvfile:
    fieldnames = set()
    for e in events:
        fieldnames = fieldnames | set(e.keys())
    writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
    writer.writeheader()
    for e in events:
        for key, value in e.items():
            if isinstance(value, datetime):
                e[key] = value.isoformat()
        writer.writerow(e)
Sort events by start and iso format datetimes

import requests
import csv

import ics_parser

URL_STUDY_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6Q7Z6QQw0Z5gQ9f50on7Xx5YY00ZQ1ZYQycZw.ics'
URL_ACTIVITIES = 'https://dk.timeedit.net/web/itu/db1/public/ri6g7058yYQZXxQ5oQgZZ0vZ56Y1Q0f5c0nZQwYQ.ics'

def fetch_and_parse(url):
    return ics_parser.parse(requests.get(url).text)

# Fetch and parse iCalendar events
study_activities = fetch_and_parse(URL_STUDY_ACTIVITIES)
activities = fetch_and_parse(URL_ACTIVITIES)
events = study_activities + activities

# Remove duplicate events
events = {e['UID']: e for e in events}.values()

# Write csv
with open('timeedit.csv', 'w') as csvfile:
    fieldnames = set()
    for e in events:
        fieldnames = fieldnames | set(e.keys())
    writer = csv.DictWriter(csvfile, fieldnames=sorted(list(fieldnames)))
    writer.writeheader()
    for e in events:
        writer.writerow(e)
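Two details of the script above are worth spelling out: the dict comprehension keyed by UID keeps only the last event seen per UID, and sorted() with a key function orders without mutating its input. A small self-contained example with made-up events:

from datetime import datetime

events = [
    {'UID': 'a', 'DTSTART': datetime(2016, 9, 2)},
    {'UID': 'b', 'DTSTART': datetime(2016, 9, 1)},
    {'UID': 'a', 'DTSTART': datetime(2016, 9, 3)},  # replaces the first 'a'
]
events = {e['UID']: e for e in events}.values()
events = sorted(events, key=lambda e: e['DTSTART'])

print([e['UID'] for e in events])        # ['b', 'a']
print(events[0]['DTSTART'].isoformat())  # '2016-09-01T00:00:00'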
0fdb93fb73142315fe404b9a161ef19af0d920cd
tests/test_bawlerd.py
tests/test_bawlerd.py
import io
import os
from textwrap import dedent

from pg_bawler import bawlerd


class TestBawlerdConfig:

    def test_build_config_location_list(self):
        assert not bawlerd.conf.build_config_location_list(locations=())
        user_conf = os.path.join(
            os.path.expanduser('~'),
            bawlerd.conf.DEFAULT_CONFIG_FILENAME)
        system_conf = os.path.join(
            '/etc/pg_bawler',
            bawlerd.conf.DEFAULT_CONFIG_FILENAME)
        assert user_conf in bawlerd.conf.build_config_location_list()
        assert system_conf in bawlerd.conf.build_config_location_list()

    def test__load_file(self):
        config = bawlerd.conf._load_file(io.StringIO(dedent("""\
            logging:
              formatters:
                standard:
                  format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
              handlers:
                default:
                  level: "INFO"
                  formatter: standard
                  class: logging.StreamHandler
              loggers:
                "":
                  handlers: ["default"]
                  level: INFO
                  propagate: True
        """)))
        assert 'logging' in config
import io
import os
from textwrap import dedent

from pg_bawler import bawlerd


class TestBawlerdConfig:

    def test_build_config_location_list(self):
        assert not bawlerd.conf.build_config_location_list(locations=())
        user_conf = os.path.join(
            os.path.expanduser('~'),
            bawlerd.conf.DEFAULT_CONFIG_FILENAME)
        system_conf = os.path.join(
            '/etc/pg_bawler',
            bawlerd.conf.DEFAULT_CONFIG_FILENAME)
        assert user_conf in bawlerd.conf.build_config_location_list()
        assert system_conf in bawlerd.conf.build_config_location_list()

    def test__load_file(self):
        config = bawlerd.conf._load_file(io.StringIO(dedent("""\
            logging:
              formatters:
                standard:
                  format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
              handlers:
                default:
                  level: "INFO"
                  formatter: standard
                  class: logging.StreamHandler
              loggers:
                "":
                  handlers: ["default"]
                  level: INFO
                  propagate: True
        """)))
        assert 'logging' in config

    def test_read_config_files(self):
        config_base = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'configs')
        locations = [
            os.path.join(config_base, 'etc'),
            os.path.join(config_base, 'home'),
        ]
        config = bawlerd.conf.read_config_files(
            bawlerd.conf.build_config_location_list(locations=locations))
        assert config['common']['listen_timeout'] == 40
        assert 'logging' in config
Add simple test for config builder
Add simple test for config builder

Signed-off-by: Michal Kuffa <[email protected]>
Python
bsd-3-clause
beezz/pg_bawler,beezz/pg_bawler
import io
import os
from textwrap import dedent

from pg_bawler import bawlerd


class TestBawlerdConfig:

    def test_build_config_location_list(self):
        assert not bawlerd.conf.build_config_location_list(locations=())
        user_conf = os.path.join(
            os.path.expanduser('~'),
            bawlerd.conf.DEFAULT_CONFIG_FILENAME)
        system_conf = os.path.join(
            '/etc/pg_bawler',
            bawlerd.conf.DEFAULT_CONFIG_FILENAME)
        assert user_conf in bawlerd.conf.build_config_location_list()
        assert system_conf in bawlerd.conf.build_config_location_list()

    def test__load_file(self):
        config = bawlerd.conf._load_file(io.StringIO(dedent("""\
            logging:
              formatters:
                standard:
                  format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\"
              handlers:
                default:
                  level: "INFO"
                  formatter: standard
                  class: logging.StreamHandler
              loggers:
                "":
                  handlers: ["default"]
                  level: INFO
                  propagate: True
        """)))
        assert 'logging' in config

    def test_read_config_files(self):
        config_base = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'configs')
        locations = [
            os.path.join(config_base, 'etc'),
            os.path.join(config_base, 'home'),
        ]
        config = bawlerd.conf.read_config_files(
            bawlerd.conf.build_config_location_list(locations=locations))
        assert config['common']['listen_timeout'] == 40
        assert 'logging' in config
Add simple test for config builder Signed-off-by: Michal Kuffa <[email protected]> import io import os from textwrap import dedent from pg_bawler import bawlerd class TestBawlerdConfig: def test_build_config_location_list(self): assert not bawlerd.conf.build_config_location_list(locations=()) user_conf = os.path.join( os.path.expanduser('~'), bawlerd.conf.DEFAULT_CONFIG_FILENAME) system_conf = os.path.join( '/etc/pg_bawler', bawlerd.conf.DEFAULT_CONFIG_FILENAME) assert user_conf in bawlerd.conf.build_config_location_list() assert system_conf in bawlerd.conf.build_config_location_list() def test__load_file(self): config = bawlerd.conf._load_file(io.StringIO(dedent("""\ logging: formatters: standard: format: \"%(asctime)s %(levelname)s] %(name)s: %(message)s\" handlers: default: level: "INFO" formatter: standard class: logging.StreamHandler loggers: "": handlers: ["default"] level: INFO propagate: True """))) assert 'logging' in config
33e1c781b0e430cb1e0df19d02ed06a193f9d202
waterbutler/identity.py
waterbutler/identity.py
import asyncio

from waterbutler import settings


@asyncio.coroutine
def fetch_rest_identity(params):
    response = yield from aiohttp.request(
        'get',
        settings.IDENTITY_API_URL,
        params=params,
        headers={'Content-Type': 'application/json'},
    )

    # TOOD Handle Errors nicely
    if response.status != 200:
        data = yield from response.read()
        raise web.HTTPError(response.status)

    data = yield from response.json()
    return data


IDENTITY_METHODS = {
    'rest': fetch_rest_identity
}

get_identity = IDENTITY_METHODS[settings.IDENTITY_METHOD]

import asyncio

import aiohttp

from waterbutler import settings


IDENTITY_METHODS = {}


def get_identity_func(name):
    try:
        return IDENTITY_METHODS[name]
    except KeyError:
        raise NotImplementedError('No identity getter for {0}'.format(name))


def register_identity(name):
    def _register_identity(func):
        IDENTITY_METHODS[name] = func
        return func
    return _register_identity


def get_identity(name, **kwargs):
    return get_identity_func(name)(**kwargs)


@register_identity('rest')
@asyncio.coroutine
def fetch_rest_identity(**params):
    response = yield from aiohttp.request(
        'get',
        settings.IDENTITY_API_URL,
        params=params,
        headers={'Content-Type': 'application/json'},
    )

    # TOOD Handle Errors nicely
    if response.status != 200:
        data = yield from response.read()
        raise web.HTTPError(response.status)

    data = yield from response.json()
    return data
Make use of a register decorator
Make use of a register decorator
Python
apache-2.0
CenterForOpenScience/waterbutler,kwierman/waterbutler,TomBaxter/waterbutler,rafaeldelucena/waterbutler,Ghalko/waterbutler,RCOSDP/waterbutler,hmoco/waterbutler,felliott/waterbutler,rdhyee/waterbutler,Johnetordoff/waterbutler,icereval/waterbutler,chrisseto/waterbutler,cosenal/waterbutler
import asyncio

import aiohttp

from waterbutler import settings


IDENTITY_METHODS = {}


def get_identity_func(name):
    try:
        return IDENTITY_METHODS[name]
    except KeyError:
        raise NotImplementedError('No identity getter for {0}'.format(name))


def register_identity(name):
    def _register_identity(func):
        IDENTITY_METHODS[name] = func
        return func
    return _register_identity


def get_identity(name, **kwargs):
    return get_identity_func(name)(**kwargs)


@register_identity('rest')
@asyncio.coroutine
def fetch_rest_identity(**params):
    response = yield from aiohttp.request(
        'get',
        settings.IDENTITY_API_URL,
        params=params,
        headers={'Content-Type': 'application/json'},
    )

    # TOOD Handle Errors nicely
    if response.status != 200:
        data = yield from response.read()
        raise web.HTTPError(response.status)

    data = yield from response.json()
    return data

Make use of a register decorator
import asyncio

from waterbutler import settings


@asyncio.coroutine
def fetch_rest_identity(params):
    response = yield from aiohttp.request(
        'get',
        settings.IDENTITY_API_URL,
        params=params,
        headers={'Content-Type': 'application/json'},
    )

    # TOOD Handle Errors nicely
    if response.status != 200:
        data = yield from response.read()
        raise web.HTTPError(response.status)

    data = yield from response.json()
    return data


IDENTITY_METHODS = {
    'rest': fetch_rest_identity
}

get_identity = IDENTITY_METHODS[settings.IDENTITY_METHOD]
256dc6da740050f71615f00924cd85346aaa1e99
rotational-cipher/rotational_cipher.py
rotational-cipher/rotational_cipher.py
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    rules = shift_rules(n)
    return "".join(map(lambda k: rules.get(k, k), s))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}

import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    rules = shift_rules(n)
    return "".join(rules.get(ch, ch) for ch in s)


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}
Use a comprehension instead of a lambda function
Use a comprehension instead of a lambda function
Python
agpl-3.0
CubicComet/exercism-python-solutions
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    rules = shift_rules(n)
    return "".join(rules.get(ch, ch) for ch in s)


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}

Use a comprehension instead of a lambda function
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    rules = shift_rules(n)
    return "".join(map(lambda k: rules.get(k, k), s))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}
6547d653491adb6ab46e4a3a5f8251129719d3f7
login/middleware.py
login/middleware.py
from django.conf import settings
from django.http import HttpResponseRedirect

DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'


class DetachMiddleware(object):

    def process_request(self, request):
        if not request.path == '/login/' \
                and not request.path.startswith('/api'):
            if not request.user.is_anonymous \
                    and not request.user.is_native \
                    and not request.path == DETACH_PATH:
                return HttpResponseRedirect(DETACH_PATH)
            if not request.user.is_anonymous \
                    and not request.user.is_mail_verified \
                    and not request.path.startswith(ACTIVATE_PATH):
                return HttpResponseRedirect(ACTIVATE_PATH)

from django.conf import settings
from django.http import HttpResponseRedirect

DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'


class DetachMiddleware(object):

    def process_request(self, request):
        if not request.path == '/login/' \
                and not request.path.startswith('/api') \
                and not request.user.is_anonymous:
            if not request.user.is_native:
                if not request.path == DETACH_PATH:
                    return HttpResponseRedirect(DETACH_PATH)
            elif not request.user.is_mail_verified \
                    and not request.path.startswith(ACTIVATE_PATH):
                return HttpResponseRedirect(ACTIVATE_PATH)
Remove infinite loop if user is neither native nor verified
Remove infinite loop if user is neither native nor verified
Python
agpl-3.0
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
from django.conf import settings
from django.http import HttpResponseRedirect

DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'


class DetachMiddleware(object):

    def process_request(self, request):
        if not request.path == '/login/' \
                and not request.path.startswith('/api') \
                and not request.user.is_anonymous:
            if not request.user.is_native:
                if not request.path == DETACH_PATH:
                    return HttpResponseRedirect(DETACH_PATH)
            elif not request.user.is_mail_verified \
                    and not request.path.startswith(ACTIVATE_PATH):
                return HttpResponseRedirect(ACTIVATE_PATH)

Remove infinite loop if user is neither native nor verified
from django.conf import settings
from django.http import HttpResponseRedirect

DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'


class DetachMiddleware(object):

    def process_request(self, request):
        if not request.path == '/login/' \
                and not request.path.startswith('/api'):
            if not request.user.is_anonymous \
                    and not request.user.is_native \
                    and not request.path == DETACH_PATH:
                return HttpResponseRedirect(DETACH_PATH)
            if not request.user.is_anonymous \
                    and not request.user.is_mail_verified \
                    and not request.path.startswith(ACTIVATE_PATH):
                return HttpResponseRedirect(ACTIVATE_PATH)
95529efca6a2e3c3544aeb306aaf62a02f2f5408
primes.py
primes.py
import sys

Max=int(sys.argv[1])                  # get Max from command line args

P = {x: True for x in range(2,Max)}   # first assume numbers are prime

for i in range(2, int(Max** (0.5))):  # until square root of Max
    if P[i]:                          #
        for j in range(i*i, Max, i):  # mark all multiples of a prime
            P[j]=False                # as not beeing a prime

numprimes = 0;                        # Count all primes
for i,isprime in P.items():
    if isprime:
        numprimes=numprimes+1

print(numprimes)                      # print number of primes

import array
import math
import sys

n = int(sys.argv[1])

nums = array.array('i', [False] * 2 + [True] * (n - 2))

upper_lim = int(math.sqrt(n))
i = 2
while i <= upper_lim:
    if nums[i]:
        m = i**2
        while m < n:
            nums[m] = False
            m += i
    i += 1

print(len([x for x in nums if nums]))
Make Python code equivalent to Ruby
Make Python code equivalent to Ruby

Using a dictionary instead is really unfair.
Small variation: m must not be equal to n. Not sure how the algorithm is meant is exactly...
Python
mit
oliworx/chartbench,oliworx/chartbench,oliworx/chartbench,oliworx/chartbench,oliworx/chartbench
import array
import math
import sys

n = int(sys.argv[1])

nums = array.array('i', [False] * 2 + [True] * (n - 2))

upper_lim = int(math.sqrt(n))
i = 2
while i <= upper_lim:
    if nums[i]:
        m = i**2
        while m < n:
            nums[m] = False
            m += i
    i += 1

print(len([x for x in nums if nums]))

Make Python code equivalent to Ruby

Using a dictionary instead is really unfair.
Small variation: m must not be equal to n. Not sure how the algorithm is meant is exactly...
import sys

Max=int(sys.argv[1])                  # get Max from command line args

P = {x: True for x in range(2,Max)}   # first assume numbers are prime

for i in range(2, int(Max** (0.5))):  # until square root of Max
    if P[i]:                          #
        for j in range(i*i, Max, i):  # mark all multiples of a prime
            P[j]=False                # as not beeing a prime

numprimes = 0;                        # Count all primes
for i,isprime in P.items():
    if isprime:
        numprimes=numprimes+1

print(numprimes)                      # print number of primes
93b1f8e67b1154fd595a938ca41877eb76c7e995
lcd.py
lcd.py
from telnetlib import Telnet
import time

tn = Telnet('192.168.1.15', 13666, None)
#tn.interact()

tn.write("hello\n")
tn.write("screen_add s1\n")
tn.write("screen_set s1 -priority 1\n")
tn.write("widget_add s1 w1 string\n")
tn.write("widget_add s1 w2 string\n")
tn.write("widget_set s1 w1 1 1 {It is a truth u}\n")
tn.write("widget_set s1 w2 1 2 {niversally ackno}\n")
print "sleeping"
time.sleep(5)

def lcd_string(x, telnet_obj, delay=5):
    L = []
    for i in range(len(x)):
        if i % (15+16) == 0:
            L.append(x[i:i+15+16])
    for s in L:
        s1 = s[0:15]
        s2 = s[15:]
        telnet_obj.write("widget_set s1 w1 1 1 {" + s1 + "}\n")
        telnet_obj.write("widget_set s1 w2 1 2 {" + s2 + "}\n")
        time.sleep(delay)

lcd_string('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz', tn)

#!/usr/bin/env python
from telnetlib import Telnet
import time
import sys

tn = Telnet('192.168.1.15', 13666, None)

pipe_contents = sys.stdin.read()
pipe_contents = pipe_contents.replace('\n', ' ')

tn.write("hello\n")
tn.write("screen_add s1\n")
tn.write("screen_set s1 -priority 1\n")
tn.write("widget_add s1 w1 string\n")
tn.write("widget_add s1 w2 string\n")

def lcd_string(x, telnet_obj, delay=2):
    L = []
    for i in range(len(x)):
        if i % (15+16) == 0:
            L.append(x[i:i+15+16])
    for s in L:
        s1 = s[0:15]
        s2 = s[15:]
        telnet_obj.write("widget_set s1 w1 1 1 {" + s1 + "}\n")
        telnet_obj.write("widget_set s1 w2 1 2 {" + s2 + "}\n")
        time.sleep(delay)

lcd_string(pipe_contents, tn)
Read standard input instead of hard-coded strings.
Read standard input instead of hard-coded strings.
Python
mit
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
#!/usr/bin/env python
from telnetlib import Telnet
import time
import sys

tn = Telnet('192.168.1.15', 13666, None)

pipe_contents = sys.stdin.read()
pipe_contents = pipe_contents.replace('\n', ' ')

tn.write("hello\n")
tn.write("screen_add s1\n")
tn.write("screen_set s1 -priority 1\n")
tn.write("widget_add s1 w1 string\n")
tn.write("widget_add s1 w2 string\n")

def lcd_string(x, telnet_obj, delay=2):
    L = []
    for i in range(len(x)):
        if i % (15+16) == 0:
            L.append(x[i:i+15+16])
    for s in L:
        s1 = s[0:15]
        s2 = s[15:]
        telnet_obj.write("widget_set s1 w1 1 1 {" + s1 + "}\n")
        telnet_obj.write("widget_set s1 w2 1 2 {" + s2 + "}\n")
        time.sleep(delay)

lcd_string(pipe_contents, tn)

Read standard input instead of hard-coded strings.
from telnetlib import Telnet
import time

tn = Telnet('192.168.1.15', 13666, None)
#tn.interact()

tn.write("hello\n")
tn.write("screen_add s1\n")
tn.write("screen_set s1 -priority 1\n")
tn.write("widget_add s1 w1 string\n")
tn.write("widget_add s1 w2 string\n")
tn.write("widget_set s1 w1 1 1 {It is a truth u}\n")
tn.write("widget_set s1 w2 1 2 {niversally ackno}\n")
print "sleeping"
time.sleep(5)

def lcd_string(x, telnet_obj, delay=5):
    L = []
    for i in range(len(x)):
        if i % (15+16) == 0:
            L.append(x[i:i+15+16])
    for s in L:
        s1 = s[0:15]
        s2 = s[15:]
        telnet_obj.write("widget_set s1 w1 1 1 {" + s1 + "}\n")
        telnet_obj.write("widget_set s1 w2 1 2 {" + s2 + "}\n")
        time.sleep(delay)

lcd_string('abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz', tn)
52c5f4ddfde8db6179f11c3bec2bc8be69eed238
flake8_docstrings.py
flake8_docstrings.py
# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import pep257

__version__ = '0.2.1.post1'


class pep257Checker(object):

    """flake8 needs a class to check python file."""

    name = 'pep257'
    version = __version__

    def __init__(self, tree, filename='(none)', builtins=None):
        self.tree = tree
        self.filename = filename

    def run(self):
        """Use directly check() api from pep257."""
        for error in pep257.check([self.filename]):
            # Ignore AllError, Environment error.
            if isinstance(error, pep257.Error):
                yield (error.line, 0, error.message, type(self))

# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import io

import pep8
import pep257

__version__ = '0.2.1.post1'


class pep257Checker(object):

    """flake8 needs a class to check python file."""

    name = 'pep257'
    version = __version__

    STDIN_NAMES = set(['stdin', '-', '(none)', None])

    def __init__(self, tree, filename='(none)', builtins=None):
        self.tree = tree
        self.filename = filename
        self.source = self.load_source()
        self.checker = pep257.PEP257Checker()

    def run(self):
        """Use directly check() api from pep257."""
        for error in self.checker.check_source(self.source, self.filename):
            # Ignore AllError, Environment error.
            if isinstance(error, pep257.Error):
                yield (error.line, 0, error.message, type(self))

    def load_source(self):
        if self.filename in self.STDIN_NAMES:
            self.filename = 'stdin'
            self.source = pep8.stdin_get_value()
        else:
            with io.open(self.filename, encoding='utf-8') as fd:
                self.source = fd.read()
Handle stdin in the plugin
Handle stdin in the plugin

Closes #2
Python
mit
PyCQA/flake8-docstrings
# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import io

import pep8
import pep257

__version__ = '0.2.1.post1'


class pep257Checker(object):

    """flake8 needs a class to check python file."""

    name = 'pep257'
    version = __version__

    STDIN_NAMES = set(['stdin', '-', '(none)', None])

    def __init__(self, tree, filename='(none)', builtins=None):
        self.tree = tree
        self.filename = filename
        self.source = self.load_source()
        self.checker = pep257.PEP257Checker()

    def run(self):
        """Use directly check() api from pep257."""
        for error in self.checker.check_source(self.source, self.filename):
            # Ignore AllError, Environment error.
            if isinstance(error, pep257.Error):
                yield (error.line, 0, error.message, type(self))

    def load_source(self):
        if self.filename in self.STDIN_NAMES:
            self.filename = 'stdin'
            self.source = pep8.stdin_get_value()
        else:
            with io.open(self.filename, encoding='utf-8') as fd:
                self.source = fd.read()

Handle stdin in the plugin

Closes #2
# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import pep257

__version__ = '0.2.1.post1'


class pep257Checker(object):

    """flake8 needs a class to check python file."""

    name = 'pep257'
    version = __version__

    def __init__(self, tree, filename='(none)', builtins=None):
        self.tree = tree
        self.filename = filename

    def run(self):
        """Use directly check() api from pep257."""
        for error in pep257.check([self.filename]):
            # Ignore AllError, Environment error.
            if isinstance(error, pep257.Error):
                yield (error.line, 0, error.message, type(self))
638f6fb659792ec69b9df25391001241d12c39bd
src/python/grpcio_tests/tests_aio/unit/init_test.py
src/python/grpcio_tests/tests_aio/unit/init_test.py
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import unittest

import grpc

from tests_aio.unit._test_base import AioTestBase


class TestInit(AioTestBase):

    async def test_grpc_dot_aio(self):
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)

# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import unittest

from tests_aio.unit._test_base import AioTestBase


class TestInit(AioTestBase):

    async def test_grpc(self):
        import grpc  # pylint: disable=wrong-import-position
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)

    async def test_grpc_dot_aio(self):
        import grpc.aio  # pylint: disable=wrong-import-position
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)

    async def test_aio_from_grpc(self):
        from grpc import aio  # pylint: disable=wrong-import-position
        channel = aio.insecure_channel('dummy')
        self.assertIsInstance(channel, aio.Channel)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
Expand alternatives to import aio module
Expand alternatives to import aio module
Python
apache-2.0
donnadionne/grpc,nicolasnoble/grpc,jtattermusch/grpc,vjpai/grpc,stanley-cheung/grpc,donnadionne/grpc,donnadionne/grpc,donnadionne/grpc,ejona86/grpc,stanley-cheung/grpc,stanley-cheung/grpc,nicolasnoble/grpc,stanley-cheung/grpc,jtattermusch/grpc,stanley-cheung/grpc,ejona86/grpc,stanley-cheung/grpc,ctiller/grpc,vjpai/grpc,stanley-cheung/grpc,nicolasnoble/grpc,grpc/grpc,grpc/grpc,stanley-cheung/grpc,ejona86/grpc,ejona86/grpc,ctiller/grpc,donnadionne/grpc,vjpai/grpc,vjpai/grpc,nicolasnoble/grpc,nicolasnoble/grpc,ejona86/grpc,donnadionne/grpc,jtattermusch/grpc,vjpai/grpc,grpc/grpc,grpc/grpc,donnadionne/grpc,vjpai/grpc,vjpai/grpc,ejona86/grpc,ctiller/grpc,jtattermusch/grpc,grpc/grpc,jtattermusch/grpc,grpc/grpc,donnadionne/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,ejona86/grpc,donnadionne/grpc,ctiller/grpc,nicolasnoble/grpc,nicolasnoble/grpc,grpc/grpc,donnadionne/grpc,donnadionne/grpc,nicolasnoble/grpc,ejona86/grpc,ejona86/grpc,ctiller/grpc,vjpai/grpc,jtattermusch/grpc,jtattermusch/grpc,ctiller/grpc,grpc/grpc,donnadionne/grpc,jtattermusch/grpc,stanley-cheung/grpc,ctiller/grpc,grpc/grpc,ctiller/grpc,ejona86/grpc,vjpai/grpc,grpc/grpc,stanley-cheung/grpc,ctiller/grpc,vjpai/grpc,ejona86/grpc,nicolasnoble/grpc,stanley-cheung/grpc,vjpai/grpc,nicolasnoble/grpc,ctiller/grpc,grpc/grpc,ctiller/grpc,vjpai/grpc,nicolasnoble/grpc,jtattermusch/grpc,nicolasnoble/grpc,jtattermusch/grpc,ejona86/grpc,grpc/grpc,jtattermusch/grpc
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import unittest

from tests_aio.unit._test_base import AioTestBase


class TestInit(AioTestBase):

    async def test_grpc(self):
        import grpc  # pylint: disable=wrong-import-position
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)

    async def test_grpc_dot_aio(self):
        import grpc.aio  # pylint: disable=wrong-import-position
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)

    async def test_aio_from_grpc(self):
        from grpc import aio  # pylint: disable=wrong-import-position
        channel = aio.insecure_channel('dummy')
        self.assertIsInstance(channel, aio.Channel)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)

Expand alternatives to import aio module
# Copyright 2019 The gRPC Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import unittest

import grpc

from tests_aio.unit._test_base import AioTestBase


class TestInit(AioTestBase):

    async def test_grpc_dot_aio(self):
        channel = grpc.aio.insecure_channel('dummy')
        self.assertIsInstance(channel, grpc.aio.Channel)


if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    unittest.main(verbosity=2)
20a92ff1ffe143193d95235c7a5ea8e9edb0df64
yowsup/layers/protocol_acks/protocolentities/ack_outgoing.py
yowsup/layers/protocol_acks/protocolentities/ack_outgoing.py
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity

class OutgoingAckProtocolEntity(AckProtocolEntity):
    '''
    <ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
    </ack>
    '''

    def __init__(self, _id, _class, _type, _to):
        super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
        self.setOutgoingData(_type, _to)

    def setOutgoingData(self, _type, _to):
        self._type = _type
        self._to = _to

    def toProtocolTreeNode(self):
        node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
        if self._type:
            node.setAttribute("type", self._type)
        node.setAttribute("to", self._to)
        return node

    def __str__(self):
        out = super(OutgoingAckProtocolEntity, self).__str__()
        out += "Type: %s\n" % self._type
        out += "To: %s\n" % self._to
        return out

    @staticmethod
    def fromProtocolTreeNode(node):
        entity = AckProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = OutgoingAckProtocolEntity
        entity.setOutgoingData(
            node.getAttributeValue("type"),
            node.getAttributeValue("to")
        )
        return entity

from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity

class OutgoingAckProtocolEntity(AckProtocolEntity):
    '''
    <ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
    </ack>

    <ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
    </ack>
    '''

    def __init__(self, _id, _class, _type, _to, _participant = None):
        super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
        self.setOutgoingData(_type, _to, _participant)

    def setOutgoingData(self, _type, _to, _participant):
        self._type = _type
        self._to = _to
        self._participant = _participant

    def toProtocolTreeNode(self):
        node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
        if self._type:
            node.setAttribute("type", self._type)
        node.setAttribute("to", self._to)
        if self._participant:
            node.setAttribute("participant", self._participant)
        return node

    def __str__(self):
        out = super(OutgoingAckProtocolEntity, self).__str__()
        out += "Type: %s\n" % self._type
        out += "To: %s\n" % self._to
        if self._participant:
            out += "Participant: %s\n" % self._participant
        return out

    @staticmethod
    def fromProtocolTreeNode(node):
        entity = AckProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = OutgoingAckProtocolEntity
        entity.setOutgoingData(
            node.getAttributeValue("type"),
            node.getAttributeValue("to"),
            node.getAttributeValue("participant")
        )
        return entity
Include participant in outgoing ack
Include participant in outgoing ack
Python
mit
ongair/yowsup,biji/yowsup
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity

class OutgoingAckProtocolEntity(AckProtocolEntity):
    '''
    <ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
    </ack>

    <ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
    </ack>
    '''

    def __init__(self, _id, _class, _type, _to, _participant = None):
        super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
        self.setOutgoingData(_type, _to, _participant)

    def setOutgoingData(self, _type, _to, _participant):
        self._type = _type
        self._to = _to
        self._participant = _participant

    def toProtocolTreeNode(self):
        node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
        if self._type:
            node.setAttribute("type", self._type)
        node.setAttribute("to", self._to)
        if self._participant:
            node.setAttribute("participant", self._participant)
        return node

    def __str__(self):
        out = super(OutgoingAckProtocolEntity, self).__str__()
        out += "Type: %s\n" % self._type
        out += "To: %s\n" % self._to
        if self._participant:
            out += "Participant: %s\n" % self._participant
        return out

    @staticmethod
    def fromProtocolTreeNode(node):
        entity = AckProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = OutgoingAckProtocolEntity
        entity.setOutgoingData(
            node.getAttributeValue("type"),
            node.getAttributeValue("to"),
            node.getAttributeValue("participant")
        )
        return entity

Include participant in outgoing ack
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity

class OutgoingAckProtocolEntity(AckProtocolEntity):
    '''
    <ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
    </ack>
    '''

    def __init__(self, _id, _class, _type, _to):
        super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
        self.setOutgoingData(_type, _to)

    def setOutgoingData(self, _type, _to):
        self._type = _type
        self._to = _to

    def toProtocolTreeNode(self):
        node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
        if self._type:
            node.setAttribute("type", self._type)
        node.setAttribute("to", self._to)
        return node

    def __str__(self):
        out = super(OutgoingAckProtocolEntity, self).__str__()
        out += "Type: %s\n" % self._type
        out += "To: %s\n" % self._to
        return out

    @staticmethod
    def fromProtocolTreeNode(node):
        entity = AckProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = OutgoingAckProtocolEntity
        entity.setOutgoingData(
            node.getAttributeValue("type"),
            node.getAttributeValue("to")
        )
        return entity
ea17a76c4ada65dac9e909b930c938a24ddb99b2
tests/formatter/test_csver.py
tests/formatter/test_csver.py
import unittest, argparse

from echolalia.formatter.csver import Formatter


class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)

import unittest, argparse

from echolalia.formatter.csver import Formatter


class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
Fix no header test for csv formatter
Fix no header test for csv formatter
Python
mit
eiri/echolalia-prototype
import unittest, argparse

from echolalia.formatter.csver import Formatter


class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)

Fix no header test for csv formatter
import unittest, argparse

from echolalia.formatter.csver import Formatter


class CsverTestCase(unittest.TestCase):

    def setUp(self):
        self.parser = argparse.ArgumentParser()
        self.data = [{'char': chr(i), 'order': i - 96} for i in xrange(97, 100)]
        self.formatter = Formatter()

    def test_add_args(self):
        new_parser = self.formatter.add_args(self.parser)
        self.assertEqual(new_parser, self.parser)
        args = new_parser.parse_args(['--with_header'])
        self.assertTrue(args.with_header)
        args = new_parser.parse_args([])
        self.assertFalse(args.with_header)

    def test_marshall_no_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args([])
        result = self.formatter.marshall(args, self.data)
        expect = "a,1\r\nb,2\r\nc,3\r\n"

    def test_marshall_with_header(self):
        new_parser = self.formatter.add_args(self.parser)
        args = new_parser.parse_args(['--with_header'])
        result = self.formatter.marshall(args, self.data)
        expect = "char,order\r\na,1\r\nb,2\r\nc,3\r\n"
        self.assertEqual(result, expect)
3b467abc665a1807d8a1adbba1be78d40f77b4ce
tests/unit/dataactcore/factories/job.py
tests/unit/dataactcore/factories/job.py
import factory
from factory import fuzzy
from datetime import date, datetime, timezone
from dataactcore.models import jobModels


class SubmissionFactory(factory.Factory):
    class Meta:
        model = jobModels.Submission

    submission_id = None
    datetime_utc = fuzzy.FuzzyDateTime(
        datetime(2010, 1, 1, tzinfo=timezone.utc))
    user_id = fuzzy.FuzzyInteger(1, 9999)
    cgac_code = fuzzy.FuzzyText()
    reporting_start_date = fuzzy.FuzzyDate(date(2010, 1, 1))
    reporting_end_date = fuzzy.FuzzyDate(date(2010, 1, 1))
    reporting_fiscal_year = fuzzy.FuzzyInteger(2010, 2040)
    reporting_fiscal_period = fuzzy.FuzzyInteger(1, 4)
    is_quarter_format = False
    publishable = False
    number_of_errors = 0
    number_of_warnings = 0
Add factory for Submission model
Add factory for Submission model
Python
cc0-1.0
chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend
import factory
from factory import fuzzy
from datetime import date, datetime, timezone
from dataactcore.models import jobModels


class SubmissionFactory(factory.Factory):
    class Meta:
        model = jobModels.Submission

    submission_id = None
    datetime_utc = fuzzy.FuzzyDateTime(
        datetime(2010, 1, 1, tzinfo=timezone.utc))
    user_id = fuzzy.FuzzyInteger(1, 9999)
    cgac_code = fuzzy.FuzzyText()
    reporting_start_date = fuzzy.FuzzyDate(date(2010, 1, 1))
    reporting_end_date = fuzzy.FuzzyDate(date(2010, 1, 1))
    reporting_fiscal_year = fuzzy.FuzzyInteger(2010, 2040)
    reporting_fiscal_period = fuzzy.FuzzyInteger(1, 4)
    is_quarter_format = False
    publishable = False
    number_of_errors = 0
    number_of_warnings = 0
Add factory for Submission model
05f0969ee8b9374c2fe5bce2c753fb4619432f0d
tests/integration/runners/jobs.py
tests/integration/runners/jobs.py
# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
import integration


class ManageTest(integration.ShellCase):
    '''
    Test the manage runner
    '''
    def test_active(self):
        '''
        jobs.active
        '''
        ret = self.run_run_plus('jobs.active')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], ['{}'])

    def test_lookup_jid(self):
        '''
        jobs.lookup_jid
        '''
        ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], [])

    def test_list_jobs(self):
        '''
        jobs.list_jobs
        '''
        ret = self.run_run_plus('jobs.list_jobs')
        self.assertIsInstance(ret['fun'], dict)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(ManageTest)

# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
import integration


class ManageTest(integration.ShellCase):
    '''
    Test the manage runner
    '''
    def test_active(self):
        '''
        jobs.active
        '''
        ret = self.run_run_plus('jobs.active')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], [])

    def test_lookup_jid(self):
        '''
        jobs.lookup_jid
        '''
        ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], [])

    def test_list_jobs(self):
        '''
        jobs.list_jobs
        '''
        ret = self.run_run_plus('jobs.list_jobs')
        self.assertIsInstance(ret['fun'], dict)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(ManageTest)
Fix the output now that we are using the default output (nested) instead of hard coding it to yaml
Fix the output now that we are using the default output (nested) instead of hard coding it to yaml
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
import integration


class ManageTest(integration.ShellCase):
    '''
    Test the manage runner
    '''
    def test_active(self):
        '''
        jobs.active
        '''
        ret = self.run_run_plus('jobs.active')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], [])

    def test_lookup_jid(self):
        '''
        jobs.lookup_jid
        '''
        ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], [])

    def test_list_jobs(self):
        '''
        jobs.list_jobs
        '''
        ret = self.run_run_plus('jobs.list_jobs')
        self.assertIsInstance(ret['fun'], dict)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(ManageTest)

Fix the output now that we are using the default output (nested) instead of hard coding it to yaml
# -*- coding: utf-8 -*-
'''
Tests for the salt-run command
'''
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')

# Import salt libs
import integration


class ManageTest(integration.ShellCase):
    '''
    Test the manage runner
    '''
    def test_active(self):
        '''
        jobs.active
        '''
        ret = self.run_run_plus('jobs.active')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], ['{}'])

    def test_lookup_jid(self):
        '''
        jobs.lookup_jid
        '''
        ret = self.run_run_plus('jobs.lookup_jid', '', '23974239742394')
        self.assertEqual(ret['fun'], {})
        self.assertEqual(ret['out'], [])

    def test_list_jobs(self):
        '''
        jobs.list_jobs
        '''
        ret = self.run_run_plus('jobs.list_jobs')
        self.assertIsInstance(ret['fun'], dict)


if __name__ == '__main__':
    from integration import run_tests
    run_tests(ManageTest)
0177066012b3373753cba8baf86f00a365d7147b
findaconf/tests/config.py
findaconf/tests/config.py
# coding: utf-8

from decouple import config
from findaconf.tests.fake_data import fake_conference, seed


def set_app(app, db=False):

    unset_app(db)

    app.config['TESTING'] = True
    app.config['WTF_CSRF_ENABLED'] = False

    if db:
        app.config['SQLALCHEMY_DATABASE_URI'] = config(
            'DATABASE_URL_TEST',
            default='sqlite:///' + app.config['BASEDIR'].child('findaconf',
                                                               'tests',
                                                               'tests.db')
        )

    test_app = app.test_client()

    if db:
        db.create_all()
        seed(app, db)
        [db.session.add(fake_conference(db)) for i in range(1, 43)]
        db.session.commit()

    return test_app


def unset_app(db=False):
    if db:
        db.session.remove()
        db.drop_all()

# coding: utf-8

from decouple import config
from findaconf.tests.fake_data import fake_conference, seed


def set_app(app, db=False):

    # set test vars
    app.config['TESTING'] = True
    app.config['WTF_CSRF_ENABLED'] = False

    # set test db
    if db:
        app.config['SQLALCHEMY_DATABASE_URI'] = config(
            'DATABASE_URL_TEST',
            default='sqlite:///' + app.config['BASEDIR'].child('findaconf',
                                                               'tests',
                                                               'tests.db')
        )

    # create test app
    test_app = app.test_client()

    # create and feed db tables
    if db:

        # start from a clean db
        db.session.remove()
        db.drop_all()

        # create tables and feed them
        db.create_all()
        seed(app, db)
        [db.session.add(fake_conference(db)) for i in range(1, 43)]
        db.session.commit()

    # return test app
    return test_app


def unset_app(db=False):
    if db:
        db.session.remove()
        db.drop_all()
Fix bug that used dev db instead of test db
Fix bug that used dev db instead of test db
Python
mit
cuducos/findaconf,koorukuroo/findaconf,cuducos/findaconf,koorukuroo/findaconf,koorukuroo/findaconf,cuducos/findaconf
# coding: utf-8

from decouple import config
from findaconf.tests.fake_data import fake_conference, seed


def set_app(app, db=False):

    # set test vars
    app.config['TESTING'] = True
    app.config['WTF_CSRF_ENABLED'] = False

    # set test db
    if db:
        app.config['SQLALCHEMY_DATABASE_URI'] = config(
            'DATABASE_URL_TEST',
            default='sqlite:///' + app.config['BASEDIR'].child('findaconf',
                                                               'tests',
                                                               'tests.db')
        )

    # create test app
    test_app = app.test_client()

    # create and feed db tables
    if db:

        # start from a clean db
        db.session.remove()
        db.drop_all()

        # create tables and feed them
        db.create_all()
        seed(app, db)
        [db.session.add(fake_conference(db)) for i in range(1, 43)]
        db.session.commit()

    # return test app
    return test_app


def unset_app(db=False):
    if db:
        db.session.remove()
        db.drop_all()

Fix bug that used dev db instead of test db
# coding: utf-8

from decouple import config
from findaconf.tests.fake_data import fake_conference, seed


def set_app(app, db=False):

    unset_app(db)

    app.config['TESTING'] = True
    app.config['WTF_CSRF_ENABLED'] = False

    if db:
        app.config['SQLALCHEMY_DATABASE_URI'] = config(
            'DATABASE_URL_TEST',
            default='sqlite:///' + app.config['BASEDIR'].child('findaconf',
                                                               'tests',
                                                               'tests.db')
        )

    test_app = app.test_client()

    if db:
        db.create_all()
        seed(app, db)
        [db.session.add(fake_conference(db)) for i in range(1, 43)]
        db.session.commit()

    return test_app


def unset_app(db=False):
    if db:
        db.session.remove()
        db.drop_all()
d666c5c818fbfc00f642cfeb24cb90aab94035cd
keyring/devpi_client.py
keyring/devpi_client.py
import contextlib
import functools

import pluggy

import keyring
from keyring.errors import KeyringError


hookimpl = pluggy.HookimplMarker("devpiclient")


# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})


def restore_signature(func):
    # workaround for pytest-dev/pluggy#358
    @functools.wraps(func)
    def wrapper(url, username):
        return func(url, username)

    return wrapper


@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
    """
    >>> pluggy._hooks.varnames(devpiclient_get_password)
    (('url', 'username'), ())
    >>>
    """
    return keyring.get_password(url, username)

import contextlib
import functools

import pluggy

import keyring.errors


hookimpl = pluggy.HookimplMarker("devpiclient")


# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})


def restore_signature(func):
    # workaround for pytest-dev/pluggy#358
    @functools.wraps(func)
    def wrapper(url, username):
        return func(url, username)

    return wrapper


@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
    """
    >>> pluggy._hooks.varnames(devpiclient_get_password)
    (('url', 'username'), ())
    >>>
    """
    return keyring.get_password(url, username)
Remove superfluous import by using the exception from the namespace.
Remove superfluous import by using the exception from the namespace.
Python
mit
jaraco/keyring
import contextlib
import functools

import pluggy

import keyring.errors


hookimpl = pluggy.HookimplMarker("devpiclient")


# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})


def restore_signature(func):
    # workaround for pytest-dev/pluggy#358
    @functools.wraps(func)
    def wrapper(url, username):
        return func(url, username)

    return wrapper


@hookimpl()
@restore_signature
@suppress(keyring.errors.KeyringError)
def devpiclient_get_password(url, username):
    """
    >>> pluggy._hooks.varnames(devpiclient_get_password)
    (('url', 'username'), ())
    >>>
    """
    return keyring.get_password(url, username)

Remove superfluous import by using the exception from the namespace.
import contextlib
import functools

import pluggy

import keyring
from keyring.errors import KeyringError


hookimpl = pluggy.HookimplMarker("devpiclient")


# https://github.com/jaraco/jaraco.context/blob/c3a9b739/jaraco/context.py#L205
suppress = type('suppress', (contextlib.suppress, contextlib.ContextDecorator), {})


def restore_signature(func):
    # workaround for pytest-dev/pluggy#358
    @functools.wraps(func)
    def wrapper(url, username):
        return func(url, username)

    return wrapper


@hookimpl()
@restore_signature
@suppress(KeyringError)
def devpiclient_get_password(url, username):
    """
    >>> pluggy._hooks.varnames(devpiclient_get_password)
    (('url', 'username'), ())
    >>>
    """
    return keyring.get_password(url, username)
06b9982ea716daa627a0beb700721c7ca53601fd
run.py
run.py
#!/usr/bin/env python
if __name__ == '__main__':
    import os
    import sys

    if sys.version_info[0:2] < (3, 4):
        raise SystemExit('python 3.4+ is required')

    root_path = os.path.abspath(os.path.dirname(__file__))

    try:
        import mtp_common

        # NB: this version does not need to be updated unless mtp_common changes significantly
        if mtp_common.VERSION < (10,):
            raise ImportError
    except ImportError:
        try:
            import pkg_resources
        except ImportError:
            raise SystemExit('setuptools and pip are required')
        try:
            pip = pkg_resources.load_entry_point('pip', 'console_scripts', 'pip')
        except pkg_resources.ResolutionError:
            raise SystemExit('setuptools and pip are required')
        print('Pre-installing MTP-common and base requirements')
        pip(['install', '--requirement', f'{root_path}/requirements/base.txt'])

    from mtp_common.build_tasks.executor import Executor
    import mtp_transaction_uploader.build_tasks  # noqa

    exit(Executor(root_path=root_path).run())

#!/usr/bin/env python
if __name__ == '__main__':
    import os
    import sys

    if sys.version_info[0:2] < (3, 6):
        raise SystemExit('Python 3.6+ is required')

    root_path = os.path.abspath(os.path.dirname(__file__))

    try:
        import mtp_common

        # NB: this version does not need to be updated unless mtp_common changes significantly
        if mtp_common.VERSION < (10,):
            raise ImportError
    except ImportError:
        try:
            import pkg_resources
        except ImportError:
            raise SystemExit('setuptools and pip are required')
        try:
            pip = pkg_resources.load_entry_point('pip', 'console_scripts', 'pip')
        except pkg_resources.ResolutionError:
            raise SystemExit('setuptools and pip are required')
        print('Pre-installing MTP-common and base requirements')
        pip(['install', '--requirement', f'{root_path}/requirements/base.txt'])

    from mtp_common.build_tasks.executor import Executor
    import mtp_transaction_uploader.build_tasks  # noqa

    exit(Executor(root_path=root_path).run())
Support only python versions 3.6+ explicitly …which has been the assumption for a while as 3.6 features are already in use and base docker images use 3.6.
Support only python versions 3.6+ explicitly

…which has been the assumption for a while as 3.6 features are already in use and base docker images use 3.6.
Python
mit
ministryofjustice/money-to-prisoners-transaction-uploader
#!/usr/bin/env python
if __name__ == '__main__':
    import os
    import sys

    if sys.version_info[0:2] < (3, 6):
        raise SystemExit('Python 3.6+ is required')

    root_path = os.path.abspath(os.path.dirname(__file__))

    try:
        import mtp_common

        # NB: this version does not need to be updated unless mtp_common changes significantly
        if mtp_common.VERSION < (10,):
            raise ImportError
    except ImportError:
        try:
            import pkg_resources
        except ImportError:
            raise SystemExit('setuptools and pip are required')
        try:
            pip = pkg_resources.load_entry_point('pip', 'console_scripts', 'pip')
        except pkg_resources.ResolutionError:
            raise SystemExit('setuptools and pip are required')
        print('Pre-installing MTP-common and base requirements')
        pip(['install', '--requirement', f'{root_path}/requirements/base.txt'])

    from mtp_common.build_tasks.executor import Executor
    import mtp_transaction_uploader.build_tasks  # noqa

    exit(Executor(root_path=root_path).run())

Support only python versions 3.6+ explicitly

…which has been the assumption for a while as 3.6 features are already in use and base docker images use 3.6.
#!/usr/bin/env python
if __name__ == '__main__':
    import os
    import sys

    if sys.version_info[0:2] < (3, 4):
        raise SystemExit('python 3.4+ is required')

    root_path = os.path.abspath(os.path.dirname(__file__))

    try:
        import mtp_common

        # NB: this version does not need to be updated unless mtp_common changes significantly
        if mtp_common.VERSION < (10,):
            raise ImportError
    except ImportError:
        try:
            import pkg_resources
        except ImportError:
            raise SystemExit('setuptools and pip are required')
        try:
            pip = pkg_resources.load_entry_point('pip', 'console_scripts', 'pip')
        except pkg_resources.ResolutionError:
            raise SystemExit('setuptools and pip are required')
        print('Pre-installing MTP-common and base requirements')
        pip(['install', '--requirement', f'{root_path}/requirements/base.txt'])

    from mtp_common.build_tasks.executor import Executor
    import mtp_transaction_uploader.build_tasks  # noqa

    exit(Executor(root_path=root_path).run())
20654d833deb332dbbe683e6d4e38cef1cc58dd3
webcomix/tests/test_comic_availability.py
webcomix/tests/test_comic_availability.py
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)

import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)
Refactor comic availability test to reflect changes to Comic class
Refactor comic availability test to reflect changes to Comic class
Python
mit
J-CPelletier/WebComicToCBZ,J-CPelletier/webcomix,J-CPelletier/webcomix
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        comic = Comic(comic_name, *comic_info)
        first_pages = comic.verify_xpath()
        check_first_pages(first_pages)

Refactor comic availability test to reflect changes to Comic class
import pytest

from webcomix.comic import Comic
from webcomix.supported_comics import supported_comics
from webcomix.util import check_first_pages


@pytest.mark.slow
def test_supported_comics():
    for comic_name, comic_info in supported_comics.items():
        first_pages = Comic.verify_xpath(*comic_info)
        check_first_pages(first_pages)
52da8be7ffe6ea2ba09acf3ce44b9a79758b115b
glance/version.py
glance/version.py
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import pbr.version

version_info = pbr.version.VersionInfo('glance')

# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None  # OS distro package version suffix

loaded = False


class VersionInfo(object):
    release = "REDHATGLANCERELEASE"
    version = "REDHATGLANCEVERSION"

    def version_string(self):
        return self.version

    def cached_version_string(self):
        return self.version

    def release_string(self):
        return self.release

    def canonical_version_string(self):
        return self.version

    def version_string_with_vcs(self):
        return self.release


version_info = VersionInfo()
Remove runtime dep on python pbr
Remove runtime dep on python pbr
Python
apache-2.0
redhat-openstack/glance,redhat-openstack/glance
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

GLANCE_VENDOR = "OpenStack Foundation"
GLANCE_PRODUCT = "OpenStack Glance"
GLANCE_PACKAGE = None  # OS distro package version suffix

loaded = False


class VersionInfo(object):
    release = "REDHATGLANCERELEASE"
    version = "REDHATGLANCEVERSION"

    def version_string(self):
        return self.version

    def cached_version_string(self):
        return self.version

    def release_string(self):
        return self.release

    def canonical_version_string(self):
        return self.version

    def version_string_with_vcs(self):
        return self.release


version_info = VersionInfo()

Remove runtime dep on python pbr
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import pbr.version

version_info = pbr.version.VersionInfo('glance')
9261db252969c69ede633d4a4c02bb87c7bc1434
quilt/__init__.py
quilt/__init__.py
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)

# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

""" A python implementation of quilt """

__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
Add docstring for main module
Add docstring for main module
Python
mit
bjoernricks/python-quilt,vadmium/python-quilt
# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80: # python-quilt - A Python implementation of the quilt patch system # # Copyright (C) 2012 Björn Ricks <[email protected]> # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301 USA """ A python implementation of quilt """ __version_info__ = ("0", "1", "dev1") __version__ = '.'.join(__version_info__)
Add docstring for main module

# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:

# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.

# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
dfa76a4ad4a15e4068135b5f82ef5a00763c4b57
open_humans/models.py
open_humans/models.py
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver


class Profile(models.Model):
    user = models.OneToOneField(User)

    about_me = models.TextField()


@receiver(post_save, sender=User, dispatch_uid='create_profile')
def cb_create_profile(sender, instance, created, raw, **kwargs):
    """ Create an account for the newly created user. """
    # If we're loading a user via a fixture then `raw` will be true and in that
    # case we won't want to create a Profile to go with it
    if created and not raw:
        Profile.objects.create(user=instance)
Add Profile model and post-save hook
Add Profile model and post-save hook
Python
mit
OpenHumans/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver


class Profile(models.Model):
    user = models.OneToOneField(User)

    about_me = models.TextField()


@receiver(post_save, sender=User, dispatch_uid='create_profile')
def cb_create_profile(sender, instance, created, raw, **kwargs):
    """ Create an account for the newly created user. """
    # If we're loading a user via a fixture then `raw` will be true and in that
    # case we won't want to create a Profile to go with it
    if created and not raw:
        Profile.objects.create(user=instance)
Add Profile model and post-save hook
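
An aside on the hook in the record above (an editorial illustration, not part of the commit): Django passes raw=True to post_save receivers during fixture loading, which is exactly what the guard checks. A hedged usage sketch, assuming the model and receiver above are installed in an app (the username is made up):

# Loading users from a fixture sets raw=True, so no Profile is created:
#     python manage.py loaddata users.json
# A normal save fires the receiver with raw=False:
user = User.objects.create(username='alice')
profile = user.profile  # reverse accessor created by the OneToOneField
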
1b921e83d000d024e38b0d7f81984b699cb49fac
fmriprep/cli/sample_openfmri_tasks_list.py
fmriprep/cli/sample_openfmri_tasks_list.py
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
A tool to generate a tasks_list.sh file for running fmriprep
on subjects downloaded with datalad with sample_openfmri.py

"""
import os
import glob

CMDLINE = """\
{fmriprep_cmd} {bids_dir}/{dataset_dir} {dataset_dir}/out/ participant \
-w {dataset_dir}/work --participant_label {participant_label} \
--mem-mb 96000 --nthreads 68 --omp-nthreads 12\
"""


def get_parser():
    """Build parser object"""
    from argparse import ArgumentParser
    from argparse import RawTextHelpFormatter

    parser = ArgumentParser(
        description='OpenfMRI participants sampler, for FMRIPREP\'s testing purposes',
        formatter_class=RawTextHelpFormatter)

    parser.add_argument('openfmri_dir', action='store',
                        help='the root folder of the openfmri dataset')
    parser.add_argument('sample_file', action='store',
                        help='a YAML file containing the subsample schedule')

    # optional arguments
    parser.add_argument('--anat-only', action='store_true', default=False,
                        help='run only anatomical workflow')
    parser.add_argument('-o', '--output-file', default='tasks_list.sh',
                        action='store', help='write output file')
    parser.add_argument('--cmd-call', action='store',
                        help='command to be run')
    return parser


def main():
    """Entry point"""
    import yaml
    opts = get_parser().parse_args()

    with open(opts.sample_file) as sfh:
        sampledict = yaml.load(sfh)

    cmdline = CMDLINE
    if opts.anat_only:
        cmdline += ' --anat-only'

    fmriprep_cmd = 'fmriprep'
    if opts.cmd_call is None:
        singularity_dir = os.getenv('SINGULARITY_BIN')
        singularity_img = sorted(
            glob.glob(os.path.join(singularity_dir, 'poldracklab_fmriprep_1*')))
        if singularity_img:
            fmriprep_cmd = 'singularity run %s' % singularity_img[-1]

    task_cmds = []
    for dset, sublist in sampledict.items():
        os.mkdir(dset)
        for sub in sublist:
            cmd = cmdline.format(
                fmriprep_cmd=fmriprep_cmd,
                bids_dir=opts.openfmri_dir,
                dataset_dir=dset,
                participant_label=sub,
            )
            task_cmds.append(cmd)

    with open(opts.output_file, 'w') as tlfile:
        tlfile.write('\n'.join(task_cmds))


if __name__ == '__main__':
    main()
Add simple script to write tasks_list file
[skip ci] Add simple script to write tasks_list file
Python
bsd-3-clause
poldracklab/fmriprep,poldracklab/preprocessing-workflow,oesteban/preprocessing-workflow,poldracklab/fmriprep,poldracklab/fmriprep,oesteban/fmriprep,oesteban/fmriprep,oesteban/fmriprep,poldracklab/preprocessing-workflow,oesteban/preprocessing-workflow
# -*- coding: utf-8 -*-
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
A tool to generate a tasks_list.sh file for running fmriprep
on subjects downloaded with datalad with sample_openfmri.py

"""
import os
import glob

CMDLINE = """\
{fmriprep_cmd} {bids_dir}/{dataset_dir} {dataset_dir}/out/ participant \
-w {dataset_dir}/work --participant_label {participant_label} \
--mem-mb 96000 --nthreads 68 --omp-nthreads 12\
"""


def get_parser():
    """Build parser object"""
    from argparse import ArgumentParser
    from argparse import RawTextHelpFormatter

    parser = ArgumentParser(
        description='OpenfMRI participants sampler, for FMRIPREP\'s testing purposes',
        formatter_class=RawTextHelpFormatter)

    parser.add_argument('openfmri_dir', action='store',
                        help='the root folder of the openfmri dataset')
    parser.add_argument('sample_file', action='store',
                        help='a YAML file containing the subsample schedule')

    # optional arguments
    parser.add_argument('--anat-only', action='store_true', default=False,
                        help='run only anatomical workflow')
    parser.add_argument('-o', '--output-file', default='tasks_list.sh',
                        action='store', help='write output file')
    parser.add_argument('--cmd-call', action='store',
                        help='command to be run')
    return parser


def main():
    """Entry point"""
    import yaml
    opts = get_parser().parse_args()

    with open(opts.sample_file) as sfh:
        sampledict = yaml.load(sfh)

    cmdline = CMDLINE
    if opts.anat_only:
        cmdline += ' --anat-only'

    fmriprep_cmd = 'fmriprep'
    if opts.cmd_call is None:
        singularity_dir = os.getenv('SINGULARITY_BIN')
        singularity_img = sorted(
            glob.glob(os.path.join(singularity_dir, 'poldracklab_fmriprep_1*')))
        if singularity_img:
            fmriprep_cmd = 'singularity run %s' % singularity_img[-1]

    task_cmds = []
    for dset, sublist in sampledict.items():
        os.mkdir(dset)
        for sub in sublist:
            cmd = cmdline.format(
                fmriprep_cmd=fmriprep_cmd,
                bids_dir=opts.openfmri_dir,
                dataset_dir=dset,
                participant_label=sub,
            )
            task_cmds.append(cmd)

    with open(opts.output_file, 'w') as tlfile:
        tlfile.write('\n'.join(task_cmds))


if __name__ == '__main__':
    main()
[skip ci] Add simple script to write tasks_list file
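
One detail in the script above worth flagging (editorial note, not from the repository): yaml.load without an explicit Loader can construct arbitrary Python objects and has since been deprecated in PyYAML. A hedged alternative, with a hypothetical file name:

import yaml

with open('sample.yml') as sfh:
    sampledict = yaml.safe_load(sfh)  # restricts parsing to plain data types
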
7865d7a37562be8b0af9b3668043d8c08138814b
examples/get_each_args.py
examples/get_each_args.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os
sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
Add clint to import paths
Add clint to import paths
Python
isc
kennethreitz/clint
#! /usr/bin/env python
# -*- coding: utf-8 -*-

import sys
import os
sys.path.insert(0, os.path.abspath('..'))

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
Add clint to import paths

#! /usr/bin/env python
# -*- coding: utf-8 -*-

from clint.arguments import Args
from clint.textui import puts, colored

all_args = Args().grouped

for item in all_args:
    if item is not '_':
        puts(colored.red("key:%s"%item))
        print(all_args[item].all)
8773f652a1cf78299e3dd5ba9296ca2a50143caa
aiopg/__init__.py
aiopg/__init__.py
import re
import sys
from collections import namedtuple

from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool

__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
           'version', 'version_info')

__version__ = '0.3.2'

version = __version__ + ' , Python ' + sys.version

VersionInfo = namedtuple('VersionInfo',
                         'major minor micro releaselevel serial')


def _parse_version(ver):
    RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
          '(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
    match = re.match(RE, ver)
    try:
        major = int(match.group('major'))
        minor = int(match.group('minor'))
        micro = int(match.group('micro'))
        levels = {'rc': 'candidate',
                  'a': 'alpha',
                  'b': 'beta',
                  None: 'final'}
        releaselevel = levels[match.group('releaselevel')]
        serial = int(match.group('serial')) if match.group('serial') else 0
        return VersionInfo(major, minor, micro, releaselevel, serial)
    except Exception:
        raise ImportError("Invalid package version {}".format(ver))


version_info = _parse_version(__version__)

# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
import re
import sys
from collections import namedtuple

from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool

__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
           'version', 'version_info')

__version__ = '0.4.0a0'

version = __version__ + ' , Python ' + sys.version

VersionInfo = namedtuple('VersionInfo',
                         'major minor micro releaselevel serial')


def _parse_version(ver):
    RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
          '(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
    match = re.match(RE, ver)
    try:
        major = int(match.group('major'))
        minor = int(match.group('minor'))
        micro = int(match.group('micro'))
        levels = {'rc': 'candidate',
                  'a': 'alpha',
                  'b': 'beta',
                  None: 'final'}
        releaselevel = levels[match.group('releaselevel')]
        serial = int(match.group('serial')) if match.group('serial') else 0
        return VersionInfo(major, minor, micro, releaselevel, serial)
    except Exception:
        raise ImportError("Invalid package version {}".format(ver))


version_info = _parse_version(__version__)

# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
Revert master version to 0.4.0a0
Revert master version to 0.4.0a0
Python
bsd-2-clause
eirnym/aiopg,graingert/aiopg,hyzhak/aiopg,aio-libs/aiopg,luhn/aiopg,nerandell/aiopg
import re
import sys
from collections import namedtuple

from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool

__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
           'version', 'version_info')

__version__ = '0.4.0a0'

version = __version__ + ' , Python ' + sys.version

VersionInfo = namedtuple('VersionInfo',
                         'major minor micro releaselevel serial')


def _parse_version(ver):
    RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
          '(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
    match = re.match(RE, ver)
    try:
        major = int(match.group('major'))
        minor = int(match.group('minor'))
        micro = int(match.group('micro'))
        levels = {'rc': 'candidate',
                  'a': 'alpha',
                  'b': 'beta',
                  None: 'final'}
        releaselevel = levels[match.group('releaselevel')]
        serial = int(match.group('serial')) if match.group('serial') else 0
        return VersionInfo(major, minor, micro, releaselevel, serial)
    except Exception:
        raise ImportError("Invalid package version {}".format(ver))


version_info = _parse_version(__version__)

# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
Revert master version to 0.4.0a0

import re
import sys
from collections import namedtuple

from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool

__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
           'version', 'version_info')

__version__ = '0.3.2'

version = __version__ + ' , Python ' + sys.version

VersionInfo = namedtuple('VersionInfo',
                         'major minor micro releaselevel serial')


def _parse_version(ver):
    RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
          '(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
    match = re.match(RE, ver)
    try:
        major = int(match.group('major'))
        minor = int(match.group('minor'))
        micro = int(match.group('micro'))
        levels = {'rc': 'candidate',
                  'a': 'alpha',
                  'b': 'beta',
                  None: 'final'}
        releaselevel = levels[match.group('releaselevel')]
        serial = int(match.group('serial')) if match.group('serial') else 0
        return VersionInfo(major, minor, micro, releaselevel, serial)
    except Exception:
        raise ImportError("Invalid package version {}".format(ver))


version_info = _parse_version(__version__)

# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
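
A quick illustration of what the version parser in this record produces (an editorial aside; the asserts reuse _parse_version and VersionInfo as defined in the module above, and the input strings are illustrative):

# Sanity checks for _parse_version, run alongside the module above:
assert _parse_version('0.3.2') == VersionInfo(0, 3, 2, 'final', 0)
assert _parse_version('0.4.0a0') == VersionInfo(0, 4, 0, 'alpha', 0)
assert _parse_version('1.0.0rc2') == VersionInfo(1, 0, 0, 'candidate', 2)
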
eacfca844e5ab590acfcd193e2ca1fa379e10009
alg_strongly_connected_components.py
alg_strongly_connected_components.py
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division


def strongly_connected_components(adj_dict):
    """Strongly connected components for graph.

    Procedure:
    - Call (Depth First Search) DFS on graph G to compute finish times
      for each vertex.
    - Compute the transpose graph G^T of graph G.
    - Call DFS on G^T, but in the main loop of DFS, feed the vertex
      in the decreasing order of finish times.
    - Output the vertices of each tree in the DFS forest as
      separate strongly connected components.
    """
    pass


def main():
    # 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
    adj_dict = {
        'A': ['B'],
        'B': ['C', 'E'],
        'C': ['C', 'F'],
        'D': ['B', 'G'],
        'E': ['A', 'D'],
        'F': ['H'],
        'G': ['E'],
        'H': ['I'],
        'I': ['F']
    }
    strongly_connected_components(adj_dict)


if __name__ == '__main__':
    main()
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division


def _previsit():
    pass


def _postvisit():
    pass


def dfs():
    pass


def _transpose_graph():
    pass


def _inverse_postvisit_vertex():
    pass


def strongly_connected_components(adj_dict):
    """Strongly connected components for graph.

    Procedure:
    - Call (Depth First Search) DFS on graph G to compute finish times
      for each vertex.
    - Compute the transpose graph G^T of graph G.
    - Call DFS on G^T, but in the main loop of DFS, feed the vertex
      in the decreasing order of postvisit times.
    - Output the vertices of each tree in the DFS forest as
      separate strongly connected components.
    """
    pass


def main():
    # 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
    adj_dict = {
        'A': ['B'],
        'B': ['C', 'E'],
        'C': ['C', 'F'],
        'D': ['B', 'G'],
        'E': ['A', 'D'],
        'F': ['H'],
        'G': ['E'],
        'H': ['I'],
        'I': ['F']
    }
    strongly_connected_components(adj_dict)


if __name__ == '__main__':
    main()
Add strongly connected components's methods
Add strongly connected components's methods
Python
bsd-2-clause
bowen0701/algorithms_data_structures
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division


def _previsit():
    pass


def _postvisit():
    pass


def dfs():
    pass


def _transpose_graph():
    pass


def _inverse_postvisit_vertex():
    pass


def strongly_connected_components(adj_dict):
    """Strongly connected components for graph.

    Procedure:
    - Call (Depth First Search) DFS on graph G to compute finish times
      for each vertex.
    - Compute the transpose graph G^T of graph G.
    - Call DFS on G^T, but in the main loop of DFS, feed the vertex
      in the decreasing order of postvisit times.
    - Output the vertices of each tree in the DFS forest as
      separate strongly connected components.
    """
    pass


def main():
    # 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
    adj_dict = {
        'A': ['B'],
        'B': ['C', 'E'],
        'C': ['C', 'F'],
        'D': ['B', 'G'],
        'E': ['A', 'D'],
        'F': ['H'],
        'G': ['E'],
        'H': ['I'],
        'I': ['F']
    }
    strongly_connected_components(adj_dict)


if __name__ == '__main__':
    main()
Add strongly connected components's methods

from __future__ import absolute_import
from __future__ import print_function
from __future__ import division


def strongly_connected_components(adj_dict):
    """Strongly connected components for graph.

    Procedure:
    - Call (Depth First Search) DFS on graph G to compute finish times
      for each vertex.
    - Compute the transpose graph G^T of graph G.
    - Call DFS on G^T, but in the main loop of DFS, feed the vertex
      in the decreasing order of finish times.
    - Output the vertices of each tree in the DFS forest as
      separate strongly connected components.
    """
    pass


def main():
    # 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
    adj_dict = {
        'A': ['B'],
        'B': ['C', 'E'],
        'C': ['C', 'F'],
        'D': ['B', 'G'],
        'E': ['A', 'D'],
        'F': ['H'],
        'G': ['E'],
        'H': ['I'],
        'I': ['F']
    }
    strongly_connected_components(adj_dict)


if __name__ == '__main__':
    main()
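
The stubs in this record only describe the procedure in a docstring. As a hedged sketch of the full algorithm (my own illustration of Kosaraju's method for the same adjacency-dict format, not code from the repository):

def kosaraju_scc(adj_dict):
    """Return strongly connected components as a list of sets."""
    visited = set()
    postorder = []

    def _dfs(graph, vertex, out):
        visited.add(vertex)
        for neighbor in graph.get(vertex, []):
            if neighbor not in visited:
                _dfs(graph, neighbor, out)
        out.append(vertex)  # record the vertex at postvisit time

    # Pass 1: DFS on G, collecting vertices in order of finish time.
    for vertex in adj_dict:
        if vertex not in visited:
            _dfs(adj_dict, vertex, postorder)

    # Pass 2: build the transpose graph G^T.
    transpose = {vertex: [] for vertex in adj_dict}
    for vertex, neighbors in adj_dict.items():
        for neighbor in neighbors:
            transpose.setdefault(neighbor, []).append(vertex)

    # Pass 3: DFS on G^T, seeding in decreasing finish-time order.
    visited.clear()
    components = []
    for vertex in reversed(postorder):
        if vertex not in visited:
            component = []
            _dfs(transpose, vertex, component)
            components.append(set(component))
    return components

Running it on the adjacency dict from main() yields the three components noted in the comment: {A, B, D, E, G}, {C}, and {F, H, I}.
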
a15e363718ab41c5e02b9eaa919fb689cd266af6
nose2/tests/_common.py
nose2/tests/_common.py
"""Common functionality.""" import os.path import tempfile import shutil import sys class TestCase(unittest2.TestCase): """TestCase extension. If the class variable _RUN_IN_TEMP is True (default: False), tests will be performed in a temporary directory, which is deleted afterwards. """ _RUN_IN_TEMP = False def setUp(self): super(TestCase, self).setUp() if self._RUN_IN_TEMP: self.__orig_dir = os.getcwd() work_dir = self.__work_dir = tempfile.mkdtemp() os.chdir(self.__work_dir) # Make sure it's possible to import modules from current directory sys.path.insert(0, work_dir) def tearDown(self): super(TestCase, self).tearDown() if self._RUN_IN_TEMP: os.chdir(self.__orig_dir) shutil.rmtree(self.__work_dir, ignore_errors=True) class _FakeEventBase(object): """Baseclass for fake Events."""
Add common module for our tests
Add common module for our tests
Python
bsd-2-clause
ptthiem/nose2,ojengwa/nose2,ezigman/nose2,ojengwa/nose2,leth/nose2,ezigman/nose2,ptthiem/nose2,little-dude/nose2,leth/nose2,little-dude/nose2
"""Common functionality.""" import os.path import tempfile import shutil import sys class TestCase(unittest2.TestCase): """TestCase extension. If the class variable _RUN_IN_TEMP is True (default: False), tests will be performed in a temporary directory, which is deleted afterwards. """ _RUN_IN_TEMP = False def setUp(self): super(TestCase, self).setUp() if self._RUN_IN_TEMP: self.__orig_dir = os.getcwd() work_dir = self.__work_dir = tempfile.mkdtemp() os.chdir(self.__work_dir) # Make sure it's possible to import modules from current directory sys.path.insert(0, work_dir) def tearDown(self): super(TestCase, self).tearDown() if self._RUN_IN_TEMP: os.chdir(self.__orig_dir) shutil.rmtree(self.__work_dir, ignore_errors=True) class _FakeEventBase(object): """Baseclass for fake Events."""
Add common module for our tests
8a573dae750b1b9415df0c9e2c019750171e66f0
migrations.py
migrations.py
import os
import json

from dateutil.parser import parse

from scrapi.util import safe_filename


def migrate_from_old_scrapi():
    for dirname, dirs, filenames in os.walk('archive'):
        for filename in filenames:
            oldpath = os.path.join(dirname, filename)
            source, sid, dt = dirname.split('/')[1:]
            dt = parse(dt).isoformat()
            sid = safe_filename(sid)

            newpath = os.path.join('archive', source, sid, dt, filename)

            if filename == 'manifest.json':
                with open(oldpath) as old:
                    old_json = json.load(old)

                new_json = {
                    'consumerVersion': old_json['version'],
                    'normalizeVersion': old_json['version'],
                    'timestamp': dt,
                    'source': source,
                    'id': sid
                }

                old_json = json.dumps(old_json, indent=4, sort_keys=True)
                new_json = json.dumps(new_json, indent=4, sort_keys=True)

            print '{} -> {}'.format(oldpath, newpath)
            print old_json
            print new_json
import os
import json

from dateutil.parser import parse

from scrapi.util import safe_filename


def migrate_from_old_scrapi():
    for dirname, dirs, filenames in os.walk('archive'):
        for filename in filenames:
            oldpath = os.path.join(dirname, filename)
            source, sid, dt = dirname.split('/')[1:]
            dt = parse(dt).isoformat()
            sid = safe_filename(sid)

            newpath = os.path.join('archive', source, sid, dt, filename)

            if filename == 'manifest.json':
                with open(oldpath) as old:
                    old_json = json.load(old)

                new_json = {
                    'consumerVersion': old_json['version'],
                    'normalizeVersion': old_json['version'],
                    'timestamp': dt,
                    'source': source,
                    'id': sid
                }

                old_json = json.dumps(old_json, indent=4, sort_keys=True)
                new_json = json.dumps(new_json, indent=4, sort_keys=True)

                print old_json
                print new_json

            print '{} -> {}'.format(oldpath, newpath)
Move json print methods into if statement
Move json print methods into if statement
Python
apache-2.0
erinspace/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,fabianvf/scrapi,fabianvf/scrapi,ostwald/scrapi,mehanig/scrapi,alexgarciac/scrapi,jeffreyliu3230/scrapi,felliott/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi
import os
import json

from dateutil.parser import parse

from scrapi.util import safe_filename


def migrate_from_old_scrapi():
    for dirname, dirs, filenames in os.walk('archive'):
        for filename in filenames:
            oldpath = os.path.join(dirname, filename)
            source, sid, dt = dirname.split('/')[1:]
            dt = parse(dt).isoformat()
            sid = safe_filename(sid)

            newpath = os.path.join('archive', source, sid, dt, filename)

            if filename == 'manifest.json':
                with open(oldpath) as old:
                    old_json = json.load(old)

                new_json = {
                    'consumerVersion': old_json['version'],
                    'normalizeVersion': old_json['version'],
                    'timestamp': dt,
                    'source': source,
                    'id': sid
                }

                old_json = json.dumps(old_json, indent=4, sort_keys=True)
                new_json = json.dumps(new_json, indent=4, sort_keys=True)

                print old_json
                print new_json

            print '{} -> {}'.format(oldpath, newpath)
Move json print methods into if statement

import os
import json

from dateutil.parser import parse

from scrapi.util import safe_filename


def migrate_from_old_scrapi():
    for dirname, dirs, filenames in os.walk('archive'):
        for filename in filenames:
            oldpath = os.path.join(dirname, filename)
            source, sid, dt = dirname.split('/')[1:]
            dt = parse(dt).isoformat()
            sid = safe_filename(sid)

            newpath = os.path.join('archive', source, sid, dt, filename)

            if filename == 'manifest.json':
                with open(oldpath) as old:
                    old_json = json.load(old)

                new_json = {
                    'consumerVersion': old_json['version'],
                    'normalizeVersion': old_json['version'],
                    'timestamp': dt,
                    'source': source,
                    'id': sid
                }

                old_json = json.dumps(old_json, indent=4, sort_keys=True)
                new_json = json.dumps(new_json, indent=4, sort_keys=True)

            print '{} -> {}'.format(oldpath, newpath)
            print old_json
            print new_json
84c4097caf0db678859252c58c1822d12d11c924
polly/plugins/publish/upload_avalon_asset.py
polly/plugins/publish/upload_avalon_asset.py
from pyblish import api
from avalon.api import Session


class UploadAvalonAsset(api.InstancePlugin):
    """Write to files and metadata

    This plug-in exposes your data to others by encapsulating it
    into a new version.

    """

    label = "Upload"
    order = api.IntegratorOrder + 0.1
    depends = ["IntegrateAvalonAsset"]
    optional = True
    active = bool(Session.get("AVALON_UPLOAD"))
    families = [
        "mindbender.model",
        "mindbender.rig",
        "mindbender.animation",
        "mindbender.lookdev",
        "mindbender.historyLookdev",
        "mindbender.group",
        "mindbender.imagesequence",
    ]

    def process(self, instance):
        from avalon import api
        from avalon.vendor import requests

        # Dependencies
        AVALON_LOCATION = api.Session["AVALON_LOCATION"]
        AVALON_USERNAME = api.Session["AVALON_USERNAME"]
        AVALON_PASSWORD = api.Session["AVALON_PASSWORD"]

        for src in instance.data["output"]:
            assert src.startswith(api.registered_root()), (
                "Output didn't reside on root, this is a bug"
            )

            dst = src.replace(
                api.registered_root(),
                AVALON_LOCATION + "/upload"
            ).replace("\\", "/")

            self.log.info("Uploading %s -> %s" % (src, dst))

            auth = requests.auth.HTTPBasicAuth(
                AVALON_USERNAME, AVALON_PASSWORD
            )

            with open(src) as f:
                response = requests.put(
                    dst,
                    data=f,
                    auth=auth,
                    headers={"Content-Type": "application/octet-stream"}
                )

                if not response.ok:
                    raise Exception(response.text)
Implement automatic upload, enabled via AVALON_UPLOAD
Implement automatic upload, enabled via AVALON_UPLOAD
Python
mit
mindbender-studio/config
from pyblish import api
from avalon.api import Session


class UploadAvalonAsset(api.InstancePlugin):
    """Write to files and metadata

    This plug-in exposes your data to others by encapsulating it
    into a new version.

    """

    label = "Upload"
    order = api.IntegratorOrder + 0.1
    depends = ["IntegrateAvalonAsset"]
    optional = True
    active = bool(Session.get("AVALON_UPLOAD"))
    families = [
        "mindbender.model",
        "mindbender.rig",
        "mindbender.animation",
        "mindbender.lookdev",
        "mindbender.historyLookdev",
        "mindbender.group",
        "mindbender.imagesequence",
    ]

    def process(self, instance):
        from avalon import api
        from avalon.vendor import requests

        # Dependencies
        AVALON_LOCATION = api.Session["AVALON_LOCATION"]
        AVALON_USERNAME = api.Session["AVALON_USERNAME"]
        AVALON_PASSWORD = api.Session["AVALON_PASSWORD"]

        for src in instance.data["output"]:
            assert src.startswith(api.registered_root()), (
                "Output didn't reside on root, this is a bug"
            )

            dst = src.replace(
                api.registered_root(),
                AVALON_LOCATION + "/upload"
            ).replace("\\", "/")

            self.log.info("Uploading %s -> %s" % (src, dst))

            auth = requests.auth.HTTPBasicAuth(
                AVALON_USERNAME, AVALON_PASSWORD
            )

            with open(src) as f:
                response = requests.put(
                    dst,
                    data=f,
                    auth=auth,
                    headers={"Content-Type": "application/octet-stream"}
                )

                if not response.ok:
                    raise Exception(response.text)
Implement automatic upload, enabled via AVALON_UPLOAD
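
One caveat about the uploader in this record (an editorial note, not part of the commit): open(src) without a mode reads in text mode, which corrupts binary payloads under Python 3. A hedged sketch of the safer form, reusing the names from the plug-in above:

with open(src, "rb") as f:  # binary mode; requests streams the file handle
    response = requests.put(
        dst,
        data=f,
        auth=auth,
        headers={"Content-Type": "application/octet-stream"}
    )
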
d70360601669f9e58072cd121de79896690471fd
buildlet/datastore/tests/test_inmemory.py
buildlet/datastore/tests/test_inmemory.py
import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):

    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):

    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableAutoValueTestCase,
                                    unittest.TestCase):

    dstype = DataStoreNestableInMemory
import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory,
    DataStoreNestableInMemoryAutoValue)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase, MixInNestableTestCase,
    MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):

    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):

    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableTestCase,
                                    unittest.TestCase):

    dstype = DataStoreNestableInMemory


class TestDataStoreNestableInMemoryAutoValue(MixInNestableAutoValueTestCase,
                                             unittest.TestCase):

    dstype = DataStoreNestableInMemoryAutoValue
Fix and add tests for datastore.inmemory
Fix and add tests for datastore.inmemory
Python
bsd-3-clause
tkf/buildlet
import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory,
    DataStoreNestableInMemoryAutoValue)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase, MixInNestableTestCase,
    MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):

    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):

    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableTestCase,
                                    unittest.TestCase):

    dstype = DataStoreNestableInMemory


class TestDataStoreNestableInMemoryAutoValue(MixInNestableAutoValueTestCase,
                                             unittest.TestCase):

    dstype = DataStoreNestableInMemoryAutoValue
Fix and add tests for datastore.inmemory

import unittest

from ..inmemory import (
    DataValueInMemory, DataStreamInMemory, DataStoreNestableInMemory)
from .mixintestcase import (
    MixInValueTestCase, MixInStreamTestCase, MixInNestableAutoValueTestCase)


class TestDataValueInMemory(MixInValueTestCase, unittest.TestCase):

    dstype = DataValueInMemory

    def test_set_get_singleton(self):
        obj = object()
        self.ds.set(obj)
        self.assertTrue(self.ds.get() is obj)


class TestDataStreamInMemory(MixInStreamTestCase, unittest.TestCase):

    dstype = DataStreamInMemory


class TestDataStoreNestableInMemory(MixInNestableAutoValueTestCase,
                                    unittest.TestCase):

    dstype = DataStoreNestableInMemory
5e3f3b83974c4826cddcfdb73f2d4eb4abe2aca1
examples/test_download_files.py
examples/test_download_files.py
from seleniumbase import BaseCase


class DownloadTests(BaseCase):

    def test_download_files(self):
        self.open("https://pypi.org/project/seleniumbase/#files")
        pkg_header = self.get_text("h1.package-header__name")
        pkg_name = pkg_header.replace(" ", "-")
        whl_file = pkg_name + "-py2.py3-none-any.whl"
        self.click('div#files a[href$="%s"]' % whl_file)
        self.assert_downloaded_file(whl_file)
        tar_gz_file = pkg_name + ".tar.gz"
        self.click('div#files a[href$="%s"]' % tar_gz_file)
        self.assert_downloaded_file(tar_gz_file)
Add test for asserting downloaded files
Add test for asserting downloaded files
Python
mit
seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
from seleniumbase import BaseCase


class DownloadTests(BaseCase):

    def test_download_files(self):
        self.open("https://pypi.org/project/seleniumbase/#files")
        pkg_header = self.get_text("h1.package-header__name")
        pkg_name = pkg_header.replace(" ", "-")
        whl_file = pkg_name + "-py2.py3-none-any.whl"
        self.click('div#files a[href$="%s"]' % whl_file)
        self.assert_downloaded_file(whl_file)
        tar_gz_file = pkg_name + ".tar.gz"
        self.click('div#files a[href$="%s"]' % tar_gz_file)
        self.assert_downloaded_file(tar_gz_file)
Add test for asserting downloaded files
391c1681eaeabfdbe65a64a1bb8b05beca30141e
wqflask/utility/db_tools.py
wqflask/utility/db_tools.py
from MySQLdb import escape_string as escape


def create_in_clause(items):
    """Create an in clause for mysql"""
    in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
    in_clause = '( {} )'.format(in_clause)
    return in_clause


def mescape(*items):
    """Multiple escape"""
    escaped = [escape(str(item)) for item in items]
    #print("escaped is:", escaped)
    return escaped
from MySQLdb import escape_string as escape_


def create_in_clause(items):
    """Create an in clause for mysql"""
    in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
    in_clause = '( {} )'.format(in_clause)
    return in_clause


def mescape(*items):
    """Multiple escape"""
    return [escape_(str(item)).decode('utf8') for item in items]


def escape(string_):
    return escape_(string_).decode('utf8')
Add global method to convert binary string to plain string
Add global method to convert binary string to plain string

* wqflask/utility/db_tools.py: escape_string returns a binary string
  which introduces a bug when composing sql query string. The escaped
  strings have to be converted to plain text.
Python
agpl-3.0
pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2
from MySQLdb import escape_string as escape_


def create_in_clause(items):
    """Create an in clause for mysql"""
    in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
    in_clause = '( {} )'.format(in_clause)
    return in_clause


def mescape(*items):
    """Multiple escape"""
    return [escape_(str(item)).decode('utf8') for item in items]


def escape(string_):
    return escape_(string_).decode('utf8')
Add global method to convert binary string to plain string

* wqflask/utility/db_tools.py: escape_string returns a binary string
  which introduces a bug when composing sql query string. The escaped
  strings have to be converted to plain text.

from MySQLdb import escape_string as escape


def create_in_clause(items):
    """Create an in clause for mysql"""
    in_clause = ', '.join("'{}'".format(x) for x in mescape(*items))
    in_clause = '( {} )'.format(in_clause)
    return in_clause


def mescape(*items):
    """Multiple escape"""
    escaped = [escape(str(item)) for item in items]
    #print("escaped is:", escaped)
    return escaped
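
The bug described in the commit message above is easiest to see with a small demonstration (an editorial aside, assuming the mysqlclient/MySQLdb package is installed; the literal is illustrative):

from MySQLdb import escape_string

raw = escape_string("O'Brien")
print(raw)                                        # b"O\\'Brien" -- bytes under Python 3
print("name = '{}'".format(raw))                  # embeds b'...' into the SQL text
print("name = '{}'".format(raw.decode('utf8')))   # plain string, as intended
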
35778c48ba197803e2688732cf11d346838e7b7f
tests/integration/test_sqs.py
tests/integration/test_sqs.py
import os

from asyncaws import SQS
from tornado.testing import AsyncTestCase, gen_test

aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']


class TestSQS(AsyncTestCase):
    sqs = SQS(aws_key_id, aws_key_secret, aws_region, async=False)

    @gen_test(timeout=60)
    def test_create_queue(self):
        queue_url = self.sqs.create_queue(
            "test-queue", {"MessageRetentionPeriod": 60})
        self.assertIsInstance(queue_url, str)
        self.assertTrue(queue_url.startswith('http'))

        get_attr_result = self.sqs.get_queue_attributes(
            queue_url, ['MessageRetentionPeriod'])
        self.assertIsInstance(get_attr_result, dict)
        self.assertEqual(get_attr_result['MessageRetentionPeriod'], '60')

        add_perm_result = self.sqs.add_permission(
            queue_url, ['637085312181'], ["SendMessage"], "test-permission-id")
        self.assertIsInstance(add_perm_result, str)

        delete_result = self.sqs.delete_queue(queue_url)
        self.assertIsInstance(delete_result, str)
Add first integration test for SQS
Add first integration test for SQS
Python
mit
MA3STR0/AsyncAWS
import os

from asyncaws import SQS
from tornado.testing import AsyncTestCase, gen_test

aws_key_id = os.environ['AWS_ACCESS_KEY_ID']
aws_key_secret = os.environ['AWS_SECRET_ACCESS_KEY']
aws_region = os.environ['AWS_REGION']


class TestSQS(AsyncTestCase):
    sqs = SQS(aws_key_id, aws_key_secret, aws_region, async=False)

    @gen_test(timeout=60)
    def test_create_queue(self):
        queue_url = self.sqs.create_queue(
            "test-queue", {"MessageRetentionPeriod": 60})
        self.assertIsInstance(queue_url, str)
        self.assertTrue(queue_url.startswith('http'))

        get_attr_result = self.sqs.get_queue_attributes(
            queue_url, ['MessageRetentionPeriod'])
        self.assertIsInstance(get_attr_result, dict)
        self.assertEqual(get_attr_result['MessageRetentionPeriod'], '60')

        add_perm_result = self.sqs.add_permission(
            queue_url, ['637085312181'], ["SendMessage"], "test-permission-id")
        self.assertIsInstance(add_perm_result, str)

        delete_result = self.sqs.delete_queue(queue_url)
        self.assertIsInstance(delete_result, str)
Add first integration test for SQS
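
A portability note on the test above (editorial, not from the repository): async became a reserved keyword in Python 3.7, so the SQS(..., async=False) call is a SyntaxError on modern interpreters. A hedged workaround, assuming the library still accepts that parameter name:

# Passing a reserved-word keyword argument via dict unpacking:
sqs = SQS(aws_key_id, aws_key_secret, aws_region, **{"async": False})
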
8259a733e1f039cea55cfc5aad7d69e0fb37c43c
tests.py
tests.py
from money_conversion.money import Money
import unittest


class MoneyClassTest(unittest.TestCase):

    def setUp(self):
        self.twenty_euro = Money(20, 'EUR')

    def test_convert_euro_to_usd(self):
        twenty_usd = self.twenty_euro.to_usd()
        self.assertIsInstance(twenty_usd, Money)
        self.assertEqual('USD', twenty_usd.currency)
        self.assertEqual(21.8, twenty_usd.amount)

    def test_convert_euro_to_brl(self):
        twenty_brl = self.twenty_euro.to_brl()
        self.assertIsInstance(twenty_brl, Money)
        self.assertEqual('BRL', twenty_brl.currency)
        self.assertEqual(85, twenty_brl.amount)


if __name__ == '__main__':
    unittest.main()
from money_conversion.money import Money
import unittest


class MoneyClassTest(unittest.TestCase):

    def setUp(self):
        self.twenty_euro = Money(20, 'EUR')

    def test_convert_euro_to_usd(self):
        twenty_usd = self.twenty_euro.to_usd()
        self.assertIsInstance(twenty_usd, Money)
        self.assertEqual('USD', twenty_usd.currency)
        self.assertEqual(21.8, twenty_usd.amount)

    def test_convert_euro_to_brl(self):
        twenty_brl = self.twenty_euro.to_brl()
        self.assertIsInstance(twenty_brl, Money)
        self.assertEqual('BRL', twenty_brl.currency)
        self.assertEqual(85, twenty_brl.amount)

    def test_invalid_method_pattern_call(self):
        with self.assertRaises(AttributeError):
            twenty_brl = self.twenty_euro.batman()


if __name__ == '__main__':
    unittest.main()
Add test that validates method call
Add test that validates method call
Python
mit
mdsrosa/money-conversion-py
from money_conversion.money import Money
import unittest


class MoneyClassTest(unittest.TestCase):

    def setUp(self):
        self.twenty_euro = Money(20, 'EUR')

    def test_convert_euro_to_usd(self):
        twenty_usd = self.twenty_euro.to_usd()
        self.assertIsInstance(twenty_usd, Money)
        self.assertEqual('USD', twenty_usd.currency)
        self.assertEqual(21.8, twenty_usd.amount)

    def test_convert_euro_to_brl(self):
        twenty_brl = self.twenty_euro.to_brl()
        self.assertIsInstance(twenty_brl, Money)
        self.assertEqual('BRL', twenty_brl.currency)
        self.assertEqual(85, twenty_brl.amount)

    def test_invalid_method_pattern_call(self):
        with self.assertRaises(AttributeError):
            twenty_brl = self.twenty_euro.batman()


if __name__ == '__main__':
    unittest.main()
Add test that validates method call

from money_conversion.money import Money
import unittest


class MoneyClassTest(unittest.TestCase):

    def setUp(self):
        self.twenty_euro = Money(20, 'EUR')

    def test_convert_euro_to_usd(self):
        twenty_usd = self.twenty_euro.to_usd()
        self.assertIsInstance(twenty_usd, Money)
        self.assertEqual('USD', twenty_usd.currency)
        self.assertEqual(21.8, twenty_usd.amount)

    def test_convert_euro_to_brl(self):
        twenty_brl = self.twenty_euro.to_brl()
        self.assertIsInstance(twenty_brl, Money)
        self.assertEqual('BRL', twenty_brl.currency)
        self.assertEqual(85, twenty_brl.amount)


if __name__ == '__main__':
    unittest.main()
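
The new test expects an AttributeError from .batman(), which implies Money dispatches to_<currency> calls dynamically. The money_conversion package itself is not in the record, so the following is a hypothetical reconstruction of that pattern (the rates are made up to match the test's expected values):

class Money:
    RATES = {'EUR': {'USD': 1.09, 'BRL': 4.25}}  # illustrative rates only

    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency

    def __getattr__(self, name):
        # Only names shaped like to_xxx become conversion methods.
        if name.startswith('to_'):
            target = name[3:].upper()
            rate = self.RATES.get(self.currency, {}).get(target)
            if rate is not None:
                return lambda: Money(round(self.amount * rate, 2), target)
        raise AttributeError(name)  # .batman() lands here, as the test asserts
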
71db89cad06dc0aa81e0a7178712e8beb7e7cb01
turbustat/tests/test_cramer.py
turbustat/tests/test_cramer.py
# Licensed under an MIT open source license - see LICENSE

'''
Test functions for Cramer
'''

from unittest import TestCase

import numpy as np
import numpy.testing as npt

from ..statistics import Cramer_Distance

from ._testing_data import \
    dataset1, dataset2, computed_data, computed_distances


class testCramer(TestCase):

    def test_cramer(self):
        self.tester = \
            Cramer_Distance(dataset1["cube"], dataset2["cube"],
                            noise_value1=0.1,
                            noise_value2=0.1).distance_metric(normalize=False)

        npt.assert_allclose(self.tester.data_matrix1,
                            computed_data["cramer_val"])
        npt.assert_almost_equal(self.tester.distance,
                                computed_distances['cramer_distance'])

    def test_cramer_spatial_diff(self):

        small_data = dataset1["cube"][0][:, :26, :26]

        self.tester2 = Cramer_Distance(small_data, dataset2["cube"])
        self.tester2.distance_metric(normalize=False)

        self.tester3 = Cramer_Distance(dataset2["cube"], small_data)
        self.tester3.distance_metric(normalize=False)

        npt.assert_almost_equal(self.tester2.distance, self.tester3.distance)
# Licensed under an MIT open source license - see LICENSE

'''
Test functions for Cramer
'''

import numpy.testing as npt

from ..statistics import Cramer_Distance

from ._testing_data import \
    dataset1, dataset2, computed_data, computed_distances


def test_cramer():
    tester = \
        Cramer_Distance(dataset1["cube"], dataset2["cube"],
                        noise_value1=0.1,
                        noise_value2=0.1).distance_metric(normalize=False)

    npt.assert_allclose(tester.data_matrix1,
                        computed_data["cramer_val"])
    npt.assert_almost_equal(tester.distance,
                            computed_distances['cramer_distance'])


def test_cramer_spatial_diff():

    small_data = dataset1["cube"][0][:, :26, :26]

    tester2 = Cramer_Distance(small_data, dataset2["cube"])
    tester2.distance_metric(normalize=False)

    tester3 = Cramer_Distance(dataset2["cube"], small_data)
    tester3.distance_metric(normalize=False)

    npt.assert_almost_equal(tester2.distance, tester3.distance)
Remove importing UnitCase from Cramer tests
Remove importing UnitCase from Cramer tests
Python
mit
Astroua/TurbuStat,e-koch/TurbuStat
# Licensed under an MIT open source license - see LICENSE

'''
Test functions for Cramer
'''

import numpy.testing as npt

from ..statistics import Cramer_Distance

from ._testing_data import \
    dataset1, dataset2, computed_data, computed_distances


def test_cramer():
    tester = \
        Cramer_Distance(dataset1["cube"], dataset2["cube"],
                        noise_value1=0.1,
                        noise_value2=0.1).distance_metric(normalize=False)

    npt.assert_allclose(tester.data_matrix1,
                        computed_data["cramer_val"])
    npt.assert_almost_equal(tester.distance,
                            computed_distances['cramer_distance'])


def test_cramer_spatial_diff():

    small_data = dataset1["cube"][0][:, :26, :26]

    tester2 = Cramer_Distance(small_data, dataset2["cube"])
    tester2.distance_metric(normalize=False)

    tester3 = Cramer_Distance(dataset2["cube"], small_data)
    tester3.distance_metric(normalize=False)

    npt.assert_almost_equal(tester2.distance, tester3.distance)
Remove importing UnitCase from Cramer tests

# Licensed under an MIT open source license - see LICENSE

'''
Test functions for Cramer
'''

from unittest import TestCase

import numpy as np
import numpy.testing as npt

from ..statistics import Cramer_Distance

from ._testing_data import \
    dataset1, dataset2, computed_data, computed_distances


class testCramer(TestCase):

    def test_cramer(self):
        self.tester = \
            Cramer_Distance(dataset1["cube"], dataset2["cube"],
                            noise_value1=0.1,
                            noise_value2=0.1).distance_metric(normalize=False)

        npt.assert_allclose(self.tester.data_matrix1,
                            computed_data["cramer_val"])
        npt.assert_almost_equal(self.tester.distance,
                                computed_distances['cramer_distance'])

    def test_cramer_spatial_diff(self):

        small_data = dataset1["cube"][0][:, :26, :26]

        self.tester2 = Cramer_Distance(small_data, dataset2["cube"])
        self.tester2.distance_metric(normalize=False)

        self.tester3 = Cramer_Distance(dataset2["cube"], small_data)
        self.tester3.distance_metric(normalize=False)

        npt.assert_almost_equal(self.tester2.distance, self.tester3.distance)
57051d3e59a4664a536588c19ae0581cb92f1350
timed/redmine/admin.py
timed/redmine/admin.py
from django.contrib import admin

from timed.projects.admin import ProjectAdmin
from timed.projects.models import Project
from timed_adfinis.redmine.models import RedmineProject

admin.site.unregister(Project)


class RedmineProjectInline(admin.StackedInline):
    model = RedmineProject


@admin.register(Project)
class ProjectAdmin(ProjectAdmin):
    """Adfinis specific project including Redmine issue configuration."""

    inlines = ProjectAdmin.inlines + [RedmineProjectInline, ]
Add RedmineProject as inline of ProjectAdmin
Add RedmineProject as inline of ProjectAdmin
Python
agpl-3.0
adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend,adfinis-sygroup/timed-backend
from django.contrib import admin

from timed.projects.admin import ProjectAdmin
from timed.projects.models import Project
from timed_adfinis.redmine.models import RedmineProject

admin.site.unregister(Project)


class RedmineProjectInline(admin.StackedInline):
    model = RedmineProject


@admin.register(Project)
class ProjectAdmin(ProjectAdmin):
    """Adfinis specific project including Redmine issue configuration."""

    inlines = ProjectAdmin.inlines + [RedmineProjectInline, ]
Add RedmineProject as inline of ProjectAdmin
9732f5e1bb667b6683c9e97db03d293373909da6
tests/test_process.py
tests/test_process.py
import unittest
import logging
import time

from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr

logging.basicConfig()


@attr('process')
class ProcessTestCase(unittest.TestCase):

    def build_config(self, config, n):
        critical_low = [2, 2, 2, -1, 2, -2, 2]
        critical_high = [2, 2, 2, 3, -1, 4, -2]
        warning_low = [1, -1, 2, -1, 2, -1, 2]
        warning_high = [1, 3, -1, 2, -1, 3, -1]

        for i in range(7):
            name = 'ssh' + str(i)
            config['instances'].append({
                'name': name,
                'search_string': ['ssh', 'sshd'],
                'thresholds': {
                    'critical': [n - critical_low[i], n + critical_high[i]],
                    'warning': [n - warning_low[i], n + warning_high[i]]
                }
            })
        return config

    def testCheck(self):
        config = {
            'init_config': {},
            'instances': []
        }
        self.agentConfig = {
            'version': '0.1',
            'api_key': 'toto'
        }

        search_string = ['ssh', 'sshd']

        self.check = load_check('process', config, self.agentConfig)
        pids = self.check.find_pids(search_string)

        config = self.build_config(config, len(pids))

        for i in range(7):
            self.check.check(config['instances'][i])
            time.sleep(1)

        service_checks = self.check.get_service_checks()

        assert service_checks
        self.assertTrue(type(service_checks) == type([]))
        self.assertTrue(len(service_checks) > 0)
        self.assertEquals(len([t for t in service_checks if t['status']== 0]), 1, service_checks)
        self.assertEquals(len([t for t in service_checks if t['status']== 1]), 2, service_checks)
        self.assertEquals(len([t for t in service_checks if t['status']== 2]), 4, service_checks)


if __name__ == "__main__":
    unittest.main()
Add tests for process check
Add tests for process check

This test calls the check method in process 7 times and checks the
process_check output.
The result should be:
1 OK
2 WARNING
4 CRITICAL
Python
bsd-3-clause
jraede/dd-agent,benmccann/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,benmccann/dd-agent,truthbk/dd-agent,jshum/dd-agent,citrusleaf/dd-agent,gphat/dd-agent,AniruddhaSAtre/dd-agent,AniruddhaSAtre/dd-agent,jvassev/dd-agent,urosgruber/dd-agent,jraede/dd-agent,Mashape/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,brettlangdon/dd-agent,darron/dd-agent,oneandoneis2/dd-agent,pfmooney/dd-agent,tebriel/dd-agent,gphat/dd-agent,remh/dd-agent,zendesk/dd-agent,brettlangdon/dd-agent,pmav99/praktoras,pmav99/praktoras,jshum/dd-agent,jyogi/purvar-agent,urosgruber/dd-agent,eeroniemi/dd-agent,Shopify/dd-agent,amalakar/dd-agent,packetloop/dd-agent,jamesandariese/dd-agent,benmccann/dd-agent,GabrielNicolasAvellaneda/dd-agent,polynomial/dd-agent,urosgruber/dd-agent,yuecong/dd-agent,gphat/dd-agent,PagerDuty/dd-agent,huhongbo/dd-agent,darron/dd-agent,a20012251/dd-agent,AniruddhaSAtre/dd-agent,oneandoneis2/dd-agent,lookout/dd-agent,darron/dd-agent,AntoCard/powerdns-recursor_check,pfmooney/dd-agent,polynomial/dd-agent,joelvanvelden/dd-agent,eeroniemi/dd-agent,jvassev/dd-agent,tebriel/dd-agent,brettlangdon/dd-agent,c960657/dd-agent,oneandoneis2/dd-agent,amalakar/dd-agent,JohnLZeller/dd-agent,jshum/dd-agent,cberry777/dd-agent,yuecong/dd-agent,pmav99/praktoras,a20012251/dd-agent,pmav99/praktoras,urosgruber/dd-agent,truthbk/dd-agent,cberry777/dd-agent,citrusleaf/dd-agent,jraede/dd-agent,yuecong/dd-agent,a20012251/dd-agent,GabrielNicolasAvellaneda/dd-agent,ess/dd-agent,a20012251/dd-agent,joelvanvelden/dd-agent,GabrielNicolasAvellaneda/dd-agent,lookout/dd-agent,ess/dd-agent,JohnLZeller/dd-agent,darron/dd-agent,relateiq/dd-agent,Mashape/dd-agent,lookout/dd-agent,yuecong/dd-agent,c960657/dd-agent,citrusleaf/dd-agent,remh/dd-agent,pfmooney/dd-agent,tebriel/dd-agent,mderomph-coolblue/dd-agent,JohnLZeller/dd-agent,remh/dd-agent,jshum/dd-agent,Wattpad/dd-agent,zendesk/dd-agent,packetloop/dd-agent,benmccann/dd-agent,eeroniemi/dd-agent,Shopify/dd-agent,ess/dd-agent,jvassev/dd-agent,darron/dd-agent,huhongbo/dd-agent,Shopify/dd-agent,guruxu/dd-agent,guruxu/dd-agent,Shopify/dd-agent,jamesandariese/dd-agent,Wattpad/dd-agent,joelvanvelden/dd-agent,pfmooney/dd-agent,Mashape/dd-agent,manolama/dd-agent,mderomph-coolblue/dd-agent,remh/dd-agent,manolama/dd-agent,polynomial/dd-agent,Wattpad/dd-agent,takus/dd-agent,yuecong/dd-agent,manolama/dd-agent,takus/dd-agent,Mashape/dd-agent,manolama/dd-agent,tebriel/dd-agent,polynomial/dd-agent,gphat/dd-agent,takus/dd-agent,takus/dd-agent,c960657/dd-agent,truthbk/dd-agent,AntoCard/powerdns-recursor_check,cberry777/dd-agent,Wattpad/dd-agent,jyogi/purvar-agent,citrusleaf/dd-agent,Mashape/dd-agent,guruxu/dd-agent,packetloop/dd-agent,indeedops/dd-agent,truthbk/dd-agent,mderomph-coolblue/dd-agent,eeroniemi/dd-agent,jyogi/purvar-agent,tebriel/dd-agent,AniruddhaSAtre/dd-agent,AntoCard/powerdns-recursor_check,relateiq/dd-agent,cberry777/dd-agent,jamesandariese/dd-agent,jvassev/dd-agent,guruxu/dd-agent,urosgruber/dd-agent,joelvanvelden/dd-agent,takus/dd-agent,c960657/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,amalakar/dd-agent,ess/dd-agent,brettlangdon/dd-agent,amalakar/dd-agent,indeedops/dd-agent,lookout/dd-agent,PagerDuty/dd-agent,indeedops/dd-agent,manolama/dd-agent,citrusleaf/dd-agent,zendesk/dd-agent,PagerDuty/dd-agent,jamesandariese/dd-agent,jyogi/purvar-agent,Wattpad/dd-agent,polynomial/dd-agent,indeedops/dd-agent,indeedops/dd-agent,eeroniemi/dd-agent,c960657/dd-agent,mderomph-coolblue/dd-agent,JohnLZeller/dd-agent,pfmooney/dd-agent,jshum/dd-agent,lookout/dd-agent,huhongbo/dd-agent,relateiq/
dd-agent,jraede/dd-agent,jraede/dd-agent,zendesk/dd-agent,PagerDuty/dd-agent,gphat/dd-agent,zendesk/dd-agent,ess/dd-agent,Shopify/dd-agent,benmccann/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,truthbk/dd-agent,jamesandariese/dd-agent,JohnLZeller/dd-agent,AntoCard/powerdns-recursor_check,mderomph-coolblue/dd-agent,guruxu/dd-agent,amalakar/dd-agent,jyogi/purvar-agent,joelvanvelden/dd-agent,a20012251/dd-agent,packetloop/dd-agent,pmav99/praktoras,relateiq/dd-agent,relateiq/dd-agent,jvassev/dd-agent,AntoCard/powerdns-recursor_check,GabrielNicolasAvellaneda/dd-agent,GabrielNicolasAvellaneda/dd-agent,remh/dd-agent,brettlangdon/dd-agent
import unittest
import logging
import time

from util import get_hostname
from tests.common import load_check
from nose.plugins.attrib import attr

logging.basicConfig()


@attr('process')
class ProcessTestCase(unittest.TestCase):

    def build_config(self, config, n):
        critical_low = [2, 2, 2, -1, 2, -2, 2]
        critical_high = [2, 2, 2, 3, -1, 4, -2]
        warning_low = [1, -1, 2, -1, 2, -1, 2]
        warning_high = [1, 3, -1, 2, -1, 3, -1]

        for i in range(7):
            name = 'ssh' + str(i)
            config['instances'].append({
                'name': name,
                'search_string': ['ssh', 'sshd'],
                'thresholds': {
                    'critical': [n - critical_low[i], n + critical_high[i]],
                    'warning': [n - warning_low[i], n + warning_high[i]]
                }
            })
        return config

    def testCheck(self):
        config = {
            'init_config': {},
            'instances': []
        }
        self.agentConfig = {
            'version': '0.1',
            'api_key': 'toto'
        }

        search_string = ['ssh', 'sshd']

        self.check = load_check('process', config, self.agentConfig)
        pids = self.check.find_pids(search_string)

        config = self.build_config(config, len(pids))

        for i in range(7):
            self.check.check(config['instances'][i])
            time.sleep(1)

        service_checks = self.check.get_service_checks()

        assert service_checks
        self.assertTrue(type(service_checks) == type([]))
        self.assertTrue(len(service_checks) > 0)
        self.assertEquals(len([t for t in service_checks if t['status']== 0]), 1, service_checks)
        self.assertEquals(len([t for t in service_checks if t['status']== 1]), 2, service_checks)
        self.assertEquals(len([t for t in service_checks if t['status']== 2]), 4, service_checks)


if __name__ == "__main__":
    unittest.main()
Add tests for process check

This test calls the check method in process 7 times and checks the
process_check output.
The result should be:
1 OK
2 WARNING
4 CRITICAL
27ab83010f7cc8308debfec16fab38544a9c7ce7
running.py
running.py
import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser

t = '1984-06-02T19:05:00.000Z'

# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')

tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at


def convert_time_to_unix(time):
    parsed_time = dateutil.parser.parse(time)
    time_in_unix = parsed_time.strftime('%s')
    return time_in_unix


unix_run_time = convert_time_to_unix(run_time)

darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/" + str(tcx.latitude) + "," + str(tcx.longitude) + "," + unix_run_time + "?exclude=currently,flags").read()

print(darksky_request)


class getWeather:
    def __init__(self, date, time):
        self.date = date
        self.time = time

    def goodbye(self, date):
        print("my name is " + date)
import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser
import json

# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')

tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at


def convert_time_to_unix(time):
    parsed_time = dateutil.parser.parse(time)
    time_in_unix = parsed_time.strftime('%s')
    return time_in_unix


unix_run_time = convert_time_to_unix(run_time)

darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/" + str(tcx.latitude) + "," + str(tcx.longitude) + "," + unix_run_time + "?exclude=currently,flags").read()

# Decode JSON
darksky_json = json.loads(darksky_request.decode('utf-8'))

for i in darksky_json['hourly']['data']:
    print(i['temperature'])


class getWeather:
    def __init__(self, date, time):
        self.date = date
        self.time = time

    def goodbye(self, date):
        print("my name is " + date)
Print all hourly temperatures from run date
Print all hourly temperatures from run date
Python
mit
briansuhr/slowburn
import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser
import json

# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')

tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at


def convert_time_to_unix(time):
    parsed_time = dateutil.parser.parse(time)
    time_in_unix = parsed_time.strftime('%s')
    return time_in_unix


unix_run_time = convert_time_to_unix(run_time)

darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/" + str(tcx.latitude) + "," + str(tcx.longitude) + "," + unix_run_time + "?exclude=currently,flags").read()

# Decode JSON
darksky_json = json.loads(darksky_request.decode('utf-8'))

for i in darksky_json['hourly']['data']:
    print(i['temperature'])


class getWeather:
    def __init__(self, date, time):
        self.date = date
        self.time = time

    def goodbye(self, date):
        print("my name is " + date)
Print all hourly temperatures from run date

import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser

t = '1984-06-02T19:05:00.000Z'

# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')

tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at


def convert_time_to_unix(time):
    parsed_time = dateutil.parser.parse(time)
    time_in_unix = parsed_time.strftime('%s')
    return time_in_unix


unix_run_time = convert_time_to_unix(run_time)

darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/" + str(tcx.latitude) + "," + str(tcx.longitude) + "," + unix_run_time + "?exclude=currently,flags").read()

print(darksky_request)


class getWeather:
    def __init__(self, date, time):
        self.date = date
        self.time = time

    def goodbye(self, date):
        print("my name is " + date)
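
One wrinkle in convert_time_to_unix above (an editorial note): strftime('%s') is not a documented format code and fails on platforms whose C library lacks it. A hedged, portable alternative for the timezone-aware strings tcxparser produces (the sample input is illustrative):

import dateutil.parser

def convert_time_to_unix(time_string):
    parsed_time = dateutil.parser.parse(time_string)
    return str(int(parsed_time.timestamp()))  # portable across platforms

print(convert_time_to_unix('2017-06-15T12:00:00Z'))
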
7e98a76ac455a8c69950104766719cde313bbb74
tests/CrawlerProcess/asyncio_deferred_signal.py
tests/CrawlerProcess/asyncio_deferred_signal.py
import asyncio
import sys

import scrapy
from scrapy.crawler import CrawlerProcess
from twisted.internet.defer import Deferred


class UppercasePipeline:
    async def _open_spider(self, spider):
        spider.logger.info("async pipeline opened!")
        await asyncio.sleep(0.1)

    def open_spider(self, spider):
        loop = asyncio.get_event_loop()
        return Deferred.fromFuture(loop.create_task(self._open_spider(spider)))

    def process_item(self, item, spider):
        return {"url": item["url"].upper()}


class UrlSpider(scrapy.Spider):
    name = "url_spider"
    start_urls = ["data:,"]
    custom_settings = {
        "ITEM_PIPELINES": {UppercasePipeline: 100},
    }

    def parse(self, response):
        yield {"url": response.url}


if __name__ == "__main__":
    try:
        ASYNCIO_EVENT_LOOP = sys.argv[1]
    except IndexError:
        ASYNCIO_EVENT_LOOP = None

    process = CrawlerProcess(settings={
        "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
        "ASYNCIO_EVENT_LOOP": ASYNCIO_EVENT_LOOP,
    })
    process.crawl(UrlSpider)
    process.start()
import asyncio
import sys

from scrapy import Spider
from scrapy.crawler import CrawlerProcess
from scrapy.utils.defer import deferred_from_coro
from twisted.internet.defer import Deferred


class UppercasePipeline:
    async def _open_spider(self, spider):
        spider.logger.info("async pipeline opened!")
        await asyncio.sleep(0.1)

    def open_spider(self, spider):
        loop = asyncio.get_event_loop()
        return deferred_from_coro(self._open_spider(spider))

    def process_item(self, item, spider):
        return {"url": item["url"].upper()}


class UrlSpider(Spider):
    name = "url_spider"
    start_urls = ["data:,"]
    custom_settings = {
        "ITEM_PIPELINES": {UppercasePipeline: 100},
    }

    def parse(self, response):
        yield {"url": response.url}


if __name__ == "__main__":
    try:
        ASYNCIO_EVENT_LOOP = sys.argv[1]
    except IndexError:
        ASYNCIO_EVENT_LOOP = None

    process = CrawlerProcess(settings={
        "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
        "ASYNCIO_EVENT_LOOP": ASYNCIO_EVENT_LOOP,
    })
    process.crawl(UrlSpider)
    process.start()
Use deferred_from_coro in asyncio test
Use deferred_from_coro in asyncio test
Python
bsd-3-clause
elacuesta/scrapy,elacuesta/scrapy,scrapy/scrapy,pablohoffman/scrapy,dangra/scrapy,pawelmhm/scrapy,pablohoffman/scrapy,dangra/scrapy,scrapy/scrapy,pawelmhm/scrapy,dangra/scrapy,pawelmhm/scrapy,pablohoffman/scrapy,elacuesta/scrapy,scrapy/scrapy
import asyncio
import sys

from scrapy import Spider
from scrapy.crawler import CrawlerProcess
from scrapy.utils.defer import deferred_from_coro
from twisted.internet.defer import Deferred


class UppercasePipeline:
    async def _open_spider(self, spider):
        spider.logger.info("async pipeline opened!")
        await asyncio.sleep(0.1)

    def open_spider(self, spider):
        loop = asyncio.get_event_loop()
        return deferred_from_coro(self._open_spider(spider))

    def process_item(self, item, spider):
        return {"url": item["url"].upper()}


class UrlSpider(Spider):
    name = "url_spider"
    start_urls = ["data:,"]
    custom_settings = {
        "ITEM_PIPELINES": {UppercasePipeline: 100},
    }

    def parse(self, response):
        yield {"url": response.url}


if __name__ == "__main__":
    try:
        ASYNCIO_EVENT_LOOP = sys.argv[1]
    except IndexError:
        ASYNCIO_EVENT_LOOP = None

    process = CrawlerProcess(settings={
        "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
        "ASYNCIO_EVENT_LOOP": ASYNCIO_EVENT_LOOP,
    })
    process.crawl(UrlSpider)
    process.start()
Use deferred_from_coro in asyncio test

import asyncio
import sys

import scrapy
from scrapy.crawler import CrawlerProcess
from twisted.internet.defer import Deferred


class UppercasePipeline:
    async def _open_spider(self, spider):
        spider.logger.info("async pipeline opened!")
        await asyncio.sleep(0.1)

    def open_spider(self, spider):
        loop = asyncio.get_event_loop()
        return Deferred.fromFuture(loop.create_task(self._open_spider(spider)))

    def process_item(self, item, spider):
        return {"url": item["url"].upper()}


class UrlSpider(scrapy.Spider):
    name = "url_spider"
    start_urls = ["data:,"]
    custom_settings = {
        "ITEM_PIPELINES": {UppercasePipeline: 100},
    }

    def parse(self, response):
        yield {"url": response.url}


if __name__ == "__main__":
    try:
        ASYNCIO_EVENT_LOOP = sys.argv[1]
    except IndexError:
        ASYNCIO_EVENT_LOOP = None

    process = CrawlerProcess(settings={
        "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
        "ASYNCIO_EVENT_LOOP": ASYNCIO_EVENT_LOOP,
    })
    process.crawl(UrlSpider)
    process.start()
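The change in this record swaps a hand-rolled future-to-Deferred bridge for Scrapy's deferred_from_coro helper. Roughly, such a helper boils down to the pattern below; this is a simplified sketch assuming an asyncio reactor is already installed, not Scrapy's actual implementation, and the function name is made up for illustration:

import asyncio
from twisted.internet.defer import Deferred

def coro_to_deferred(coro):
    # Schedule the coroutine on the running asyncio loop and wrap the
    # resulting task (an asyncio Future) in a Twisted Deferred.
    loop = asyncio.get_event_loop()
    return Deferred.fromFuture(loop.create_task(coro))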
abe40e3c82ef1f351275a59b2e537f43530caa0c
app/cleanup_stories.py
app/cleanup_stories.py
from pymongo import MongoClient
from fetch_stories import get_mongo_client, close_mongo_client
from bson import ObjectId
from datetime import datetime, timedelta


def remove_old_stories():
    client = get_mongo_client()
    db = client.get_default_database()
    article_collection = db['articles']
    two_days_ago = datetime.utcnow() - timedelta(days=2)
    two_days_ago = ObjectId.from_datetime(two_days_ago)
    query = { '_id' : { '$lt' : two_days_ago} }
    article_collection.remove(query)
    close_mongo_client(client)


def main():
    remove_old_stories()


if __name__ == '__main__':
    main()
Clean up db script (remove articles older than two days).
Clean up db script (remove articles older than two days).
Python
mit
hw3jung/Gucci,hw3jung/Gucci
from pymongo import MongoClient
from fetch_stories import get_mongo_client, close_mongo_client
from bson import ObjectId
from datetime import datetime, timedelta


def remove_old_stories():
    client = get_mongo_client()
    db = client.get_default_database()
    article_collection = db['articles']
    two_days_ago = datetime.utcnow() - timedelta(days=2)
    two_days_ago = ObjectId.from_datetime(two_days_ago)
    query = { '_id' : { '$lt' : two_days_ago} }
    article_collection.remove(query)
    close_mongo_client(client)


def main():
    remove_old_stories()


if __name__ == '__main__':
    main()
Clean up db script (remove articles older than two days).
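This script works because MongoDB ObjectIds embed their creation time, so ObjectId.from_datetime can build a synthetic boundary id for range queries on _id. A minimal sketch of the same idea; the collection handle is hypothetical, and delete_many is the non-deprecated counterpart of remove:

from datetime import datetime, timedelta
from bson import ObjectId

cutoff = datetime.utcnow() - timedelta(days=2)
boundary = ObjectId.from_datetime(cutoff)  # synthetic id at the cutoff time
query = {'_id': {'$lt': boundary}}
# article_collection.delete_many(query)   # preferred over remove() in modern pymongo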
7c49517c3c24d239c2bd44d82916b4f3d90ca1e2
utilities/__init__.py
utilities/__init__.py
#! /usr/bin/env python

from subprocess import Popen, PIPE


def launch(cmd):
    """ Fork the specified command, returning a tuple of (stdout, stderr) """
    return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()


def get_stdout(cmd):
    """ Fork the specified command, returning stdout """
    return launch(cmd)[0]


def get_stderr(cmd):
    """ Fork the specified command, returning stderr """
    return launch(cmd)[1]
#! /usr/bin/env python

from subprocess import Popen, PIPE


def popen(cmd):
    """ Fork the specified command, returning a tuple of (stdout, stderr) """
    return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()


def get_stdout(cmd):
    """ Fork the specified command, returning stdout """
    return popen(cmd)[0]


def get_stderr(cmd):
    """ Fork the specified command, returning stderr """
    return popen(cmd)[1]
Switch to using popen as the function name to stick more to subprocess naming
Switch to using popen as the function name to stick more to subprocess naming
Python
mit
IanLee1521/utilities
#! /usr/bin/env python

from subprocess import Popen, PIPE


def popen(cmd):
    """ Fork the specified command, returning a tuple of (stdout, stderr) """
    return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()


def get_stdout(cmd):
    """ Fork the specified command, returning stdout """
    return popen(cmd)[0]


def get_stderr(cmd):
    """ Fork the specified command, returning stderr """
    return popen(cmd)[1]
Switch to using popen as the function name to stick more to subprocess naming

#! /usr/bin/env python

from subprocess import Popen, PIPE


def launch(cmd):
    """ Fork the specified command, returning a tuple of (stdout, stderr) """
    return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()


def get_stdout(cmd):
    """ Fork the specified command, returning stdout """
    return launch(cmd)[0]


def get_stderr(cmd):
    """ Fork the specified command, returning stderr """
    return launch(cmd)[1]
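For reference, the renamed helper is used like this; a short sketch (the command is arbitrary, and shell=True should only be used with trusted input):

from subprocess import Popen, PIPE

def popen(cmd):
    # Run the command in a shell and collect both output streams.
    return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()

stdout, stderr = popen('echo hello')
print(stdout)  # b'hello\n' on Python 3, 'hello\n' on Python 2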
9a52024ff5b8175ee8b8d4665d3c8c667003019b
glitter/blocks/redactor/tests.py
glitter/blocks/redactor/tests.py
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".

Replace this with more appropriate tests for your application.
"""

from django.test import TestCase


class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        self.assertEqual(1 + 1, 2)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".

Replace this with more appropriate tests for your application.
"""

from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase

from glitter.models import Version, ContentBlock
from glitter.pages.models import Page

from .models import Redactor


class RedactorTestCase(TestCase):
    def setUp(self):
        User = get_user_model()
        page = Page.objects.create(url='/redactor/', title='Test page')
        self.page_content_type = ContentType.objects.get_for_model(Page)
        self.editor = User.objects.create_user(username='redactor', password='redactor')
        page_version = Version.objects.create(
            content_type=self.page_content_type, object_id=page.id,
            template_name='glitter/sample.html', owner=self.editor
        )
        self.redactor_block = Redactor.objects.create(
            content='Test'
        )
        self.content_block = ContentBlock.objects.create(
            obj_version=page_version,
            column='content',
            position=1,
            content_type=ContentType.objects.get_for_model(self.redactor_block),
            object_id=self.redactor_block.id
        )
        self.redactor_block.content_block = self.content_block
        self.redactor_block.save()

    def test_existance(self):
        redactor = Redactor.objects.get(id=self.redactor_block.id)
        self.assertEqual(redactor.id, self.redactor_block.id)
Add test for redactor block creation
Add test for redactor block creation
Python
bsd-3-clause
developersociety/django-glitter,blancltd/django-glitter,developersociety/django-glitter,developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".

Replace this with more appropriate tests for your application.
"""

from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase

from glitter.models import Version, ContentBlock
from glitter.pages.models import Page

from .models import Redactor


class RedactorTestCase(TestCase):
    def setUp(self):
        User = get_user_model()
        page = Page.objects.create(url='/redactor/', title='Test page')
        self.page_content_type = ContentType.objects.get_for_model(Page)
        self.editor = User.objects.create_user(username='redactor', password='redactor')
        page_version = Version.objects.create(
            content_type=self.page_content_type, object_id=page.id,
            template_name='glitter/sample.html', owner=self.editor
        )
        self.redactor_block = Redactor.objects.create(
            content='Test'
        )
        self.content_block = ContentBlock.objects.create(
            obj_version=page_version,
            column='content',
            position=1,
            content_type=ContentType.objects.get_for_model(self.redactor_block),
            object_id=self.redactor_block.id
        )
        self.redactor_block.content_block = self.content_block
        self.redactor_block.save()

    def test_existance(self):
        redactor = Redactor.objects.get(id=self.redactor_block.id)
        self.assertEqual(redactor.id, self.redactor_block.id)
Add test for redactor block creation

"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".

Replace this with more appropriate tests for your application.
"""

from django.test import TestCase


class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        self.assertEqual(1 + 1, 2)
ca6d80429cb8ccdac7669b444e5b4d2e88aed098
site/cgi/csv-columns.py
site/cgi/csv-columns.py
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Give back the columns of a CSV and the in
# http://www.tutorialspoint.com/python/python_cgi_programming.htm

import cgi
import csv
import sys
import codecs
import cgitb

CSV_DIR = '../csv/'    # CSV upload directory

# UTF-8 hack
# from http://stackoverflow.com/a/11764727
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# If you need input too, read from char_stream as you would sys.stdin
#char_stream = codecs.getreader('utf-8')(sys.stdin)
# python 2.x sys.stdout.encoding by default is None
# better option would be setting os.environ.get('PYTHONIOENCODING') to UTF-8

cgitb.enable()    # pretty debugging

form = cgi.FieldStorage()
filename = form.getvalue('dataset')

f = open(CSV_DIR + filename, 'r')
r = csv.reader(f, dialect=csv.excel)    # Create CSV row reader
col_names = next(r)

c2 = [ n.encode('utf-8') for n in col_names ]
response = { 'columns' : c2 }

print '''\
Status: 200\r
Content-Type: application/json;charset=UTF-8\r
\r
{ 'columns' : [%s] }\r
''' % ( "'" + "','".join(col_names).encode('utf-8') + "'", )
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Give back the columns of a CSV and the in
# http://www.tutorialspoint.com/python/python_cgi_programming.htm

import cgi
import csv
import sys
import codecs
import cgitb

CSV_DIR = '../csv/'    # CSV upload directory

# UTF-8 hack
# from http://stackoverflow.com/a/11764727
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# If you need input too, read from char_stream as you would sys.stdin
#char_stream = codecs.getreader('utf-8')(sys.stdin)
# python 2.x sys.stdout.encoding by default is None
# better option would be setting os.environ.get('PYTHONIOENCODING') to UTF-8

cgitb.enable()    # pretty debugging

form = cgi.FieldStorage()
filename = form.getvalue('dataset')

f = open(CSV_DIR + filename, 'r')
r = csv.reader(f, dialect=csv.excel)    # Create CSV row reader
col_names = next(r)

print '''\
Status: 200\r
Content-Type: application/json;charset=UTF-8\r
\r
{ "columns" : [%s] }\r
''' % ( '"' + '","'.join(col_names).encode('utf-8') + '"', )
Fix column listing, use double quotes for JSON, remove old stuff
Fix column listing, use double quotes for JSON, remove old stuff
Python
agpl-3.0
alejosanchez/CSVBenford,alejosanchez/CSVBenford
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Give back the columns of a CSV and the in
# http://www.tutorialspoint.com/python/python_cgi_programming.htm

import cgi
import csv
import sys
import codecs
import cgitb

CSV_DIR = '../csv/'    # CSV upload directory

# UTF-8 hack
# from http://stackoverflow.com/a/11764727
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# If you need input too, read from char_stream as you would sys.stdin
#char_stream = codecs.getreader('utf-8')(sys.stdin)
# python 2.x sys.stdout.encoding by default is None
# better option would be setting os.environ.get('PYTHONIOENCODING') to UTF-8

cgitb.enable()    # pretty debugging

form = cgi.FieldStorage()
filename = form.getvalue('dataset')

f = open(CSV_DIR + filename, 'r')
r = csv.reader(f, dialect=csv.excel)    # Create CSV row reader
col_names = next(r)

print '''\
Status: 200\r
Content-Type: application/json;charset=UTF-8\r
\r
{ "columns" : [%s] }\r
''' % ( '"' + '","'.join(col_names).encode('utf-8') + '"', )
Fix column listing, use double quotes for JSON, remove old stuff

#!/usr/bin/python
# -*- coding: utf-8 -*-

# Give back the columns of a CSV and the in
# http://www.tutorialspoint.com/python/python_cgi_programming.htm

import cgi
import csv
import sys
import codecs
import cgitb

CSV_DIR = '../csv/'    # CSV upload directory

# UTF-8 hack
# from http://stackoverflow.com/a/11764727
reload(sys)
sys.setdefaultencoding('utf-8')
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
# If you need input too, read from char_stream as you would sys.stdin
#char_stream = codecs.getreader('utf-8')(sys.stdin)
# python 2.x sys.stdout.encoding by default is None
# better option would be setting os.environ.get('PYTHONIOENCODING') to UTF-8

cgitb.enable()    # pretty debugging

form = cgi.FieldStorage()
filename = form.getvalue('dataset')

f = open(CSV_DIR + filename, 'r')
r = csv.reader(f, dialect=csv.excel)    # Create CSV row reader
col_names = next(r)

c2 = [ n.encode('utf-8') for n in col_names ]
response = { 'columns' : c2 }

print '''\
Status: 200\r
Content-Type: application/json;charset=UTF-8\r
\r
{ 'columns' : [%s] }\r
''' % ( "'" + "','".join(col_names).encode('utf-8') + "'", )
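The underlying bug in this record is that single-quoted strings are not valid JSON. Hand-building the body invites exactly this class of error; a sketch of the safer route via json.dumps, which always emits double quotes and handles escaping:

import json

col_names = ['name', 'amount']
body = json.dumps({'columns': col_names})
print(body)  # {"columns": ["name", "amount"]}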
9674a0869c2a333f74178e305677259e7ac379c3
examples/ignore_websocket.py
examples/ignore_websocket.py
# This script makes mitmproxy switch to passthrough mode for all HTTP
# responses with "Connection: Upgrade" header. This is useful to make
# WebSockets work in untrusted environments.
#
# Note: Chrome (and possibly other browsers), when explicitly configured
# to use a proxy (i.e. mitmproxy's regular mode), send a CONNECT request
# to the proxy before they initiate the websocket connection.
# To make WebSockets work in these cases, supply
# `--ignore :80$` as an additional parameter.
# (see http://mitmproxy.org/doc/features/passthrough.html)

from libmproxy.protocol.http import HTTPRequest
from libmproxy.protocol.tcp import TCPHandler
from libmproxy.protocol import KILL
from libmproxy.script import concurrent


def start(context, argv):
    HTTPRequest._headers_to_strip_off.remove("Connection")
    HTTPRequest._headers_to_strip_off.remove("Upgrade")


def done(context):
    HTTPRequest._headers_to_strip_off.append("Connection")
    HTTPRequest._headers_to_strip_off.append("Upgrade")


@concurrent
def response(context, flow):
    if flow.response.headers.get_first("Connection", None) == "Upgrade":
        # We need to send the response manually now...
        flow.client_conn.send(flow.response.assemble())
        # ...and then delegate to tcp passthrough.
        TCPHandler(flow.live.c, log=False).handle_messages()
        flow.reply(KILL)
# This script makes mitmproxy switch to passthrough mode for all HTTP
# responses with "Connection: Upgrade" header. This is useful to make
# WebSockets work in untrusted environments.
#
# Note: Chrome (and possibly other browsers), when explicitly configured
# to use a proxy (i.e. mitmproxy's regular mode), send a CONNECT request
# to the proxy before they initiate the websocket connection.
# To make WebSockets work in these cases, supply
# `--ignore :80$` as an additional parameter.
# (see http://mitmproxy.org/doc/features/passthrough.html)

from libmproxy.protocol.http import HTTPRequest
from libmproxy.protocol.tcp import TCPHandler
from libmproxy.protocol import KILL
from libmproxy.script import concurrent


def start(context, argv):
    HTTPRequest._headers_to_strip_off.remove("Connection")
    HTTPRequest._headers_to_strip_off.remove("Upgrade")


def done(context):
    HTTPRequest._headers_to_strip_off.append("Connection")
    HTTPRequest._headers_to_strip_off.append("Upgrade")


@concurrent
def response(context, flow):
    value = flow.response.headers.get_first("Connection", None)
    if value and value.upper() == "UPGRADE":
        # We need to send the response manually now...
        flow.client_conn.send(flow.response.assemble())
        # ...and then delegate to tcp passthrough.
        TCPHandler(flow.live.c, log=False).handle_messages()
        flow.reply(KILL)
Make the Websocket's connection header value case-insensitive
Make the Websocket's connection header value case-insensitive
Python
mit
liorvh/mitmproxy,ccccccccccc/mitmproxy,dwfreed/mitmproxy,mhils/mitmproxy,ryoqun/mitmproxy,Kriechi/mitmproxy,azureplus/mitmproxy,dufferzafar/mitmproxy,ikoz/mitmproxy,jpic/mitmproxy,tfeagle/mitmproxy,rauburtin/mitmproxy,MatthewShao/mitmproxy,pombredanne/mitmproxy,pombredanne/mitmproxy,laurmurclar/mitmproxy,StevenVanAcker/mitmproxy,fimad/mitmproxy,elitest/mitmproxy,claimsmall/mitmproxy,ikoz/mitmproxy,bazzinotti/mitmproxy,liorvh/mitmproxy,zbuc/mitmproxy,devasia1000/mitmproxy,ikoz/mitmproxy,StevenVanAcker/mitmproxy,jvillacorta/mitmproxy,tdickers/mitmproxy,StevenVanAcker/mitmproxy,syjzwjj/mitmproxy,ryoqun/mitmproxy,Endika/mitmproxy,0xwindows/InfoLeak,devasia1000/mitmproxy,elitest/mitmproxy,ParthGanatra/mitmproxy,mitmproxy/mitmproxy,noikiy/mitmproxy,jvillacorta/mitmproxy,onlywade/mitmproxy,sethp-jive/mitmproxy,cortesi/mitmproxy,dweinstein/mitmproxy,azureplus/mitmproxy,dufferzafar/mitmproxy,Fuzion24/mitmproxy,ADemonisis/mitmproxy,noikiy/mitmproxy,scriptmediala/mitmproxy,macmantrl/mitmproxy,guiquanz/mitmproxy,gzzhanghao/mitmproxy,byt3bl33d3r/mitmproxy,cortesi/mitmproxy,owers19856/mitmproxy,tdickers/mitmproxy,devasia1000/mitmproxy,syjzwjj/mitmproxy,Endika/mitmproxy,ccccccccccc/mitmproxy,xbzbing/mitmproxy,ujjwal96/mitmproxy,elitest/mitmproxy,liorvh/mitmproxy,inscriptionweb/mitmproxy,inscriptionweb/mitmproxy,tekii/mitmproxy,guiquanz/mitmproxy,vhaupert/mitmproxy,mosajjal/mitmproxy,ADemonisis/mitmproxy,sethp-jive/mitmproxy,ddworken/mitmproxy,vhaupert/mitmproxy,tfeagle/mitmproxy,jpic/mitmproxy,fimad/mitmproxy,legendtang/mitmproxy,xbzbing/mitmproxy,ujjwal96/mitmproxy,ddworken/mitmproxy,Kriechi/mitmproxy,inscriptionweb/mitmproxy,azureplus/mitmproxy,pombredanne/mitmproxy,tfeagle/mitmproxy,legendtang/mitmproxy,byt3bl33d3r/mitmproxy,rauburtin/mitmproxy,Fuzion24/mitmproxy,gzzhanghao/mitmproxy,noikiy/mitmproxy,elitest/mitmproxy,mhils/mitmproxy,ParthGanatra/mitmproxy,mosajjal/mitmproxy,owers19856/mitmproxy,tekii/mitmproxy,cortesi/mitmproxy,macmantrl/mitmproxy,bazzinotti/mitmproxy,dxq-git/mitmproxy,mitmproxy/mitmproxy,jpic/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,dweinstein/mitmproxy,fimad/mitmproxy,dxq-git/mitmproxy,xbzbing/mitmproxy,claimsmall/mitmproxy,dwfreed/mitmproxy,xaxa89/mitmproxy,vhaupert/mitmproxy,ujjwal96/mitmproxy,Endika/mitmproxy,ParthGanatra/mitmproxy,meizhoubao/mitmproxy,meizhoubao/mitmproxy,dweinstein/mitmproxy,mhils/mitmproxy,Fuzion24/mitmproxy,gzzhanghao/mitmproxy,azureplus/mitmproxy,dxq-git/mitmproxy,ddworken/mitmproxy,ADemonisis/mitmproxy,0xwindows/InfoLeak,dufferzafar/mitmproxy,zlorb/mitmproxy,tekii/mitmproxy,scriptmediala/mitmproxy,dwfreed/mitmproxy,zlorb/mitmproxy,bazzinotti/mitmproxy,StevenVanAcker/mitmproxy,syjzwjj/mitmproxy,ccccccccccc/mitmproxy,xbzbing/mitmproxy,syjzwjj/mitmproxy,Endika/mitmproxy,onlywade/mitmproxy,sethp-jive/mitmproxy,xaxa89/mitmproxy,xaxa89/mitmproxy,jpic/mitmproxy,guiquanz/mitmproxy,rauburtin/mitmproxy,jvillacorta/mitmproxy,owers19856/mitmproxy,ZeYt/mitmproxy,ZeYt/mitmproxy,zbuc/mitmproxy,zlorb/mitmproxy,Kriechi/mitmproxy,ZeYt/mitmproxy,Kriechi/mitmproxy,ZeYt/mitmproxy,ryoqun/mitmproxy,devasia1000/mitmproxy,claimsmall/mitmproxy,laurmurclar/mitmproxy,MatthewShao/mitmproxy,noikiy/mitmproxy,onlywade/mitmproxy,macmantrl/mitmproxy,scriptmediala/mitmproxy,mitmproxy/mitmproxy,zlorb/mitmproxy,mhils/mitmproxy,sethp-jive/mitmproxy,dxq-git/mitmproxy,MatthewShao/mitmproxy,mitmproxy/mitmproxy,tdickers/mitmproxy,legendtang/mitmproxy,laurmurclar/mitmproxy,macmantrl/mitmproxy,tfeagle/mitmproxy,byt3bl33d3r/mitmproxy,ujjwal96/mitmproxy,Fuzion24/mitmproxy,owers19856/mitmproxy,ikoz/
mitmproxy,mosajjal/mitmproxy,vhaupert/mitmproxy,zbuc/mitmproxy,onlywade/mitmproxy,0xwindows/InfoLeak,mitmproxy/mitmproxy,inscriptionweb/mitmproxy,ParthGanatra/mitmproxy,0xwindows/InfoLeak,guiquanz/mitmproxy,byt3bl33d3r/mitmproxy,meizhoubao/mitmproxy,ryoqun/mitmproxy,legendtang/mitmproxy,tdickers/mitmproxy,laurmurclar/mitmproxy,cortesi/mitmproxy,liorvh/mitmproxy,jvillacorta/mitmproxy,dwfreed/mitmproxy,gzzhanghao/mitmproxy,scriptmediala/mitmproxy,dweinstein/mitmproxy,meizhoubao/mitmproxy,rauburtin/mitmproxy,ccccccccccc/mitmproxy,tekii/mitmproxy,bazzinotti/mitmproxy,zbuc/mitmproxy,pombredanne/mitmproxy,claimsmall/mitmproxy,ddworken/mitmproxy,xaxa89/mitmproxy,fimad/mitmproxy,dufferzafar/mitmproxy,ADemonisis/mitmproxy,MatthewShao/mitmproxy
# This script makes mitmproxy switch to passthrough mode for all HTTP
# responses with "Connection: Upgrade" header. This is useful to make
# WebSockets work in untrusted environments.
#
# Note: Chrome (and possibly other browsers), when explicitly configured
# to use a proxy (i.e. mitmproxy's regular mode), send a CONNECT request
# to the proxy before they initiate the websocket connection.
# To make WebSockets work in these cases, supply
# `--ignore :80$` as an additional parameter.
# (see http://mitmproxy.org/doc/features/passthrough.html)

from libmproxy.protocol.http import HTTPRequest
from libmproxy.protocol.tcp import TCPHandler
from libmproxy.protocol import KILL
from libmproxy.script import concurrent


def start(context, argv):
    HTTPRequest._headers_to_strip_off.remove("Connection")
    HTTPRequest._headers_to_strip_off.remove("Upgrade")


def done(context):
    HTTPRequest._headers_to_strip_off.append("Connection")
    HTTPRequest._headers_to_strip_off.append("Upgrade")


@concurrent
def response(context, flow):
    value = flow.response.headers.get_first("Connection", None)
    if value and value.upper() == "UPGRADE":
        # We need to send the response manually now...
        flow.client_conn.send(flow.response.assemble())
        # ...and then delegate to tcp passthrough.
        TCPHandler(flow.live.c, log=False).handle_messages()
        flow.reply(KILL)
Make the Websocket's connection header value case-insensitive

# This script makes mitmproxy switch to passthrough mode for all HTTP
# responses with "Connection: Upgrade" header. This is useful to make
# WebSockets work in untrusted environments.
#
# Note: Chrome (and possibly other browsers), when explicitly configured
# to use a proxy (i.e. mitmproxy's regular mode), send a CONNECT request
# to the proxy before they initiate the websocket connection.
# To make WebSockets work in these cases, supply
# `--ignore :80$` as an additional parameter.
# (see http://mitmproxy.org/doc/features/passthrough.html)

from libmproxy.protocol.http import HTTPRequest
from libmproxy.protocol.tcp import TCPHandler
from libmproxy.protocol import KILL
from libmproxy.script import concurrent


def start(context, argv):
    HTTPRequest._headers_to_strip_off.remove("Connection")
    HTTPRequest._headers_to_strip_off.remove("Upgrade")


def done(context):
    HTTPRequest._headers_to_strip_off.append("Connection")
    HTTPRequest._headers_to_strip_off.append("Upgrade")


@concurrent
def response(context, flow):
    if flow.response.headers.get_first("Connection", None) == "Upgrade":
        # We need to send the response manually now...
        flow.client_conn.send(flow.response.assemble())
        # ...and then delegate to tcp passthrough.
        TCPHandler(flow.live.c, log=False).handle_messages()
        flow.reply(KILL)
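The fix normalizes the header value before comparing, since clients may send "upgrade", "Upgrade", or "UPGRADE". The check in isolation, as a small sketch:

def is_upgrade(value):
    # HTTP header values such as "Connection: upgrade" are matched
    # case-insensitively, and a missing header (None) must not match.
    return bool(value) and value.upper() == "UPGRADE"

assert is_upgrade("upgrade") and is_upgrade("Upgrade")
assert not is_upgrade(None) and not is_upgrade("keep-alive")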
9f922f939ec19d0d9a9a91abb3e8b0d5b010c246
djangoautoconf/management/commands/dump_settings.py
djangoautoconf/management/commands/dump_settings.py
import os

from django.core.management import BaseCommand
from django.conf import settings


def dump_attrs(obj_instance):
    for attr in dir(obj_instance):
        if attr != attr.upper():
            continue
        yield attr, getattr(obj_instance, attr)


class Command(BaseCommand):
    args = ''
    help = 'Create command cache for environment where os.listdir is not working'

    def handle(self, *args, **options):
        try:
            os.remove("local/total_settings.py")
        except:
            pass
        with open("local/total_settings.py", "w") as f:
            for key, value in dump_attrs(settings):
                if type(value) in (list, tuple, dict):
                    print >>f, key, "=", value
                elif type(value) in (str, ):
                    print >>f, key, "=", '"'+str(value)+'"'
                else:
                    print >>f, key, "=", str(value)
import os

from django.core.management import BaseCommand
from django.conf import settings


def dump_attrs(obj_instance):
    for attr in dir(obj_instance):
        if attr != attr.upper():
            continue
        yield attr, getattr(obj_instance, attr)


class Command(BaseCommand):
    args = ''
    help = 'Create command cache for environment where os.listdir is not working'

    def handle(self, *args, **options):
        try:
            os.remove("local/total_settings.py")
        except:
            pass
        with open("local/total_settings.py", "w") as f:
            for key, value in dump_attrs(settings):
                if type(value) in (list, tuple, dict):
                    print >>f, key, "=", value
                elif type(value) in (str, ):
                    print >>f, key, "=", '"'+str(value)+'"'
                else:
                    print >>f, key, "=", '"'+str(value)+'"'
Workaround for dump settings issue.
Workaround for dump settings issue.
Python
bsd-3-clause
weijia/djangoautoconf,weijia/djangoautoconf
import os

from django.core.management import BaseCommand
from django.conf import settings


def dump_attrs(obj_instance):
    for attr in dir(obj_instance):
        if attr != attr.upper():
            continue
        yield attr, getattr(obj_instance, attr)


class Command(BaseCommand):
    args = ''
    help = 'Create command cache for environment where os.listdir is not working'

    def handle(self, *args, **options):
        try:
            os.remove("local/total_settings.py")
        except:
            pass
        with open("local/total_settings.py", "w") as f:
            for key, value in dump_attrs(settings):
                if type(value) in (list, tuple, dict):
                    print >>f, key, "=", value
                elif type(value) in (str, ):
                    print >>f, key, "=", '"'+str(value)+'"'
                else:
                    print >>f, key, "=", '"'+str(value)+'"'
Workaround for dump settings issue.

import os

from django.core.management import BaseCommand
from django.conf import settings


def dump_attrs(obj_instance):
    for attr in dir(obj_instance):
        if attr != attr.upper():
            continue
        yield attr, getattr(obj_instance, attr)


class Command(BaseCommand):
    args = ''
    help = 'Create command cache for environment where os.listdir is not working'

    def handle(self, *args, **options):
        try:
            os.remove("local/total_settings.py")
        except:
            pass
        with open("local/total_settings.py", "w") as f:
            for key, value in dump_attrs(settings):
                if type(value) in (list, tuple, dict):
                    print >>f, key, "=", value
                elif type(value) in (str, ):
                    print >>f, key, "=", '"'+str(value)+'"'
                else:
                    print >>f, key, "=", str(value)
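The workaround in this record quotes every non-collection value as a string, which corrupts values like True or 42 on reload. A sketch of an alternative that keeps round-trippable literals by leaning on repr; this is illustrative only and not part of the record:

def dump_line(key, value):
    # repr() produces a valid Python literal for strings, numbers,
    # booleans, lists, tuples and dicts alike.
    return '{0} = {1!r}'.format(key, value)

print(dump_line('DEBUG', True))        # DEBUG = True
print(dump_line('SITE_NAME', 'demo'))  # SITE_NAME = 'demo'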
77038432486071c9459c5ce43492905e158b7713
Topo/LoopTopo.py
Topo/LoopTopo.py
'''
SDN project testing topo

      s1
     /  \
    s2--s3
    |    |
  host.. host...
'''

from mininet.topo import Topo


class LoopTopo( Topo ):
    def __init__( self , n=2 ):
        # Initialize topology
        Topo.__init__( self)

        # Add Host
        h1 = self.addHost( 'h1' )
        h2 = self.addHost( 'h2' )
        h3 = self.addHost( 'h3' )
        h4 = self.addHost( 'h4' )
        h5 = self.addHost( 'h5' )
        h6 = self.addHost( 'h6' )

        # Add Switch
        s1 = self.addSwitch( 's1' )
        s2 = self.addSwitch( 's2' )
        s3 = self.addSwitch( 's3' )

        # Add Link
        self.addLink( s1, s2 )
        self.addLink( s1, s3 )
        self.addLink( s2, s3 )
        self.addLink( s2, h1 )
        self.addLink( s2, h2 )
        self.addLink( s2, h3 )
        self.addLink( s3, h4 )
        self.addLink( s3, h5 )
        self.addLink( s3, h6 )


topos = { 'LoopTopo': ( lambda: LoopTopo() ) }
'''
SDN project testing topo

      s1
     /  \
    s2--s3
    |    |
  host.. host...
'''

from mininet.topo import Topo


class LoopTopo( Topo ):
    def __init__( self , n=2 ):
        # Initialize topology
        Topo.__init__( self)

        # Add Host
        h1 = self.addHost( 'h1' )
        h2 = self.addHost( 'h2' )
        h3 = self.addHost( 'h3' )
        h4 = self.addHost( 'h4' )
        h5 = self.addHost( 'h5' )
        h6 = self.addHost( 'h6' )

        # Add Switch
        s1 = self.addSwitch( 's1' )
        s2 = self.addSwitch( 's2' )
        s3 = self.addSwitch( 's3' )

        # Add Link
        self.addLink( s1, s2 )
        self.addLink( s1, s3 )
        self.addLink( s2, s3 )
        self.addLink( s2, h1 )
        self.addLink( s2, h2 )
        self.addLink( s2, h3 )
        self.addLink( s3, h4 )
        self.addLink( s3, h5 )
        self.addLink( s3, h6 )


topos = { 'Loop': ( lambda: LoopTopo() ) }
Rename the topo.
Rename the topo.
Python
mit
ray6/sdn,ray6/sdn,ray6/sdn
'''
SDN project testing topo

      s1
     /  \
    s2--s3
    |    |
  host.. host...
'''

from mininet.topo import Topo


class LoopTopo( Topo ):
    def __init__( self , n=2 ):
        # Initialize topology
        Topo.__init__( self)

        # Add Host
        h1 = self.addHost( 'h1' )
        h2 = self.addHost( 'h2' )
        h3 = self.addHost( 'h3' )
        h4 = self.addHost( 'h4' )
        h5 = self.addHost( 'h5' )
        h6 = self.addHost( 'h6' )

        # Add Switch
        s1 = self.addSwitch( 's1' )
        s2 = self.addSwitch( 's2' )
        s3 = self.addSwitch( 's3' )

        # Add Link
        self.addLink( s1, s2 )
        self.addLink( s1, s3 )
        self.addLink( s2, s3 )
        self.addLink( s2, h1 )
        self.addLink( s2, h2 )
        self.addLink( s2, h3 )
        self.addLink( s3, h4 )
        self.addLink( s3, h5 )
        self.addLink( s3, h6 )


topos = { 'Loop': ( lambda: LoopTopo() ) }
Rename the topo.

'''
SDN project testing topo

      s1
     /  \
    s2--s3
    |    |
  host.. host...
'''

from mininet.topo import Topo


class LoopTopo( Topo ):
    def __init__( self , n=2 ):
        # Initialize topology
        Topo.__init__( self)

        # Add Host
        h1 = self.addHost( 'h1' )
        h2 = self.addHost( 'h2' )
        h3 = self.addHost( 'h3' )
        h4 = self.addHost( 'h4' )
        h5 = self.addHost( 'h5' )
        h6 = self.addHost( 'h6' )

        # Add Switch
        s1 = self.addSwitch( 's1' )
        s2 = self.addSwitch( 's2' )
        s3 = self.addSwitch( 's3' )

        # Add Link
        self.addLink( s1, s2 )
        self.addLink( s1, s3 )
        self.addLink( s2, s3 )
        self.addLink( s2, h1 )
        self.addLink( s2, h2 )
        self.addLink( s2, h3 )
        self.addLink( s3, h4 )
        self.addLink( s3, h5 )
        self.addLink( s3, h6 )


topos = { 'LoopTopo': ( lambda: LoopTopo() ) }
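With the shorter key, the topology would typically be selected by name on the Mininet command line (e.g. mn --custom LoopTopo.py --topo Loop). It can also be driven from Python; a minimal sketch, assuming Mininet is installed and the file above is importable:

from mininet.net import Mininet
from LoopTopo import LoopTopo

net = Mininet(topo=LoopTopo())  # build the three-switch loop with six hosts
net.start()
net.pingAll()  # note: loop topologies need STP or a controller to avoid broadcast storms
net.stop()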
ba0ea7491fab383992013a8379592657eedfe1ce
scripts/contrib/model_info.py
scripts/contrib/model_info.py
#!/usr/bin/env python3

import sys
import argparse
import numpy as np
import yaml

DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"


def main():
    args = parse_args()
    model = np.load(args.model)

    if S2S_SPECIAL_NODE not in model:
        print("No special Marian YAML node found in the model")
        exit(1)

    yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
    if not args.key:
        print(yaml_text)
        exit(0)

    # fix the invalid trailing unicode character '#x0000' added to the YAML
    # string by the C++ cnpy library
    try:
        yaml_node = yaml.load(yaml_text)
    except yaml.reader.ReaderError:
        yaml_node = yaml.load(yaml_text[:-1])

    print(yaml_node[args.key])


def parse_args():
    parser = argparse.ArgumentParser(description=DESC)
    parser.add_argument("-m", "--model", help="model file", required=True)
    parser.add_argument("-k", "--key", help="print value for specific key")
    return parser.parse_args()


if __name__ == "__main__":
    main()
#!/usr/bin/env python3

import sys
import argparse
import numpy as np
import yaml

DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"


def main():
    args = parse_args()
    model = np.load(args.model)

    if args.special:
        if S2S_SPECIAL_NODE not in model:
            print("No special Marian YAML node found in the model")
            exit(1)

        yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
        if not args.key:
            print(yaml_text)
            exit(0)

        # fix the invalid trailing unicode character '#x0000' added to the YAML
        # string by the C++ cnpy library
        try:
            yaml_node = yaml.load(yaml_text)
        except yaml.reader.ReaderError:
            yaml_node = yaml.load(yaml_text[:-1])

        print(yaml_node[args.key])
    else:
        if args.key:
            if args.key not in model:
                print("Key not found")
                exit(1)
            print(model[args.key])
        else:
            for key in model:
                print(key)


def parse_args():
    parser = argparse.ArgumentParser(description=DESC)
    parser.add_argument("-m", "--model", help="model file", required=True)
    parser.add_argument("-k", "--key", help="print value for specific key")
    parser.add_argument("-s", "--special", action="store_true", help="print values from special:model.yml node")
    return parser.parse_args()


if __name__ == "__main__":
    main()
Add printing value for any key from model.npz
Add printing value for any key from model.npz
Python
mit
emjotde/amunmt,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunmt,amunmt/marian,emjotde/amunn,amunmt/marian,emjotde/amunn,emjotde/amunmt,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/amunn,marian-nmt/marian-train,emjotde/Marian,marian-nmt/marian-train,emjotde/Marian,amunmt/marian
#!/usr/bin/env python3

import sys
import argparse
import numpy as np
import yaml

DESC = "Prints keys and values from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"


def main():
    args = parse_args()
    model = np.load(args.model)

    if args.special:
        if S2S_SPECIAL_NODE not in model:
            print("No special Marian YAML node found in the model")
            exit(1)

        yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
        if not args.key:
            print(yaml_text)
            exit(0)

        # fix the invalid trailing unicode character '#x0000' added to the YAML
        # string by the C++ cnpy library
        try:
            yaml_node = yaml.load(yaml_text)
        except yaml.reader.ReaderError:
            yaml_node = yaml.load(yaml_text[:-1])

        print(yaml_node[args.key])
    else:
        if args.key:
            if args.key not in model:
                print("Key not found")
                exit(1)
            print(model[args.key])
        else:
            for key in model:
                print(key)


def parse_args():
    parser = argparse.ArgumentParser(description=DESC)
    parser.add_argument("-m", "--model", help="model file", required=True)
    parser.add_argument("-k", "--key", help="print value for specific key")
    parser.add_argument("-s", "--special", action="store_true", help="print values from special:model.yml node")
    return parser.parse_args()


if __name__ == "__main__":
    main()
Add printing value for any key from model.npz

#!/usr/bin/env python3

import sys
import argparse
import numpy as np
import yaml

DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"


def main():
    args = parse_args()
    model = np.load(args.model)

    if S2S_SPECIAL_NODE not in model:
        print("No special Marian YAML node found in the model")
        exit(1)

    yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
    if not args.key:
        print(yaml_text)
        exit(0)

    # fix the invalid trailing unicode character '#x0000' added to the YAML
    # string by the C++ cnpy library
    try:
        yaml_node = yaml.load(yaml_text)
    except yaml.reader.ReaderError:
        yaml_node = yaml.load(yaml_text[:-1])

    print(yaml_node[args.key])


def parse_args():
    parser = argparse.ArgumentParser(description=DESC)
    parser.add_argument("-m", "--model", help="model file", required=True)
    parser.add_argument("-k", "--key", help="print value for specific key")
    return parser.parse_args()


if __name__ == "__main__":
    main()
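The key-listing branch added here relies on numpy.load returning a dict-like NpzFile for .npz archives. A tiny standalone sketch of that behaviour; the path is hypothetical:

import numpy as np

model = np.load('model.npz')      # NpzFile behaves like a read-only mapping
for key in model:
    print(key, model[key].shape)  # each entry is a stored ndarray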
9ee87588b2d6694cafea6415af50110ba5263d3e
bitbots_body_behaviour/src/bitbots_body_behaviour/body/actions/wait.py
bitbots_body_behaviour/src/bitbots_body_behaviour/body/actions/wait.py
# -*- coding:utf-8 -*-
"""
Wait
^^^^

.. moduleauthor:: Martin Poppinga <[email protected]>

Just waits for something (i.e. that preconditions will be fullfilled)
"""

import rospy

from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode


class Wait(AbstractActionModule):
    def __init__(self, connector, args=10):
        super(Wait, self).__init__(connector)
        self.time = rospy.get_time() + args

    def perform(self, connector, reevaluate=False):
        if connector.world_model.ball_seen():
            connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
            self.push(Stand)

        if self.time > rospy.get_time():
            self.pop()
# -*- coding:utf-8 -*-
"""
Wait
^^^^

.. moduleauthor:: Martin Poppinga <[email protected]>

Just waits for something (i.e. that preconditions will be fullfilled)
"""

import rospy

from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode


class Wait(AbstractActionModule):
    def __init__(self, connector, args=10):
        super(Wait, self).__init__(connector)
        self.time = rospy.get_time() + args

    def perform(self, connector, reevaluate=False):
        if connector.world_model.ball_seen():
            connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
            self.push(Stand)

        if self.time < rospy.get_time():
            self.pop()
Fix bug in Wait logic
Fix bug in Wait logic
Python
bsd-3-clause
bit-bots/bitbots_behaviour
# -*- coding:utf-8 -*-
"""
Wait
^^^^

.. moduleauthor:: Martin Poppinga <[email protected]>

Just waits for something (i.e. that preconditions will be fullfilled)
"""

import rospy

from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode


class Wait(AbstractActionModule):
    def __init__(self, connector, args=10):
        super(Wait, self).__init__(connector)
        self.time = rospy.get_time() + args

    def perform(self, connector, reevaluate=False):
        if connector.world_model.ball_seen():
            connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
            self.push(Stand)

        if self.time < rospy.get_time():
            self.pop()
Fix bug in Wait logic

# -*- coding:utf-8 -*-
"""
Wait
^^^^

.. moduleauthor:: Martin Poppinga <[email protected]>

Just waits for something (i.e. that preconditions will be fullfilled)
"""

import rospy

from bitbots_body_behaviour.body.actions.go_to import Stand
from bitbots_stackmachine.abstract_action_module import AbstractActionModule
from humanoid_league_msgs.msg import HeadMode


class Wait(AbstractActionModule):
    def __init__(self, connector, args=10):
        super(Wait, self).__init__(connector)
        self.time = rospy.get_time() + args

    def perform(self, connector, reevaluate=False):
        if connector.world_model.ball_seen():
            connector.blackboard.set_head_duty(HeadMode.BALL_MODE)
            self.push(Stand)

        if self.time > rospy.get_time():
            self.pop()
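The bug in this record is an inverted deadline comparison: the module popped itself while the deadline was still in the future instead of once it had passed. The same check in plain Python, as a sketch detached from ROS:

import time

class Deadline:
    def __init__(self, duration=10):
        # Store an absolute point in time, just like self.time above.
        self.time = time.time() + duration

    def expired(self):
        # Correct direction: we are done once the stored deadline
        # lies in the past.
        return self.time < time.time()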
dba74cdd2fb2a8e5be1b56bba3fdcadc40827f73
links/utils/testing_helpers.py
links/utils/testing_helpers.py
from django.test import TestCase
from django.core.urlresolvers import reverse

from rest_framework.test import APIClient


class APITestCase(TestCase):

    def setUp(self):
        self.client = APIClient()


class AuthenticatedAPITestCase(APITestCase):

    def setUp(self):
        super(AuthenticatedAPITestCase, self).setUp()
        response = self.client.post(reverse('registration'), {
            'email': '[email protected]',
            'password': 'something secret',
            'first_name': 'Testy',
            'last_name': 'McTesterson'
        }, format='json')
        token = response.data['token']
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token)
Create some testing helper classes
Create some testing helper classes
Python
mit
projectweekend/Links-API,projectweekend/Links-API
from django.test import TestCase
from django.core.urlresolvers import reverse

from rest_framework.test import APIClient


class APITestCase(TestCase):

    def setUp(self):
        self.client = APIClient()


class AuthenticatedAPITestCase(APITestCase):

    def setUp(self):
        super(AuthenticatedAPITestCase, self).setUp()
        response = self.client.post(reverse('registration'), {
            'email': '[email protected]',
            'password': 'something secret',
            'first_name': 'Testy',
            'last_name': 'McTesterson'
        }, format='json')
        token = response.data['token']
        self.client.credentials(HTTP_AUTHORIZATION='Token ' + token)
Create some testing helper classes
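Downstream test cases would inherit from AuthenticatedAPITestCase so that every request already carries the token credential. A hypothetical sketch; the test name and endpoint are made up:

from links.utils.testing_helpers import AuthenticatedAPITestCase

class LinkListTests(AuthenticatedAPITestCase):
    def test_list_requires_no_extra_setup(self):
        # self.client already holds the Token credential from setUp().
        response = self.client.get('/links/')
        self.assertEqual(response.status_code, 200)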
e4c5f68da949683232b520796b380e8b8f2163c7
test/tiles/bigwig_test.py
test/tiles/bigwig_test.py
import clodius.tiles.bigwig as hgbi
import os.path as op


def test_bigwig_tiles():
    filename = op.join('data', 'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig')

    meanval = hgbi.tiles(filename, ['x.0.0'])
    minval = hgbi.tiles(filename, ['x.0.0.min'])
    maxval = hgbi.tiles(filename, ['x.0.0.max'])

    assert meanval[0][1]['max_value'] > minval[0][1]['max_value']
    assert maxval[0][1]['max_value'] > meanval[0][1]['max_value']


def test_tileset_info():
    filename = op.join('data', 'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig')
    tileset_info = hgbi.tileset_info(filename)
    # print('tileset_info', tileset_info)
import clodius.tiles.bigwig as hgbi
import os.path as op


def test_bigwig_tiles():
    filename = op.join(
        'data',
        'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig'
    )

    meanval = hgbi.tiles(filename, ['x.0.0'])
    minval = hgbi.tiles(filename, ['x.0.0.min'])
    maxval = hgbi.tiles(filename, ['x.0.0.max'])

    assert meanval[0][1]['max_value'] > minval[0][1]['max_value']
    assert maxval[0][1]['max_value'] > meanval[0][1]['max_value']


def test_tileset_info():
    filename = op.join(
        'data',
        'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig'
    )
    tileset_info = hgbi.tileset_info(filename)

    assert len(tileset_info['aggregation_modes']) == 4
    assert tileset_info['aggregation_modes']['mean']
    assert tileset_info['aggregation_modes']['min']
    assert tileset_info['aggregation_modes']['max']
    assert tileset_info['aggregation_modes']['std']
Test for bigWig aggregation modes
Test for bigWig aggregation modes
Python
mit
hms-dbmi/clodius,hms-dbmi/clodius
import clodius.tiles.bigwig as hgbi
import os.path as op


def test_bigwig_tiles():
    filename = op.join(
        'data',
        'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig'
    )

    meanval = hgbi.tiles(filename, ['x.0.0'])
    minval = hgbi.tiles(filename, ['x.0.0.min'])
    maxval = hgbi.tiles(filename, ['x.0.0.max'])

    assert meanval[0][1]['max_value'] > minval[0][1]['max_value']
    assert maxval[0][1]['max_value'] > meanval[0][1]['max_value']


def test_tileset_info():
    filename = op.join(
        'data',
        'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig'
    )
    tileset_info = hgbi.tileset_info(filename)

    assert len(tileset_info['aggregation_modes']) == 4
    assert tileset_info['aggregation_modes']['mean']
    assert tileset_info['aggregation_modes']['min']
    assert tileset_info['aggregation_modes']['max']
    assert tileset_info['aggregation_modes']['std']
Test for bigWig aggregation modes

import clodius.tiles.bigwig as hgbi
import os.path as op


def test_bigwig_tiles():
    filename = op.join('data', 'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig')

    meanval = hgbi.tiles(filename, ['x.0.0'])
    minval = hgbi.tiles(filename, ['x.0.0.min'])
    maxval = hgbi.tiles(filename, ['x.0.0.max'])

    assert meanval[0][1]['max_value'] > minval[0][1]['max_value']
    assert maxval[0][1]['max_value'] > meanval[0][1]['max_value']


def test_tileset_info():
    filename = op.join('data', 'wgEncodeCaltechRnaSeqHuvecR1x75dTh1014IlnaPlusSignalRep2.bigWig')
    tileset_info = hgbi.tileset_info(filename)
    # print('tileset_info', tileset_info)
abd97f71e54515c057e94f7d21aa953faba3f5fc
taskflow/examples/delayed_return.py
taskflow/examples/delayed_return.py
# -*- coding: utf-8 -*-

#    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import logging
import os
import sys

from concurrent import futures

logging.basicConfig(level=logging.ERROR)

self_dir = os.path.abspath(os.path.dirname(__file__))
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       os.pardir,
                                       os.pardir))
sys.path.insert(0, top_dir)
sys.path.insert(0, self_dir)

# INTRO: in this example linear_flow we will attach a listener to an engine
# and delay the return from a function until after the result of a task has
# occured in that engine. The engine will continue running (in the background)
# while the function will have returned.

import taskflow.engines
from taskflow.listeners import base
from taskflow.patterns import linear_flow as lf
from taskflow import states
from taskflow import task
from taskflow.utils import misc


class PokeFutureListener(base.ListenerBase):
    def __init__(self, engine, future, task_name):
        super(PokeFutureListener, self).__init__(
            engine,
            task_listen_for=(misc.Notifier.ANY,),
            flow_listen_for=[])
        self._future = future
        self._task_name = task_name

    def _task_receiver(self, state, details):
        if state in (states.SUCCESS, states.FAILURE):
            if details.get('task_name') == self._task_name:
                if state == states.SUCCESS:
                    self._future.set_result(details['result'])
                else:
                    failure = details['result']
                    self._future.set_exception(failure.exception)


class Hi(task.Task):
    def execute(self):
        # raise IOError("I broken")
        return 'hi'


class Bye(task.Task):
    def execute(self):
        return 'bye'


def return_from_flow(pool):
    wf = lf.Flow("root").add(Hi("hi"), Bye("bye"))
    eng = taskflow.engines.load(wf, engine_conf='serial')
    f = futures.Future()
    watcher = PokeFutureListener(eng, f, 'hi')
    watcher.register()
    pool.submit(eng.run)
    return (eng, f.result())


with futures.ThreadPoolExecutor(1) as pool:
    engine, hi_result = return_from_flow(pool)
    print(hi_result)
    print(engine.storage.get_flow_state())
Add an example that activates a future when a result is ready
Add an example that activates a future when a result is ready

To allow an engine to continue to run while at the same time returning
from a function when a component of that engine finishes, a pattern can
be used that ties an engine's listeners to the function return, allowing
for both to be used simultaneously.

Change-Id: Iab49e0c7b233138bc2d02247ab7aa3d99a82cd67
Python
apache-2.0
openstack/taskflow,junneyang/taskflow,openstack/taskflow,jimbobhickville/taskflow,pombredanne/taskflow-1,junneyang/taskflow,pombredanne/taskflow-1,jimbobhickville/taskflow
# -*- coding: utf-8 -*-

#    Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import logging
import os
import sys

from concurrent import futures

logging.basicConfig(level=logging.ERROR)

self_dir = os.path.abspath(os.path.dirname(__file__))
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       os.pardir,
                                       os.pardir))
sys.path.insert(0, top_dir)
sys.path.insert(0, self_dir)

# INTRO: in this example linear_flow we will attach a listener to an engine
# and delay the return from a function until after the result of a task has
# occured in that engine. The engine will continue running (in the background)
# while the function will have returned.

import taskflow.engines
from taskflow.listeners import base
from taskflow.patterns import linear_flow as lf
from taskflow import states
from taskflow import task
from taskflow.utils import misc


class PokeFutureListener(base.ListenerBase):
    def __init__(self, engine, future, task_name):
        super(PokeFutureListener, self).__init__(
            engine,
            task_listen_for=(misc.Notifier.ANY,),
            flow_listen_for=[])
        self._future = future
        self._task_name = task_name

    def _task_receiver(self, state, details):
        if state in (states.SUCCESS, states.FAILURE):
            if details.get('task_name') == self._task_name:
                if state == states.SUCCESS:
                    self._future.set_result(details['result'])
                else:
                    failure = details['result']
                    self._future.set_exception(failure.exception)


class Hi(task.Task):
    def execute(self):
        # raise IOError("I broken")
        return 'hi'


class Bye(task.Task):
    def execute(self):
        return 'bye'


def return_from_flow(pool):
    wf = lf.Flow("root").add(Hi("hi"), Bye("bye"))
    eng = taskflow.engines.load(wf, engine_conf='serial')
    f = futures.Future()
    watcher = PokeFutureListener(eng, f, 'hi')
    watcher.register()
    pool.submit(eng.run)
    return (eng, f.result())


with futures.ThreadPoolExecutor(1) as pool:
    engine, hi_result = return_from_flow(pool)
    print(hi_result)
    print(engine.storage.get_flow_state())
Add an example that activates a future when a result is ready

To allow an engine to continue to run while at the same time returning
from a function when a component of that engine finishes, a pattern can
be used that ties an engine's listeners to the function return, allowing
for both to be used simultaneously.

Change-Id: Iab49e0c7b233138bc2d02247ab7aa3d99a82cd67
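Stripped of the taskflow machinery, the pattern in this record is: hand a bare Future to a listener, let the background work fill it in, and block only on f.result(). A compact sketch of the same idea with nothing but concurrent.futures:

from concurrent import futures

def first_result_then_continue(pool):
    f = futures.Future()

    def work():
        f.set_result('hi')   # the early result unblocks the caller...
        return 'bye'         # ...while the rest keeps running in the pool

    rest = pool.submit(work)
    return f.result(), rest  # blocks only until the first result is set

with futures.ThreadPoolExecutor(1) as pool:
    first, rest = first_result_then_continue(pool)
    print(first)          # hi
    print(rest.result())  # bye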
a8818e2058fdfaec7f283a5115619d42d23b7dde
anchorhub/builtin/github/writer.py
anchorhub/builtin/github/writer.py
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    setext = MarkdownSetextWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, setext, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
Use Setext strategy in GitHub built-in Writer
Use Setext strategy in GitHub built-in Writer
Python
apache-2.0
samjabrahams/anchorhub
"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    setext = MarkdownSetextWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, setext, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
Use Setext strategy in GitHub built-in Writer

"""
File that initializes a Writer object designed for GitHub style markdown files.
"""
from anchorhub.writer import Writer
from anchorhub.builtin.github.wstrategies import MarkdownATXWriterStrategy, \
    MarkdownSetextWriterStrategy, MarkdownInlineLinkWriterStrategy
import anchorhub.builtin.github.switches as ghswitches


def make_github_markdown_writer(opts):
    """
    Creates a Writer object used for parsing and writing Markdown files with
    a GitHub style anchor transformation

    :param opts:
    :return: A Writer object designed for parsing, modifying, and writing
        AnchorHub tags to converted anchors in Markdown files using GitHub
        style anchors
    """
    assert hasattr(opts, 'wrapper_regex')
    atx = MarkdownATXWriterStrategy(opts)
    inline = MarkdownInlineLinkWriterStrategy(opts)
    code_block_switch = ghswitches.code_block_switch

    strategies = [atx, inline]
    switches = [code_block_switch]

    return Writer(strategies, switches=switches)
8e907ad431dfe5395741d26ea46c50c118355d69
src/webassets/ext/werkzeug.py
src/webassets/ext/werkzeug.py
import logging

from webassets.script import CommandLineEnvironment


__all__ = ('make_assets_action',)


def make_assets_action(environment, loaders=[]):
    """Creates a ``werkzeug.script`` action which interfaces with the
    webassets command line tools.

    Since Werkzeug does not provide a way to have subcommands, we need to
    model the assets subcommands as options.

    If ``loaders`` is given, the command will use these loaders to add
    bundles to the environment. This is mainly useful if you are defining
    your bundles inside templates only, and need to find them first using
    something like the Jinja2Loader.
    """
    log = logging.getLogger('webassets')
    log.addHandler(logging.StreamHandler())

    def action(rebuild=False, watch=False, clean=False,
               quiet=('q', False), verbose=('v', False)):
        if len(filter(bool, [rebuild, watch, clean])) != 1:
            print "Error: exactly one of --rebuild, --watch or --clean must be given"
            return 1
        if rebuild:
            command = 'rebuild'
        elif watch:
            command = 'watch'
        elif clean:
            command = 'clean'

        log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))

        cmdenv = CommandLineEnvironment(environment, log)

        if loaders:
            log.info('Finding bundles...')
            for loader in loaders:
                environment.add(*[b for b in loader.load_bundles() if not b.is_container])

        cmdenv.invoke(command)

    return action
import logging

from webassets.script import CommandLineEnvironment


__all__ = ('make_assets_action',)


def make_assets_action(environment, loaders=[]):
    """Creates a ``werkzeug.script`` action which interfaces with the
    webassets command line tools.

    Since Werkzeug does not provide a way to have subcommands, we need to
    model the assets subcommands as options.

    If ``loaders`` is given, the command will use these loaders to add
    bundles to the environment. This is mainly useful if you are defining
    your bundles inside templates only, and need to find them first using
    something like the Jinja2Loader.
    """
    log = logging.getLogger('webassets')
    log.addHandler(logging.StreamHandler())

    def action(rebuild=False, watch=False, check=False, clean=False,
               quiet=('q', False), verbose=('v', False)):
        if len(filter(bool, [rebuild, watch, clean, check])) != 1:
            print "Error: exactly one of --rebuild, --watch, --check or --clean must be given"
            return 1
        if rebuild:
            command = 'rebuild'
        elif watch:
            command = 'watch'
        elif clean:
            command = 'clean'
        elif check:
            command = 'check'

        log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO))

        cmdenv = CommandLineEnvironment(environment, log)

        if loaders:
            log.info('Finding bundles...')
            for loader in loaders:
                environment.add(*[b for b in loader.load_bundles() if not b.is_container])

        cmdenv.invoke(command)

    return action
Make the "check" command available via the Werkzeug extension.
Make the "check" command available via the Werkzeug extension.
Python
bsd-2-clause
scorphus/webassets,wijerasa/webassets,JDeuce/webassets,heynemann/webassets,heynemann/webassets,heynemann/webassets,aconrad/webassets,aconrad/webassets,glorpen/webassets,glorpen/webassets,john2x/webassets,florianjacob/webassets,0x1997/webassets,JDeuce/webassets,0x1997/webassets,wijerasa/webassets,glorpen/webassets,aconrad/webassets,john2x/webassets,florianjacob/webassets,scorphus/webassets
import logging from webassets.script import CommandLineEnvironment __all__ = ('make_assets_action',) def make_assets_action(environment, loaders=[]): """Creates a ``werkzeug.script`` action which interfaces with the webassets command line tools. Since Werkzeug does not provide a way to have subcommands, we need to model the assets subcommands as options. If ``loaders`` is given, the command will use these loaders to add bundles to the environment. This is mainly useful if you are defining your bundles inside templates only, and need to find them first using something like the Jinja2Loader. """ log = logging.getLogger('webassets') log.addHandler(logging.StreamHandler()) def action(rebuild=False, watch=False, check=False, clean=False, quiet=('q', False), verbose=('v', False)): if len(filter(bool, [rebuild, watch, clean, check])) != 1: print "Error: exactly one of --rebuild, --watch, --check or --clean must be given" return 1 if rebuild: command = 'rebuild' elif watch: command = 'watch' elif clean: command = 'clean' elif check: command = 'check' log.setLevel(logging.DEBUG if verbose else (logging.WARNING if quiet else logging.INFO)) cmdenv = CommandLineEnvironment(environment, log) if loaders: log.info('Finding bundles...') for loader in loaders: environment.add(*[b for b in loader.load_bundles() if not b.is_container]) cmdenv.invoke(command) return action
Make the "check" command available via the Werkzeug extension.
import logging

from webassets.script import CommandLineEnvironment


__all__ = ('make_assets_action',)


def make_assets_action(environment, loaders=[]):
    """Creates a ``werkzeug.script`` action which interfaces with the
    webassets command line tools.

    Since Werkzeug does not provide a way to have subcommands, we need
    to model the assets subcommands as options.

    If ``loaders`` is given, the command will use these loaders to add
    bundles to the environment. This is mainly useful if you are
    defining your bundles inside templates only, and need to find them
    first using something like the Jinja2Loader.
    """
    log = logging.getLogger('webassets')
    log.addHandler(logging.StreamHandler())

    def action(rebuild=False, watch=False, clean=False,
               quiet=('q', False), verbose=('v', False)):
        if len(filter(bool, [rebuild, watch, clean])) != 1:
            print "Error: exactly one of --rebuild, --watch or --clean must be given"
            return 1
        if rebuild:
            command = 'rebuild'
        elif watch:
            command = 'watch'
        elif clean:
            command = 'clean'
        log.setLevel(logging.DEBUG if verbose else (
            logging.WARNING if quiet else logging.INFO))
        cmdenv = CommandLineEnvironment(environment, log)
        if loaders:
            log.info('Finding bundles...')
            for loader in loaders:
                environment.add(*[b for b in loader.load_bundles()
                                  if not b.is_container])
        cmdenv.invoke(command)
    return action
2dcfbc9dfecef4920a8dec9f3d2362f5ece13612
sympy/printing/tests/test_numpy.py
sympy/printing/tests/test_numpy.py
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter


def test_numpy_piecewise_regression():
    """
    NumPyPrinter needs to print Piecewise()'s choicelist as a list to
    avoid breaking compatibility with numpy 1.8. This is not necessary
    in numpy 1.9+. See gh-9747 and gh-9749 for details.
    """
    p = Piecewise((1, x < 0), (0, True))
    assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
Add test for NumPyPrinter regression
Add test for NumPyPrinter regression
Python
bsd-3-clause
kevalds51/sympy,aktech/sympy,maniteja123/sympy,madan96/sympy,atreyv/sympy,madan96/sympy,jbbskinny/sympy,Vishluck/sympy,iamutkarshtiwari/sympy,skidzo/sympy,chaffra/sympy,jbbskinny/sympy,saurabhjn76/sympy,kaichogami/sympy,sahmed95/sympy,abhiii5459/sympy,wyom/sympy,wyom/sympy,drufat/sympy,oliverlee/sympy,kumarkrishna/sympy,oliverlee/sympy,Vishluck/sympy,saurabhjn76/sympy,iamutkarshtiwari/sympy,shikil/sympy,kaichogami/sympy,VaibhavAgarwalVA/sympy,cswiercz/sympy,shikil/sympy,Davidjohnwilson/sympy,sahmed95/sympy,chaffra/sympy,yashsharan/sympy,Arafatk/sympy,mcdaniel67/sympy,rahuldan/sympy,jaimahajan1997/sympy,maniteja123/sympy,kumarkrishna/sympy,Vishluck/sympy,yashsharan/sympy,Shaswat27/sympy,kevalds51/sympy,chaffra/sympy,rahuldan/sympy,emon10005/sympy,jaimahajan1997/sympy,kaushik94/sympy,iamutkarshtiwari/sympy,debugger22/sympy,mafiya69/sympy,jerli/sympy,Titan-C/sympy,Shaswat27/sympy,sampadsaha5/sympy,cswiercz/sympy,grevutiu-gabriel/sympy,madan96/sympy,atreyv/sympy,Shaswat27/sympy,wanglongqi/sympy,kaichogami/sympy,Curious72/sympy,mafiya69/sympy,wyom/sympy,yukoba/sympy,lindsayad/sympy,ga7g08/sympy,hargup/sympy,AkademieOlympia/sympy,ga7g08/sympy,abhiii5459/sympy,yashsharan/sympy,debugger22/sympy,sampadsaha5/sympy,souravsingh/sympy,pandeyadarsh/sympy,moble/sympy,jbbskinny/sympy,ChristinaZografou/sympy,moble/sympy,drufat/sympy,Designist/sympy,kaushik94/sympy,debugger22/sympy,lindsayad/sympy,MechCoder/sympy,ChristinaZografou/sympy,emon10005/sympy,MechCoder/sympy,abhiii5459/sympy,ga7g08/sympy,mafiya69/sympy,postvakje/sympy,grevutiu-gabriel/sympy,farhaanbukhsh/sympy,skidzo/sympy,jerli/sympy,kevalds51/sympy,skidzo/sympy,yukoba/sympy,farhaanbukhsh/sympy,jerli/sympy,moble/sympy,drufat/sympy,ahhda/sympy,rahuldan/sympy,sahmed95/sympy,AkademieOlympia/sympy,mcdaniel67/sympy,jaimahajan1997/sympy,Curious72/sympy,mcdaniel67/sympy,postvakje/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,Titan-C/sympy,pandeyadarsh/sympy,cswiercz/sympy,sampadsaha5/sympy,oliverlee/sympy,aktech/sympy,Designist/sympy,maniteja123/sympy,saurabhjn76/sympy,Designist/sympy,Curious72/sympy,hargup/sympy,Arafatk/sympy,atreyv/sympy,wanglongqi/sympy,wanglongqi/sympy,MechCoder/sympy,souravsingh/sympy,ahhda/sympy,postvakje/sympy,yukoba/sympy,kaushik94/sympy,aktech/sympy,ChristinaZografou/sympy,farhaanbukhsh/sympy,kumarkrishna/sympy,Davidjohnwilson/sympy,shikil/sympy,emon10005/sympy,hargup/sympy,Arafatk/sympy,lindsayad/sympy,Titan-C/sympy,pandeyadarsh/sympy,Davidjohnwilson/sympy,VaibhavAgarwalVA/sympy,ahhda/sympy,grevutiu-gabriel/sympy,souravsingh/sympy
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter


def test_numpy_piecewise_regression():
    """
    NumPyPrinter needs to print Piecewise()'s choicelist as a list to
    avoid breaking compatibility with numpy 1.8. This is not necessary
    in numpy 1.9+. See gh-9747 and gh-9749 for details.
    """
    p = Piecewise((1, x < 0), (0, True))
    assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
Add test for NumPyPrinter regression
57ed6bb3994342fce594c9cbbb0ecde4ee8c117c
setup.py
setup.py
from setuptools import setup


setup(
    name="Flask-Redistore",
    version="1.0",
    url="",
    license="BSD",
    author="Donald Stufft",
    author_email="[email protected]",
    description="Adds Redis support to your Flask applications",
    long_description=open("README.rst").read(),
    py_modules=["flask_redistore"],
    zip_safe=False,
    include_package_data=True,
    platforms="any",
    install_requires=[
        "Flask",
        "redis",
    ],
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
import sys

from setuptools import setup
from setuptools.command.test import test as TestCommand


class PyTest(TestCommand):
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        #import here, cause outside the eggs aren't loaded
        import pytest
        sys.exit(pytest.main(self.test_args))


setup(
    name="Flask-Redistore",
    version="1.0",
    url="",
    license="BSD",
    author="Donald Stufft",
    author_email="[email protected]",
    description="Adds Redis support to your Flask applications",
    long_description=open("README.rst").read(),
    py_modules=["flask_redistore"],
    zip_safe=False,
    include_package_data=True,
    platforms="any",
    install_requires=[
        "Flask",
        "redis",
    ],
    extras_require={"tests": ["pytest"]},
    tests_require=["pytest"],
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    cmdclass={"test": PyTest},
)
Enable running tests with py.test
Enable running tests with py.test
Python
bsd-2-clause
dstufft/Flask-Redistore
import sys

from setuptools import setup
from setuptools.command.test import test as TestCommand


class PyTest(TestCommand):
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        #import here, cause outside the eggs aren't loaded
        import pytest
        sys.exit(pytest.main(self.test_args))


setup(
    name="Flask-Redistore",
    version="1.0",
    url="",
    license="BSD",
    author="Donald Stufft",
    author_email="[email protected]",
    description="Adds Redis support to your Flask applications",
    long_description=open("README.rst").read(),
    py_modules=["flask_redistore"],
    zip_safe=False,
    include_package_data=True,
    platforms="any",
    install_requires=[
        "Flask",
        "redis",
    ],
    extras_require={"tests": ["pytest"]},
    tests_require=["pytest"],
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    cmdclass={"test": PyTest},
)
Enable running tests with py.test
from setuptools import setup


setup(
    name="Flask-Redistore",
    version="1.0",
    url="",
    license="BSD",
    author="Donald Stufft",
    author_email="[email protected]",
    description="Adds Redis support to your Flask applications",
    long_description=open("README.rst").read(),
    py_modules=["flask_redistore"],
    zip_safe=False,
    include_package_data=True,
    platforms="any",
    install_requires=[
        "Flask",
        "redis",
    ],
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
5cd3b53f677fd6ab6e77bee5b7d42cf2ac85e47f
feincms/apps.py
feincms/apps.py
# flake8: noqa
from feincms.content.application.models import *
def __getattr__(key):
    # Work around Django 3.2's autoloading of *.apps modules (AppConfig
    # autodiscovery)
    if key in {
        "ApplicationContent",
        "app_reverse",
        "app_reverse_lazy",
        "permalink",
        "UnpackTemplateResponse",
        "standalone",
        "unpack",
    }:
        from feincms.content.application import models

        return getattr(models, key)

    raise AttributeError("Unknown attribute '%s'" % key)
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
Python
bsd-3-clause
mjl/feincms,feincms/feincms,mjl/feincms,feincms/feincms,feincms/feincms,mjl/feincms
def __getattr__(key):
    # Work around Django 3.2's autoloading of *.apps modules (AppConfig
    # autodiscovery)
    if key in {
        "ApplicationContent",
        "app_reverse",
        "app_reverse_lazy",
        "permalink",
        "UnpackTemplateResponse",
        "standalone",
        "unpack",
    }:
        from feincms.content.application import models

        return getattr(models, key)

    raise AttributeError("Unknown attribute '%s'" % key)
Add a workaround for the AppConfig autodiscovery crashes with Django 3.2
# flake8: noqa
from feincms.content.application.models import *
1b97aa2dae43a8988802ca532a3200f444f85db3
markups/common.py
markups/common.py
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012

import os.path

# Some common constants and functions

(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)

CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
                     os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js'


def get_pygments_stylesheet(selector):
    try:
        from pygments.formatters import HtmlFormatter
    except ImportError:
        return ''
    else:
        return HtmlFormatter().get_style_defs(selector) + '\n'


def get_mathjax_url(webenv):
    if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
        return MATHJAX_LOCAL_URL
    else:
        return MATHJAX_WEB_URL
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012

import os.path

# Some common constants and functions

(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)

CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
                     os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js'

PYGMENTS_STYLE = 'default'


def get_pygments_stylesheet(selector, style=None):
    try:
        from pygments.formatters import HtmlFormatter
    except ImportError:
        return ''
    else:
        return HtmlFormatter(style=(style or PYGMENTS_STYLE)).get_style_defs(selector) + '\n'


def get_mathjax_url(webenv):
    if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
        return MATHJAX_LOCAL_URL
    else:
        return MATHJAX_WEB_URL
Add initial support for pygments styles
Add initial support for pygments styles
Python
bsd-3-clause
retext-project/pymarkups,mitya57/pymarkups
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012

import os.path

# Some common constants and functions

(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)

CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
                     os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js'

PYGMENTS_STYLE = 'default'


def get_pygments_stylesheet(selector, style=None):
    try:
        from pygments.formatters import HtmlFormatter
    except ImportError:
        return ''
    else:
        return HtmlFormatter(style=(style or PYGMENTS_STYLE)).get_style_defs(selector) + '\n'


def get_mathjax_url(webenv):
    if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
        return MATHJAX_LOCAL_URL
    else:
        return MATHJAX_WEB_URL
Add initial support for pygments styles
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012

import os.path

# Some common constants and functions

(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)

CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
                     os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'http://cdn.mathjax.org/mathjax/latest/MathJax.js'


def get_pygments_stylesheet(selector):
    try:
        from pygments.formatters import HtmlFormatter
    except ImportError:
        return ''
    else:
        return HtmlFormatter().get_style_defs(selector) + '\n'


def get_mathjax_url(webenv):
    if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
        return MATHJAX_LOCAL_URL
    else:
        return MATHJAX_WEB_URL
e5fa10e27d9c5911b0238d23fc13acc081accc79
utils/dates.py
utils/dates.py
# This file is part of e-Giełda.
# Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda.  If not, see <http://www.gnu.org/licenses/>.

from datetime import datetime, timedelta

DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'


def datetime_html_format(date):
    return date.strftime("%Y-%m-%dT%H:%M")


def datetime_to_string(datetime):
    return datetime.strftime(datetime, DT_FORMAT)


def string_to_datetime(date):
    return datetime.strptime(date, DT_FORMAT)


def date_range(start_date, end_date):
    return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
# This file is part of e-Giełda.
# Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda.  If not, see <http://www.gnu.org/licenses/>.

from datetime import datetime, timedelta

DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'


def datetime_html_format(date):
    return date.strftime("%Y-%m-%dT%H:%M")


def datetime_to_string(date):
    return date.strftime(DT_FORMAT)


def string_to_datetime(date):
    return datetime.strptime(date, DT_FORMAT)


def date_range(start_date, end_date):
    return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
Fix error on date save
Fix error on date save
Python
agpl-3.0
m4tx/egielda,m4tx/egielda,m4tx/egielda
# This file is part of e-Giełda.
# Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda.  If not, see <http://www.gnu.org/licenses/>.

from datetime import datetime, timedelta

DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'


def datetime_html_format(date):
    return date.strftime("%Y-%m-%dT%H:%M")


def datetime_to_string(date):
    return date.strftime(DT_FORMAT)


def string_to_datetime(date):
    return datetime.strptime(date, DT_FORMAT)


def date_range(start_date, end_date):
    return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
Fix error on date save
# This file is part of e-Giełda.
# Copyright (C) 2014-2015 Mateusz Maćkowski and Tomasz Zieliński
#
# e-Giełda is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# You should have received a copy of the GNU Affero General Public License
# along with e-Giełda.  If not, see <http://www.gnu.org/licenses/>.

from datetime import datetime, timedelta

DT_FORMAT = '%Y-%m-%d %H:%M:%S%z'


def datetime_html_format(date):
    return date.strftime("%Y-%m-%dT%H:%M")


def datetime_to_string(datetime):
    return datetime.strftime(datetime, DT_FORMAT)


def string_to_datetime(date):
    return datetime.strptime(date, DT_FORMAT)


def date_range(start_date, end_date):
    return list(start_date + timedelta(x) for x in range((end_date - start_date).days + 1))
3785b2804c88215114e0bb21f1aab6dc0554b30c
django_react_templatetags/ssr/hypernova.py
django_react_templatetags/ssr/hypernova.py
import logging
import json

from django.conf import settings
import hypernova
from hypernova.plugins.dev_mode import DevModePlugin

logger = logging.getLogger(__name__)


class HypernovaService():
    def load_or_empty(self, component, headers={}, ssr_context=None):
        renderer = hypernova.Renderer(
            settings.REACT_RENDER_HOST,
            [DevModePlugin(logger)] if settings.DEBUG else [],
            timeout=get_request_timeout(),
            headers=headers,
        )

        inner_html = ""
        try:
            inner_html = renderer.render({component['name']: component['json']})
        except Exception as e:
            msg = "SSR request to '{}' failed: {}".format(
                settings.REACT_RENDER_HOST, e.__class__.__name__
            )
            logger.exception(msg)

        return inner_html


def get_request_timeout():
    if not hasattr(settings, 'REACT_RENDER_TIMEOUT'):
        return 20

    return settings.REACT_RENDER_TIMEOUT
import logging
import json

from django.conf import settings
import hypernova

logger = logging.getLogger(__name__)


class HypernovaService():
    def load_or_empty(self, component, headers={}, ssr_context=None):
        # from hypernova.plugins.dev_mode import DevModePlugin

        renderer = hypernova.Renderer(
            settings.REACT_RENDER_HOST,
            # [DevModePlugin(logger)] if settings.DEBUG else [],
            [],
            timeout=get_request_timeout(),
            headers=headers,
        )

        inner_html = ""
        try:
            inner_html = renderer.render({component['name']: component['json']})
        except Exception as e:
            msg = "SSR request to '{}' failed: {}".format(
                settings.REACT_RENDER_HOST, e.__class__.__name__
            )
            logger.exception(msg)

        return inner_html


def get_request_timeout():
    if not hasattr(settings, 'REACT_RENDER_TIMEOUT'):
        return 20

    return settings.REACT_RENDER_TIMEOUT
Disable DevModePlugin until py3 fix is fixed upstream
Disable DevModePlugin until py3 fix is fixed upstream
Python
mit
Frojd/django-react-templatetags,Frojd/django-react-templatetags,Frojd/django-react-templatetags
import logging
import json

from django.conf import settings
import hypernova

logger = logging.getLogger(__name__)


class HypernovaService():
    def load_or_empty(self, component, headers={}, ssr_context=None):
        # from hypernova.plugins.dev_mode import DevModePlugin

        renderer = hypernova.Renderer(
            settings.REACT_RENDER_HOST,
            # [DevModePlugin(logger)] if settings.DEBUG else [],
            [],
            timeout=get_request_timeout(),
            headers=headers,
        )

        inner_html = ""
        try:
            inner_html = renderer.render({component['name']: component['json']})
        except Exception as e:
            msg = "SSR request to '{}' failed: {}".format(
                settings.REACT_RENDER_HOST, e.__class__.__name__
            )
            logger.exception(msg)

        return inner_html


def get_request_timeout():
    if not hasattr(settings, 'REACT_RENDER_TIMEOUT'):
        return 20

    return settings.REACT_RENDER_TIMEOUT
Disable DevModePlugin until py3 fix is fixed upstream
import logging
import json

from django.conf import settings
import hypernova
from hypernova.plugins.dev_mode import DevModePlugin

logger = logging.getLogger(__name__)


class HypernovaService():
    def load_or_empty(self, component, headers={}, ssr_context=None):
        renderer = hypernova.Renderer(
            settings.REACT_RENDER_HOST,
            [DevModePlugin(logger)] if settings.DEBUG else [],
            timeout=get_request_timeout(),
            headers=headers,
        )

        inner_html = ""
        try:
            inner_html = renderer.render({component['name']: component['json']})
        except Exception as e:
            msg = "SSR request to '{}' failed: {}".format(
                settings.REACT_RENDER_HOST, e.__class__.__name__
            )
            logger.exception(msg)

        return inner_html


def get_request_timeout():
    if not hasattr(settings, 'REACT_RENDER_TIMEOUT'):
        return 20

    return settings.REACT_RENDER_TIMEOUT
fbd37fe6404bfc1e7cec4b2137c19e7323cdde02
street_score/project/urls.py
street_score/project/urls.py
from django.conf.urls import patterns, include, url
from django.views import generic as views

from . import resources

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', views.TemplateView.as_view(template_name='index.html'),
        name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(),
        name='rating_list'),
    url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(),
        name='rating_instance'),

    url(r'^survey_session', resources.SurveySessionView.as_view(),
        name='survey_session_instance')
)
from django.conf.urls import patterns, include, url
from django.views import generic as views

from . import resources

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', views.TemplateView.as_view(template_name='index.html'),
        name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(),
        name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(),
        name='rating_instance'),

    url(r'^survey_session', resources.SurveySessionView.as_view(),
        name='survey_session_instance')
)
Correct the url for the rating instance resource
Correct the url for the rating instance resource
Python
mit
openplans/streetscore,openplans/streetscore,openplans/streetscore
from django.conf.urls import patterns, include, url
from django.views import generic as views

from . import resources

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', views.TemplateView.as_view(template_name='index.html'),
        name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(),
        name='rating_list'),
    url(r'^ratings/(?P<id>\d+)$', resources.RatingInstanceView.as_view(),
        name='rating_instance'),

    url(r'^survey_session', resources.SurveySessionView.as_view(),
        name='survey_session_instance')
)
Correct the url for the rating instance resource
from django.conf.urls import patterns, include, url
from django.views import generic as views

from . import resources

# Uncomment the next two lines to enable the admin:
from django.contrib.gis import admin
admin.autodiscover()

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'project.views.home', name='home'),
    # url(r'^project/', include('project.foo.urls')),

    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    url(r'^admin/', include(admin.site.urls)),

    url(r'^$', views.TemplateView.as_view(template_name='index.html'),
        name='home'),

    url(r'^ratings/$', resources.RatingListView.as_view(),
        name='rating_list'),
    url(r'^ratings/(P<id>\d+)$', resources.RatingInstanceView.as_view(),
        name='rating_instance'),

    url(r'^survey_session', resources.SurveySessionView.as_view(),
        name='survey_session_instance')
)
1ba88cf7d087c2783306854ea3fbc16c5fe17df4
wagtail/utils/compat.py
wagtail/utils/compat.py
def get_related_model(rel):
    # In Django 1.7 and under, the related model is accessed by doing: rel.model
    # This was renamed in Django 1.8 to rel.related_model. rel.model now returns
    # the base model.
    return getattr(rel, 'related_model', rel.model)
import django


def get_related_model(rel):
    # In Django 1.7 and under, the related model is accessed by doing: rel.model
    # This was renamed in Django 1.8 to rel.related_model. rel.model now returns
    # the base model.
    if django.VERSION >= (1, 8):
        return rel.related_model
    else:
        return rel.model
Check Django version instead of hasattr
Check Django version instead of hasattr
Python
bsd-3-clause
mayapurmedia/wagtail,chrxr/wagtail,darith27/wagtail,mjec/wagtail,rv816/wagtail,rsalmaso/wagtail,stevenewey/wagtail,KimGlazebrook/wagtail-experiment,kurtw/wagtail,serzans/wagtail,m-sanders/wagtail,KimGlazebrook/wagtail-experiment,JoshBarr/wagtail,JoshBarr/wagtail,inonit/wagtail,kaedroho/wagtail,zerolab/wagtail,FlipperPA/wagtail,m-sanders/wagtail,wagtail/wagtail,mikedingjan/wagtail,janusnic/wagtail,rjsproxy/wagtail,bjesus/wagtail,Tivix/wagtail,rv816/wagtail,inonit/wagtail,WQuanfeng/wagtail,iansprice/wagtail,rsalmaso/wagtail,serzans/wagtail,inonit/wagtail,Toshakins/wagtail,nimasmi/wagtail,nutztherookie/wagtail,rjsproxy/wagtail,bjesus/wagtail,Tivix/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,timorieber/wagtail,nrsimha/wagtail,takeflight/wagtail,takeflight/wagtail,darith27/wagtail,Klaudit/wagtail,kurtrwall/wagtail,rjsproxy/wagtail,marctc/wagtail,m-sanders/wagtail,nilnvoid/wagtail,kurtw/wagtail,nrsimha/wagtail,takeshineshiro/wagtail,rv816/wagtail,inonit/wagtail,WQuanfeng/wagtail,iansprice/wagtail,rsalmaso/wagtail,serzans/wagtail,inonit/wagtail,Toshakins/wagtail,nimasmi/wagtail,nutztherookie/wagtail,janusnic/wagtail,rsalmaso/wagtail,nealtodd/wagtail,thenewguy/wagtail,takeflight/wagtail,tangentlabs/wagtail,mayapurmedia/wagtail,FlipperPA/wagtail,timorieber/wagtail,nrsimha/wagtail,marctc/wagtail,Tivix/wagtail,mephizzle/wagtail,jordij/wagtail,tangentlabs/wagtail,mjec/wagtail,WQuanfeng/wagtail,janusnic/wagtail,nealtodd/wagtail,thenewguy/wagtail,takeflight/wagtail,Klaudit/wagtail,kurtrwall/wagtail,taedori81/wagtail,nimasmi/wagtail,hanpama/wagtail,quru/wagtail,FlipperPA/wagtail,hanpama/wagtail,nealtodd/wagtail,taedori81/wagtail,darith27/wagtail,Klaudit/wagtail,kurtrwall/wagtail,nilnvoid/wagtail,hamsterbacke23/wagtail,quru/wagtail,stevenewey/wagtail,taedori81/wagtail,kaedroho/wagtail,mjec/wagtail,mephizzle/wagtail,serzans/wagtail,Toshakins/wagtail,Pennebaker/wagtail,mixxorz/wagtail,jordij/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,marctc/wagtail,mixxorz/wagtail,wagtail/wagtail,zerolab/wagtail,nilnvoid/wagtail,hanpama/wagtail,timorieber/wagtail,torchbox/wagtail,hamsterbacke23/wagtail,davecranwell/wagtail,rjsproxy/wagtail,kurtw/wagtail,hanpama/wagtail,nealtodd/wagtail,taedori81/wagtail,mephizzle/wagtail,jordij/wagtail,iansprice/wagtail,rsalmaso/wagtail,mayapurmedia/wagtail,mixxorz/wagtail,wagtail/wagtail,chrxr/wagtail,gogobook/wagtail,thenewguy/wagtail,quru/wagtail,darith27/wagtail,thenewguy/wagtail,iansprice/wagtail,iho/wagtail,kaedroho/wagtail,gasman/wagtail,jordij/wagtail,hamsterbacke23/wagtail,WQuanfeng/wagtail,marctc/wagtail,rsalmaso/wagtail,Toshakins/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,gasman/wagtail,nutztherookie/wagtail,takeshineshiro/wagtail,janusnic/wagtail,rv816/wagtail,gogobook/wagtail,rjsproxy/wagtail,kurtrwall/wagtail,kaedroho/wagtail,mjec/wagtail,mephizzle/wagtail,nealtodd/wagtail,WQuanfeng/wagtail,iansprice/wagtail,Tivix/wagtail,wagtail/wagtail,JoshBarr/wagtail,stevenewey/wagtail,Pennebaker/wagtail,Klaudit/wagtail,wagtail/wagtail,iansprice/wagtail,tangentlabs/wagtail,chrxr/wagtail,timorieber/wagtail,rv816/wagtail,mayapurmedia/wagtail,jordij/wagtail,jnns/wagtail,zerolab/wagtail,jnns/wagtail,zerolab/wagtail,serzans/wagtail,gasman/wagtail,mixxorz/wagtail,inonit/wagtail,davecranwell/wagtail,jnns/wagtail,Toshakins/wagtail,kaedroho/wagtail,jnns/wagtail,mixxorz/wagtail,iho/wagtail,gogobook/wagtail,takeflight/wagtail,kurtw/wagtail
import django


def get_related_model(rel):
    # In Django 1.7 and under, the related model is accessed by doing: rel.model
    # This was renamed in Django 1.8 to rel.related_model. rel.model now returns
    # the base model.
    if django.VERSION >= (1, 8):
        return rel.related_model
    else:
        return rel.model
Check Django version instead of hasattr
def get_related_model(rel):
    # In Django 1.7 and under, the related model is accessed by doing: rel.model
    # This was renamed in Django 1.8 to rel.related_model. rel.model now returns
    # the base model.
    return getattr(rel, 'related_model', rel.model)
21d5acb0ed340f15feccd5938ae51d47739f930a
falmer/commercial/queries.py
falmer/commercial/queries.py
import graphene

from .models import Offer
from . import types


class Query(graphene.ObjectType):
    all_offers = graphene.List(types.Offer)

    def resolve_all_offers(self, info):
        return Offer.objects.all()
import graphene

from .models import Offer
from . import types


class Query(graphene.ObjectType):
    all_offers = graphene.List(types.Offer)

    def resolve_all_offers(self, info):
        return Offer.objects.order_by('company_name').all()
Order offers by company name
Order offers by company name

Closes #373
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
import graphene

from .models import Offer
from . import types


class Query(graphene.ObjectType):
    all_offers = graphene.List(types.Offer)

    def resolve_all_offers(self, info):
        return Offer.objects.order_by('company_name').all()
Order offers by company name

Closes #373
import graphene

from .models import Offer
from . import types


class Query(graphene.ObjectType):
    all_offers = graphene.List(types.Offer)

    def resolve_all_offers(self, info):
        return Offer.objects.all()
62017dc7dc210d09e8f6753ad86365ac679f4a0a
oscar/apps/catalogue/categories.py
oscar/apps/catalogue/categories.py
from django.db.models import get_model

Category = get_model('catalogue', 'category')


def create_from_sequence(bits):
    """
    Create categories from an iterable
    """
    if len(bits) == 1:
        # Get or create root node
        try:
            root = Category.objects.get(depth=1, name=bits[0])
        except Category.DoesNotExist:
            root = Category.add_root(name=bits[0])
        return [root]
    else:
        parents = create_from_sequence(bits[:-1])
        try:
            child = parents[-1].get_children().get(name=bits[-1])
        except Category.DoesNotExist:
            child = parents[-1].add_child(name=bits[-1])
        parents.append(child)
        return parents


def create_from_breadcrumbs(breadcrumb_str, separator='>'):
    """
    Create categories from a breadcrumb string
    """
    category_names = [x.strip() for x in breadcrumb_str.split(separator)]
    categories = create_from_sequence(category_names)
    return categories[-1]
from django.db.models import get_model

Category = get_model('catalogue', 'category')


def create_from_sequence(bits):
    """
    Create categories from an iterable
    """
    if len(bits) == 1:
        # Get or create root node
        name = bits[0]
        try:
            # Category names should be unique at the depth=1
            root = Category.objects.get(depth=1, name=name)
        except Category.DoesNotExist:
            root = Category.add_root(name=name)
        except Category.MultipleObjectsReturned:
            raise ValueError((
                "There are more than one categories with name "
                "%s at depth=1") % name)
        return [root]
    else:
        parents = create_from_sequence(bits[:-1])
        parent, name = parents[-1], bits[-1]
        try:
            child = parent.get_children().get(name=name)
        except Category.DoesNotExist:
            child = parent.add_child(name=name)
        except Category.MultipleObjectsReturned:
            raise ValueError((
                "There are more than one categories with name "
                "%s which are children of %s") % (name, parent))
        parents.append(child)
        return parents


def create_from_breadcrumbs(breadcrumb_str, separator='>'):
    """
    Create categories from a breadcrumb string
    """
    category_names = [x.strip() for x in breadcrumb_str.split(separator)]
    categories = create_from_sequence(category_names)
    return categories[-1]
Rework category creation from breadcrumbs
Rework category creation from breadcrumbs

We now handle MultipleObjectsReturned exceptions, which are possible as we
are looking up based on non-unique filters.
Python
bsd-3-clause
vovanbo/django-oscar,adamend/django-oscar,MatthewWilkes/django-oscar,manevant/django-oscar,django-oscar/django-oscar,WadeYuChen/django-oscar,elliotthill/django-oscar,sasha0/django-oscar,WillisXChen/django-oscar,jinnykoo/wuyisj,Jannes123/django-oscar,makielab/django-oscar,WadeYuChen/django-oscar,lijoantony/django-oscar,jinnykoo/christmas,elliotthill/django-oscar,vovanbo/django-oscar,dongguangming/django-oscar,adamend/django-oscar,WillisXChen/django-oscar,ka7eh/django-oscar,dongguangming/django-oscar,bschuon/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,solarissmoke/django-oscar,WadeYuChen/django-oscar,pdonadeo/django-oscar,DrOctogon/unwash_ecom,nfletton/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,jmt4/django-oscar,Jannes123/django-oscar,saadatqadri/django-oscar,ademuk/django-oscar,MatthewWilkes/django-oscar,eddiep1101/django-oscar,QLGu/django-oscar,ademuk/django-oscar,bnprk/django-oscar,eddiep1101/django-oscar,adamend/django-oscar,WillisXChen/django-oscar,maniteja123/django-oscar,kumarkrishna/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,bnprk/django-oscar,eddiep1101/django-oscar,mexeniz/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,vovanbo/django-oscar,marcoantoniooliveira/labweb,taedori81/django-oscar,kapari/django-oscar,manevant/django-oscar,jmt4/django-oscar,makielab/django-oscar,nfletton/django-oscar,jmt4/django-oscar,rocopartners/django-oscar,thechampanurag/django-oscar,Jannes123/django-oscar,monikasulik/django-oscar,manevant/django-oscar,marcoantoniooliveira/labweb,kapt/django-oscar,okfish/django-oscar,QLGu/django-oscar,okfish/django-oscar,nickpack/django-oscar,solarissmoke/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,itbabu/django-oscar,okfish/django-oscar,nickpack/django-oscar,dongguangming/django-oscar,kapt/django-oscar,kapari/django-oscar,itbabu/django-oscar,sonofatailor/django-oscar,vovanbo/django-oscar,Bogh/django-oscar,josesanch/django-oscar,kapt/django-oscar,adamend/django-oscar,jinnykoo/wuyisj,mexeniz/django-oscar,sahmed95/django-oscar,chaffra/django-oscar,okfish/django-oscar,faratro/django-oscar,QLGu/django-oscar,Bogh/django-oscar,josesanch/django-oscar,Idematica/django-oscar,anentropic/django-oscar,dongguangming/django-oscar,faratro/django-oscar,itbabu/django-oscar,jinnykoo/christmas,jinnykoo/wuyisj.com,WadeYuChen/django-oscar,itbabu/django-oscar,pdonadeo/django-oscar,monikasulik/django-oscar,ahmetdaglarbas/e-commerce,eddiep1101/django-oscar,django-oscar/django-oscar,thechampanurag/django-oscar,makielab/django-oscar,binarydud/django-oscar,josesanch/django-oscar,monikasulik/django-oscar,machtfit/django-oscar,lijoantony/django-oscar,taedori81/django-oscar,sonofatailor/django-oscar,lijoantony/django-oscar,faratro/django-oscar,bschuon/django-oscar,amirrpp/django-oscar,taedori81/django-oscar,DrOctogon/unwash_ecom,michaelkuty/django-oscar,nfletton/django-oscar,WillisXChen/django-oscar,solarissmoke/django-oscar,bnprk/django-oscar,john-parton/django-oscar,saadatqadri/django-oscar,Idematica/django-oscar,mexeniz/django-oscar,jlmadurga/django-oscar,MatthewWilkes/django-oscar,MatthewWilkes/django-oscar,ahmetdaglarbas/e-commerce,bnprk/django-oscar,django-oscar/django-oscar,ademuk/django-oscar,bschuon/django-oscar,Idematica/django-oscar,ahmetdaglarbas/e-commerce,jlmadurga/django-oscar,amirrpp/django-oscar,binarydud/django-oscar,amirrpp/django-oscar,mexeniz/django-oscar,pdonadeo/django-oscar,spartonia/django-oscar,bschuon/django-oscar,rocopartners/django-oscar,makielab/django-oscar,jmt4/django-oscar,sonofatailor/django-oscar,thechampanurag/django-oscar,nickpack/django-oscar,ka7eh/django-oscar,sasha0/django-oscar,anentropic/django-oscar,jinnykoo/christmas,michaelkuty/django-oscar,pasqualguerrero/django-oscar,binarydud/django-oscar,machtfit/django-oscar,pasqualguerrero/django-oscar,saadatqadri/django-oscar,django-oscar/django-oscar,jinnykoo/wuyisj,jlmadurga/django-oscar,rocopartners/django-oscar,pasqualguerrero/django-oscar,QLGu/django-oscar,pdonadeo/django-oscar,thechampanurag/django-oscar,sasha0/django-oscar,nickpack/django-oscar
from django.db.models import get_model

Category = get_model('catalogue', 'category')


def create_from_sequence(bits):
    """
    Create categories from an iterable
    """
    if len(bits) == 1:
        # Get or create root node
        name = bits[0]
        try:
            # Category names should be unique at the depth=1
            root = Category.objects.get(depth=1, name=name)
        except Category.DoesNotExist:
            root = Category.add_root(name=name)
        except Category.MultipleObjectsReturned:
            raise ValueError((
                "There are more than one categories with name "
                "%s at depth=1") % name)
        return [root]
    else:
        parents = create_from_sequence(bits[:-1])
        parent, name = parents[-1], bits[-1]
        try:
            child = parent.get_children().get(name=name)
        except Category.DoesNotExist:
            child = parent.add_child(name=name)
        except Category.MultipleObjectsReturned:
            raise ValueError((
                "There are more than one categories with name "
                "%s which are children of %s") % (name, parent))
        parents.append(child)
        return parents


def create_from_breadcrumbs(breadcrumb_str, separator='>'):
    """
    Create categories from a breadcrumb string
    """
    category_names = [x.strip() for x in breadcrumb_str.split(separator)]
    categories = create_from_sequence(category_names)
    return categories[-1]
Rework category creation from breadcrumbs

We now handle MultipleObjectsReturned exceptions, which are possible as we
are looking up based on non-unique filters.
from django.db.models import get_model

Category = get_model('catalogue', 'category')


def create_from_sequence(bits):
    """
    Create categories from an iterable
    """
    if len(bits) == 1:
        # Get or create root node
        try:
            root = Category.objects.get(depth=1, name=bits[0])
        except Category.DoesNotExist:
            root = Category.add_root(name=bits[0])
        return [root]
    else:
        parents = create_from_sequence(bits[:-1])
        try:
            child = parents[-1].get_children().get(name=bits[-1])
        except Category.DoesNotExist:
            child = parents[-1].add_child(name=bits[-1])
        parents.append(child)
        return parents


def create_from_breadcrumbs(breadcrumb_str, separator='>'):
    """
    Create categories from a breadcrumb string
    """
    category_names = [x.strip() for x in breadcrumb_str.split(separator)]
    categories = create_from_sequence(category_names)
    return categories[-1]
b97f97710a63c1d0c501c14e49dd0e26d8fb92d5
rabbitmq-connector.py
rabbitmq-connector.py
import asyncio

import aioamqp


@asyncio.coroutine
def callback(channel, body, envelope, properties):
    print(body)


@asyncio.coroutine
def connect():
    try:
        transport, protocol = yield from aioamqp.connect()
        channel = yield from protocol.channel()
    except aioamqp.AmqpClosedConnection:
        print("closed connections")
        return

    yield from channel.exchange("mnemosyne", "traces", durable=True)
    yield from channel.queue(queue_name="mnemosyne-server", durable=True)
    yield from channel.queue_bind(exchange_name="mnemosyne",
                                  queue_name="hello",
                                  routing_key="#")

    print(' [*] Waiting for logs. To exit press CTRL+C')

    yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)

    # close using the `AMQP` protocol
    #yield from protocol.close()

    # ensure the socket is closed.
    #transport.close()


event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
Add basic python script for recieving mnemosyne AMQP messages
Add basic python script for recieving mnemosyne AMQP messages
Python
agpl-3.0
jgraichen/mnemosyne,jgraichen/mnemosyne,jgraichen/mnemosyne
import asyncio

import aioamqp


@asyncio.coroutine
def callback(channel, body, envelope, properties):
    print(body)


@asyncio.coroutine
def connect():
    try:
        transport, protocol = yield from aioamqp.connect()
        channel = yield from protocol.channel()
    except aioamqp.AmqpClosedConnection:
        print("closed connections")
        return

    yield from channel.exchange("mnemosyne", "traces", durable=True)
    yield from channel.queue(queue_name="mnemosyne-server", durable=True)
    yield from channel.queue_bind(exchange_name="mnemosyne",
                                  queue_name="hello",
                                  routing_key="#")

    print(' [*] Waiting for logs. To exit press CTRL+C')

    yield from channel.basic_consume(callback, queue_name="mnemosyne-server", no_ack=True)

    # close using the `AMQP` protocol
    #yield from protocol.close()

    # ensure the socket is closed.
    #transport.close()


event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(connect())
event_loop.run_forever()
Add basic python script for recieving mnemosyne AMQP messages
e6181c5d7c95af23ee6d51d125642104782f5cf1
Python/136_SingleNumber.py
Python/136_SingleNumber.py
class Solution(object):
    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        #Using XOR to find the single number.
        #Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is cummutative, so the order of elements does not matter.
        #Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
        res = 0
        for num in nums:
            res ^= num
        return res

nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
Add solution for 136_Single Number with XOR operation.
Add solution for 136_Single Number with XOR operation.
Python
mit
comicxmz001/LeetCode,comicxmz001/LeetCode
class Solution(object):
    def singleNumber(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        #Using XOR to find the single number.
        #Because every number appears twice, while N^N=0, 0^N=N,
        #XOR is cummutative, so the order of elements does not matter.
        #Finally, it will be res = 0 ^ singlenumber ==> res = singlenumber
        res = 0
        for num in nums:
            res ^= num
        return res

nums = [1,1,5,5,3,4,4,9,9,8,8,7,7]
foo = Solution()
print foo.singleNumber(nums)
Add solution for 136_Single Number with XOR operation.
0409580aed43b6a0556fcc4b8e6e9252d9f082ea
froide/publicbody/management/commands/validate_publicbodies.py
froide/publicbody/management/commands/validate_publicbodies.py
from io import StringIO
from contextlib import contextmanager

from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from django.utils.translation import ugettext_lazy as _

from froide.helper.email_sending import send_mail

from ...validators import PublicBodyValidator
from ...models import PublicBody


class Command(BaseCommand):
    help = "Validates public bodies"

    def add_arguments(self, parser):
        parser.add_argument('filename', type=str, nargs='?', default=None)

    @contextmanager
    def get_stream(self, filename):
        if filename is None:
            stream = StringIO()
        else:
            if filename == '-':
                stream = self.stdout
            else:
                stream = open(filename, 'w')
        yield stream
        if filename is not None and filename != '-':
            stream.close()

    def handle(self, *args, **options):
        translation.activate(settings.LANGUAGE_CODE)

        filename = options['filename']
        pbs = PublicBody.objects.all().iterator()
        validator = PublicBodyValidator(pbs)

        with self.get_stream(filename) as stream:
            validator.write_csv(stream)

        if filename is None and not validator.is_valid:
            for name, email in settings.MANAGERS:
                send_mail(
                    _('Public body validation results'),
                    _('Please find attached the results of the public body validation'),
                    email,
                    attachments=[
                        ('validation_result.csv',
                         stream.getvalue().encode('utf-8'),
                         'text/csv')
                    ]
                )
from io import StringIO
from contextlib import contextmanager

from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from django.utils.translation import ugettext_lazy as _

from froide.helper.email_sending import send_mail

from ...validators import PublicBodyValidator
from ...models import PublicBody


class Command(BaseCommand):
    help = "Validates public bodies"

    def add_arguments(self, parser):
        parser.add_argument('filename', type=str, nargs='?', default=None)

    @contextmanager
    def get_stream(self, filename):
        if filename is None:
            stream = StringIO()
        else:
            if filename == '-':
                stream = self.stdout
            else:
                stream = open(filename, 'w')
        yield stream
        if filename is not None and filename != '-':
            stream.close()

    def handle(self, *args, **options):
        translation.activate(settings.LANGUAGE_CODE)

        filename = options['filename']
        pbs = PublicBody.objects.all()
        validator = PublicBodyValidator(pbs)

        with self.get_stream(filename) as stream:
            validator.write_csv(stream)

        if filename is None and not validator.is_valid:
            for name, email in settings.MANAGERS:
                send_mail(
                    _('Public body validation results'),
                    _('Please find attached the results of the public body validation'),
                    email,
                    attachments=[
                        ('validation_result.csv',
                         stream.getvalue().encode('utf-8'),
                         'text/csv')
                    ]
                )
Use queryset in validate publicbodies command
Use queryset in validate publicbodies command
Python
mit
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide
from io import StringIO
from contextlib import contextmanager

from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from django.utils.translation import ugettext_lazy as _

from froide.helper.email_sending import send_mail

from ...validators import PublicBodyValidator
from ...models import PublicBody


class Command(BaseCommand):
    help = "Validates public bodies"

    def add_arguments(self, parser):
        parser.add_argument('filename', type=str, nargs='?', default=None)

    @contextmanager
    def get_stream(self, filename):
        if filename is None:
            stream = StringIO()
        else:
            if filename == '-':
                stream = self.stdout
            else:
                stream = open(filename, 'w')
        yield stream
        if filename is not None and filename != '-':
            stream.close()

    def handle(self, *args, **options):
        translation.activate(settings.LANGUAGE_CODE)

        filename = options['filename']
        pbs = PublicBody.objects.all()
        validator = PublicBodyValidator(pbs)

        with self.get_stream(filename) as stream:
            validator.write_csv(stream)

        if filename is None and not validator.is_valid:
            for name, email in settings.MANAGERS:
                send_mail(
                    _('Public body validation results'),
                    _('Please find attached the results of the public body validation'),
                    email,
                    attachments=[
                        ('validation_result.csv',
                         stream.getvalue().encode('utf-8'),
                         'text/csv')
                    ]
                )
Use queryset in validate publicbodies command
from io import StringIO
from contextlib import contextmanager

from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import translation
from django.utils.translation import ugettext_lazy as _

from froide.helper.email_sending import send_mail

from ...validators import PublicBodyValidator
from ...models import PublicBody


class Command(BaseCommand):
    help = "Validates public bodies"

    def add_arguments(self, parser):
        parser.add_argument('filename', type=str, nargs='?', default=None)

    @contextmanager
    def get_stream(self, filename):
        if filename is None:
            stream = StringIO()
        else:
            if filename == '-':
                stream = self.stdout
            else:
                stream = open(filename, 'w')
        yield stream
        if filename is not None and filename != '-':
            stream.close()

    def handle(self, *args, **options):
        translation.activate(settings.LANGUAGE_CODE)

        filename = options['filename']
        pbs = PublicBody.objects.all().iterator()
        validator = PublicBodyValidator(pbs)

        with self.get_stream(filename) as stream:
            validator.write_csv(stream)

        if filename is None and not validator.is_valid:
            for name, email in settings.MANAGERS:
                send_mail(
                    _('Public body validation results'),
                    _('Please find attached the results of the public body validation'),
                    email,
                    attachments=[
                        ('validation_result.csv',
                         stream.getvalue().encode('utf-8'),
                         'text/csv')
                    ]
                )
d698d4ce3002db3b518e061075f294cf9b0089a6
aspc/senate/urls.py
aspc/senate/urls.py
from django.conf.urls import patterns, include, url

from aspc.senate.views import DocumentList, AppointmentList

urlpatterns = patterns('',
    url(r'^documents/$', DocumentList.as_view(), name="document_list"),
    url(r'^documents/(?P<page>[0-9]+)/$', DocumentList.as_view(),
        name="document_list_page"),
    url(r'^preview/positions/$', AppointmentList.as_view(), name="positions"),
)
from django.conf.urls import patterns, include, url

from aspc.senate.views import DocumentList, AppointmentList

urlpatterns = patterns('',
    url(r'^documents/$', DocumentList.as_view(), name="document_list"),
    url(r'^documents/(?P<page>[0-9]+)/$', DocumentList.as_view(),
        name="document_list_page"),
    url(r'^positions/$', AppointmentList.as_view(), name="positions"),
)
Remove the preview prefix from the positions URL pattern
Remove the preview prefix from the positions URL pattern
Python
mit
theworldbright/mainsite,aspc/mainsite,aspc/mainsite,aspc/mainsite,theworldbright/mainsite,aspc/mainsite,theworldbright/mainsite,theworldbright/mainsite
from django.conf.urls import patterns, include, url

from aspc.senate.views import DocumentList, AppointmentList

urlpatterns = patterns('',
    url(r'^documents/$', DocumentList.as_view(), name="document_list"),
    url(r'^documents/(?P<page>[0-9]+)/$', DocumentList.as_view(),
        name="document_list_page"),
    url(r'^positions/$', AppointmentList.as_view(), name="positions"),
)
Remove the preview prefix from the positions URL pattern
from django.conf.urls import patterns, include, url

from aspc.senate.views import DocumentList, AppointmentList

urlpatterns = patterns('',
    url(r'^documents/$', DocumentList.as_view(), name="document_list"),
    url(r'^documents/(?P<page>[0-9]+)/$', DocumentList.as_view(),
        name="document_list_page"),
    url(r'^preview/positions/$', AppointmentList.as_view(), name="positions"),
)
4570ce14333ebc0bae3e09a59f28d7170cfc4621
dci/alembic/versions/b58867f72568_add_feeder_role.py
dci/alembic/versions/b58867f72568_add_feeder_role.py
#
# Copyright (C) 2017 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add_feeder_role

Revision ID: b58867f72568
Revises: 827c558895bc
Create Date: 2017-10-27 08:29:07.283357

"""

# revision identifiers, used by Alembic.
revision = 'b58867f72568'
down_revision = '827c558895bc'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as pg

import datetime
import dci.common.utils as utils

RESOURCE_STATES = ['active', 'inactive', 'archived']
STATES = sa.Enum(*RESOURCE_STATES, name='states')

ROLES = sa.Table(
    'roles', sa.MetaData(),
    sa.Column('id', pg.UUID(as_uuid=True), primary_key=True,
              default=utils.gen_uuid),
    sa.Column('created_at', sa.DateTime(),
              default=datetime.datetime.utcnow, nullable=False),
    sa.Column('updated_at', sa.DateTime(),
              onupdate=datetime.datetime.utcnow,
              default=datetime.datetime.utcnow, nullable=False),
    sa.Column('etag', sa.String(40), nullable=False,
              default=utils.gen_etag, onupdate=utils.gen_etag),
    sa.Column('name', sa.String(255), nullable=False),
    sa.Column('label', sa.String(255), nullable=False),
    sa.Column('description', sa.Text),
    sa.UniqueConstraint('label', name='roles_label_key'),
    sa.Column('state', STATES, default='active')
)


def upgrade():
    db_conn = op.get_bind()

    feeder_role_id = utils.gen_uuid()
    feeder_role = {
        'id': feeder_role_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'etag': utils.gen_etag(),
        'name': 'Feeder',
        'label': 'FEEDER',
        'description': 'A Feeder',
    }

    db_conn.execute(ROLES.insert().values(**feeder_role))


def downgrade():
    pass
Add the feeder role in the ROLES table
Feeder: Add the feeder role in the ROLES table

Change-Id: I4c09e0a5e7d08975602a683f4cecbf993cdec4ba
Python
apache-2.0
redhat-cip/dci-control-server,enovance/dci-control-server,redhat-cip/dci-control-server,enovance/dci-control-server
#
# Copyright (C) 2017 Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add_feeder_role

Revision ID: b58867f72568
Revises: 827c558895bc
Create Date: 2017-10-27 08:29:07.283357

"""

# revision identifiers, used by Alembic.
revision = 'b58867f72568'
down_revision = '827c558895bc'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as pg

import datetime
import dci.common.utils as utils

RESOURCE_STATES = ['active', 'inactive', 'archived']
STATES = sa.Enum(*RESOURCE_STATES, name='states')

ROLES = sa.Table(
    'roles', sa.MetaData(),
    sa.Column('id', pg.UUID(as_uuid=True), primary_key=True,
              default=utils.gen_uuid),
    sa.Column('created_at', sa.DateTime(),
              default=datetime.datetime.utcnow, nullable=False),
    sa.Column('updated_at', sa.DateTime(),
              onupdate=datetime.datetime.utcnow,
              default=datetime.datetime.utcnow, nullable=False),
    sa.Column('etag', sa.String(40), nullable=False,
              default=utils.gen_etag, onupdate=utils.gen_etag),
    sa.Column('name', sa.String(255), nullable=False),
    sa.Column('label', sa.String(255), nullable=False),
    sa.Column('description', sa.Text),
    sa.UniqueConstraint('label', name='roles_label_key'),
    sa.Column('state', STATES, default='active')
)


def upgrade():
    db_conn = op.get_bind()

    feeder_role_id = utils.gen_uuid()
    feeder_role = {
        'id': feeder_role_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'etag': utils.gen_etag(),
        'name': 'Feeder',
        'label': 'FEEDER',
        'description': 'A Feeder',
    }

    db_conn.execute(ROLES.insert().values(**feeder_role))


def downgrade():
    pass
Feeder: Add the feeder role in the ROLES table

Change-Id: I4c09e0a5e7d08975602a683f4cecbf993cdec4ba