blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
288
content_id
stringlengths
40
40
detected_licenses
listlengths
0
112
license_type
stringclasses
2 values
repo_name
stringlengths
5
115
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
684 values
visit_date
timestamp[us]date
2015-08-06 10:31:46
2023-09-06 10:44:38
revision_date
timestamp[us]date
1970-01-01 02:38:32
2037-05-03 13:00:00
committer_date
timestamp[us]date
1970-01-01 02:38:32
2023-09-06 01:08:06
github_id
int64
4.92k
681M
star_events_count
int64
0
209k
fork_events_count
int64
0
110k
gha_license_id
stringclasses
22 values
gha_event_created_at
timestamp[us]date
2012-06-04 01:52:49
2023-09-14 21:59:50
gha_created_at
timestamp[us]date
2008-05-22 07:58:19
2023-08-21 12:35:19
gha_language
stringclasses
147 values
src_encoding
stringclasses
25 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
128
12.7k
extension
stringclasses
142 values
content
stringlengths
128
8.19k
authors
listlengths
1
1
author_id
stringlengths
1
132
af5ee455cb7393efd56233ca1556032ce3b6435c
4c68778814b938d91d184749b50940549439c0f3
/scheme/fields/time.py
fe6e0bb58b391be8c8074c6fe7792ac82fede471
[ "LicenseRef-scancode-warranty-disclaimer", "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference" ]
permissive
jordanm/scheme
96a747258ce68de756ffe7996b37c3e8747a740c
5a87e24b35bb2f80b474273bf2e5c5fd563214e0
refs/heads/master
2021-01-17T05:48:51.479427
2020-01-20T16:03:28
2020-01-20T16:03:28
32,604,302
8
4
NOASSERTION
2020-01-20T16:03:29
2015-03-20T20:05:12
Python
UTF-8
Python
false
false
3,174
py
from __future__ import absolute_import from datetime import time from time import strptime from scheme.exceptions import * from scheme.field import * __all__ = ('Time',) class Time(Field): """A field for time values.""" basetype = 'time' equivalent = time parameters = {'maximum': None, 'minimum': None} pattern = '%H:%M:%S' errors = [ FieldError('invalid', 'invalid value', '%(field)s must be a time value'), FieldError('minimum', 'minimum value', '%(field)s must not occur before %(minimum)s'), FieldError('maximum', 'maximum value', '%(field)s must not occur after %(maximum)s'), ] def __init__(self, minimum=None, maximum=None, **params): super(Time, self).__init__(**params) if maximum is not None: try: maximum = self._unserialize_value(maximum) except InvalidTypeError: raise TypeError("argument 'maximum' must be either None, a datetime.time," " or a string in the format 'HH:MM:SS'") if minimum is not None: try: minimum = self._unserialize_value(minimum) except InvalidTypeError: raise TypeError("argument 'minimum' must be either None, a datetime.time," " or a string in the format 'HH:MM:SS'") self.maximum = maximum self.minimum = minimum def __repr__(self): aspects = [] if self.minimum is not None: aspects.append('minimum=%r' % self.minimum) if self.maximum is not None: aspects.append('maximum=%r' % self.maximum) return super(Time, self).__repr__(aspects) def describe(self, parameters=None, verbose=False): params = {} if self.maximum is not None: params['maximum'] = self.maximum.strftime(self.pattern) if self.minimum is not None: params['minimum'] = self.minimum.strftime(self.pattern) return super(Time, self).describe(parameters=parameters, verbose=verbose, **params) def _serialize_value(self, value): return value.strftime(self.pattern) def _unserialize_value(self, value, ancestry=None): if isinstance(value, time): return value try: return time(*strptime(value, self.pattern)[3:6]) except Exception: raise InvalidTypeError(identity=ancestry, field=self, 
value=value).construct('invalid') def _validate_value(self, value, ancestry): if not isinstance(value, time): raise InvalidTypeError(identity=ancestry, field=self, value=value).construct('invalid') minimum = self.minimum if minimum is not None and value < minimum: raise ValidationError(identity=ancestry, field=self, value=value).construct('minimum', minimum=minimum.strftime(self.pattern)) maximum = self.maximum if maximum is not None and value > maximum: raise ValidationError(identity=ancestry, field=self, value=value).construct('maximum', maximum=maximum.strftime(self.pattern))
48cd42cf70cd98648276cce423fd29d9850f9d0a
f2ab8ccda7203dd37d61facb9978cf74b781c7f1
/tests/apps.py
863cf58e139c91b4d865bed2d8a46b94a061f588
[ "MIT" ]
permissive
Apkawa/easy-thumbnails-admin
1991137224dcd117520b2c114d4012daf803776e
9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4
refs/heads/master
2021-01-01T15:47:34.334792
2017-11-23T10:38:09
2017-11-23T10:38:09
97,703,157
0
0
null
null
null
null
UTF-8
Python
false
false
274
py
try: from django.apps import AppConfig except ImportError: # Early Django versions import everything in test, avoid the failure due to # AppConfig only existing in 1.7+ AppConfig = object class TestConfig(AppConfig): name = 'tests' label = 'tests'
eed58a6b703faab6b504f4b3a66b4de43ae04f0a
e75521f26a9a6fdbd0b9dbe396b14a5f3c1af305
/src/repositories/word_classifier_repository.py
10cf90739a261923161b283cb2b1127ab1de82cd
[]
no_license
Ap3lsin4k/words-as-part-of-speech
2636edb87d309d44d3d18add14aadd13f7810507
e7f35d56d65a8f5033498f650265cadbd742a9de
refs/heads/master
2023-01-31T19:01:11.007917
2020-12-15T10:57:20
2020-12-15T10:57:20
320,807,979
0
0
null
null
null
null
UTF-8
Python
false
false
1,258
py
from bookmark_entity import Bookmark from language_entity import LanguageEntity from repositories.dictionary_surfer_common import DictionarySurferRepository class WordClassifierRepository(DictionarySurferRepository): def __init__(self, dictionary_entity: LanguageEntity): super().__init__(dictionary_entity) def make_response_model(self, part_of_speech, input_word): self.result = {part_of_speech: {}} for category_of_property, properties in self.dictionary[part_of_speech].items(): bookmark = Bookmark(part_of_speech, category_of_property) self.__classify_word_by_property(bookmark, input_word) if len(self.result[part_of_speech]) == 0: self.result = None def __save_property_of_word_to_presentable_format(self, bookmark): self.result[bookmark.get_part_of_speech()].update({bookmark.category_name: bookmark.property_name}) def __classify_word_by_property(self, bookmark, input_word): for bookmark.property_name in self.dictionary.get_properties(bookmark): words_tuple = self.dictionary.get_words_for_property(bookmark) if input_word in words_tuple: self.__save_property_of_word_to_presentable_format(bookmark)
f6fece3b5719a65008ae0fbe700a817b469a7a51
e7eff96df8160d3c238bf38068c99c7b8bd3005b
/norman/web/frontend/crops.py
08fa8b6415e718d05231de41cdbcfc0273dddb39
[]
no_license
sumansai14/norman
62c3760b47f15bb474786ac045efad5aff757b95
43a8c4e53830d57eb552c3ecb98bf2926c9d0457
refs/heads/master
2021-03-16T07:57:17.076408
2017-05-23T07:36:37
2017-05-23T07:36:37
92,188,183
0
0
null
null
null
null
UTF-8
Python
false
false
170
py
from norman.web.frontend.base import BaseAuthTemplateView class OrganizationCropsListView(BaseAuthTemplateView): template_name = 'norman/organization/crops_list.html'
2cf1cde00eea109a46c3e5983b4906feef72866f
f0856e60a095ce99ec3497b3f27567803056ac60
/keras2/keras66_gradient2.py
0e0d0cc1f27912ef32b11753f760a7606dd315f8
[]
no_license
hjuju/TF_Study-HAN
dcbac17ce8b8885f5fb7d7f554230c2948fda9ac
c0faf98380e7f220868ddf83a9aaacaa4ebd2c2a
refs/heads/main
2023-09-04T09:13:33.212258
2021-10-27T08:00:49
2021-10-27T08:00:49
384,371,952
1
0
null
null
null
null
UTF-8
Python
false
false
479
py
import numpy as np import matplotlib.pyplot as plt f = lambda x: x**2 - 4 * x + 6 gradient = lambda x: 2*x - 4 # f 미분 -> 미분한 값이 0이 되는 지점이 가장 낮은지점 -> 우리가 찾는 지점 x0 = 0.0 MaxIter = 20 learning_rate = 0.25 print("step\tx\tf(x)") print("{:02d}\t{:6.5f}\t{:6.5f}".format(0, x0, f(x0))) for i in range(MaxIter): x1 = x0 - learning_rate * gradient(x0) x0 = x1 print("{:02d}\t{:6.5f}\t{:6.5f}".format(i+1, x0, f(x0)))
fb2c64c0218df858e821204c4c485f29f4b33c74
e0527bce5c53a196752d3a16adf50cb60754de5f
/10-How to Stop Programs Crashing Demos/3-is_square.py
8bf01fcece7fa35279f95d25ece62fa140398965
[]
no_license
ARWA-ALraddadi/python-tutorial-for-beginners
ddeb657f419fbc176bea273bc9fb6b88d1894191
21cedfc47871ca4d25c2382464c60ab0a2121205
refs/heads/master
2023-06-30T20:24:30.688800
2021-08-08T08:22:29
2021-08-08T08:22:29
193,094,651
0
0
null
null
null
null
UTF-8
Python
false
false
3,066
py
################################################################ ## ## As a demonstration of a function which applies defensive ## programming in different ways, consider a predicate ## which is intended to return True if a given natural ## number (i.e., a non-negative integer) is a square of ## another natural number. ## ## From this description the function could be "misused" in ## three ways: ## ## 1) It could be given a negative number. ## 2) It could be given a floating point number. ## 3) It could be given a value which is not a number at ## all. ## ## By adding some "defensive" code we can make a naive ## implementation more robust by responding appropriately ## to each of these cases: ## ## 1) A negative number can never be a square of another ## number, so we can always return False in this case. ## Here we choose to do so "silently", not drawing ## attention to the unexpected value at all, since the ## answer returned is still "correct" mathematically. ## 2) A positive floating point number could be a square of ## a natural number so, even though we're not required ## to handle floating point numbers we can still do so, ## but choose to generate a "warning" message in this ## case. ## 3) If the function is given a non-numerical value it ## is reasonable to assume that something is seriously ## wrong with the calling code, so in this case we ## generate an "error" message and return the special ## value None. 
#--------------------------------------------------------- # Return True if the given natural number is the square of # some other natural number def is_square(natural_number): from math import sqrt # Three "defensive" checks follow ## # Check that the parameter is a number ## if not (isinstance(natural_number, int) or isinstance(natural_number, float)): ## print('ERROR - parameter must be numeric, given:', repr(natural_number)) ## return None ## ## # Check that the parameter is positive ## if natural_number < 0: ## return False ## ## # Check that the parameter is a natural number ## if isinstance(natural_number, float): ## print('Warning - expected natural, given float:', natural_number) # Return True if the number's square root is a whole number return sqrt(natural_number) % 1 == 0 #--------------------------------------------------------- # Some tests # # The first of these tests is a "valid" one, but the remaining # three all provide unexpected inputs. Uncommenting the # "defensive" checks above will cause the function to respond # appropriately. (It will crash until the defensive code is # uncommented. Why?) print(is_square(36)) # expected input print() print(is_square(-1)) # unexpected input, but handled silently print() print(is_square(225.0)) # unexpected input, handled with warning print() print(is_square('August')) # unexpected input, handled as an error
7bbfd94accf83c65ae4546356bccb460b15a900e
b8ea631aae5d132c7b0236684d5f7c12d3c222be
/Library/Graph/Dijkstra_heapq.py
6164198b7fcd573492928ce2f82d98e051b23864
[]
no_license
Ryushi-tech/card3
68c429313142e58d4722a1cd5a4acc4ab39ca41f
883636b2f518e38343a12816c5c641b60a87c098
refs/heads/master
2021-07-05T22:46:33.089945
2020-12-12T15:31:00
2020-12-12T15:31:00
209,176,836
0
0
null
null
null
null
UTF-8
Python
false
false
708
py
import heapq def dijkstra(s): q = [] dist[s] = 0 heapq.heappush(q, [0, s]) while q: p, v = heapq.heappop(q) if dist[v] < p: continue for i, x in g[v]: if dist[i] > dist[v] + x: dist[i] = dist[v] + x heapq.heappush(q, [dist[i], i]) n = int(input()) g = [[] for _ in range(n)] for _ in range(n - 1): a, b, c = map(int, input().split()) a, b = a - 1, b - 1 g[a].append((b, c)) g[b].append((a, c)) inf = 10 ** 14 dist = [inf] * n m, k = map(int, input().split()) k = k - 1 dijkstra(k) for _ in range(m): e, f = map(int, input().split()) res = dist[e - 1] + dist[f - 1] print(res)
c7e2d80388cbe425136e01a06bdb2ea24fa604c6
9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97
/sdBs/AllRun/sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_coadd.py
39e21f206956741881cd664d37e0bb5ecdba667f
[]
no_license
tboudreaux/SummerSTScICode
73b2e5839b10c0bf733808f4316d34be91c5a3bd
4dd1ffbb09e0a599257d21872f9d62b5420028b0
refs/heads/master
2021-01-20T18:07:44.723496
2016-08-08T16:49:53
2016-08-08T16:49:53
65,221,159
0
0
null
null
null
null
UTF-8
Python
false
false
498
py
from gPhoton.gMap import gMap def main(): gMap(band="NUV", skypos=[248.990167,34.240833], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_count_coadd.fits", overwrite=True, verbose=3) if __name__ == "__main__": main()
bde86714c9e9dcc484f3f18212f3921c491fe222
e50ba4cc303d4165bef9e2917103c084cfbe0e07
/rating_app/migrations/0016_auto_20201129_1156.py
25f2b5ff3130d55f5d492b5c185861041cf00086
[ "MIT" ]
permissive
Antony-me/Ratemyapp
09049fce54d3a3ed2b256970e7840d20942e8c84
e547fea82439a3e4f83aa78bf16f93b1ea9ab00b
refs/heads/main
2023-01-28T16:52:58.635646
2020-12-01T16:49:07
2020-12-01T16:49:07
316,425,507
0
0
null
null
null
null
UTF-8
Python
false
false
477
py
# Generated by Django 3.1.3 on 2020-11-29 11:56 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('rating_app', '0015_profilemerch'), ] operations = [ migrations.AlterField( model_name='profilemerch', name='projects', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rating_app.post'), ), ]
f82a7850addf3773f1ce92a89e4d51f96cf3f763
487ce91881032c1de16e35ed8bc187d6034205f7
/codes/CodeJamCrawler/16_0_2_neat/16_0_2_tkdkop_pancake.py
259ec04a68548d92ceed7f438162fc6b46baa760
[]
no_license
DaHuO/Supergraph
9cd26d8c5a081803015d93cf5f2674009e92ef7e
c88059dc66297af577ad2b8afa4e0ac0ad622915
refs/heads/master
2021-06-14T16:07:52.405091
2016-08-21T13:39:13
2016-08-21T13:39:13
49,829,508
2
0
null
2021-03-19T21:55:46
2016-01-17T18:23:00
Python
UTF-8
Python
false
false
286
py
#!/usr/bin/env python import sys import itertools m = sys.stdin.readline() i = 0 for line in sys.stdin.readlines(): line = line.strip() i += 1 out_str = "Case #%d: " % i line += '+' k = itertools.groupby(line) out_str += str(len(list(k))-1) print out_str
cdaec89a7ecfa4ae8042bf31ac073b89b8a58072
a3387fbcc918acb55d289ffb61b9fb603203dc11
/Puzzles/2022-01/01_22_balanced_days.py
924f5189761f280c72866b5565b743883fbda28e
[]
no_license
fbhs-cs/purdys-puzzles
13e970ff909ff2e093b3b9d9777faac47c099913
1cf3f9c52677843fad781e46304e1485a91aae58
refs/heads/master
2023-08-17T06:28:06.659751
2023-08-09T14:45:43
2023-08-09T14:45:43
212,085,565
4
3
null
null
null
null
UTF-8
Python
false
false
1,069
py
from math import ceil def is_balanced(num): n = str(num) first = n[:ceil(len(n)/2)] last = n[len(n)//2:] #print(first,last) if sum([int(x) for x in first]) == sum([int(x) for x in last]): return True else: return False def count_balanced(n): count = 0 for i in range(1,n): if is_balanced(i): count += 1 return count def sum_balanced(n): total = 0 for i in range(1,n): if is_balanced(i): #print(i) total += i return total def find_balanced_dates(): months = {1:31,2:28,3:31,4:30,5:31,6:30, 7:31,8:31,9:30,10:31,11:30,12:31} count = 0 sum = 0 for month in range(1,13): for day in range(1,months[month]+1): day_num = str(month) + str(day) + '2022' if is_balanced(int(day_num)): count += 1 sum += int(day_num) print(day_num) print(count) print(sum) find_balanced_dates()
9567422e1472a65046cf8160b1bdae8fbcf7dcd3
080c13cd91a073457bd9eddc2a3d13fc2e0e56ae
/MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/types/internal.py
c56c7aa6d7790b4c36d248603f2282e60af08a39
[ "Apache-2.0" ]
permissive
Portfolio-Projects42/UsefulResourceRepo2.0
1dccc8961a09347f124d3ed7c27c6d73b9806189
75b1e23c757845b5f1894ebe53551a1cf759c6a3
refs/heads/master
2023-08-04T12:23:48.862451
2021-09-15T12:51:35
2021-09-15T12:51:35
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,129
py
# Copyright 2020 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Types internal to TensorFlow. These types should not be exported. External code should not rely on these. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function # TODO(mdan): Is this strictly needed? Only ops.py really uses it. class NativeObject(object): """Types natively supported by various TF operations. The most notable example of NativeObject is Tensor. """
abc2e14c55f8110ca3d0bc1403c2b44d4e5fe36e
026fee65b95206995baf1565f486ab4ed7f7cef9
/userprofiles/admin.py
89683d76fdacc00428bfbad69cc1e019d3f01b5e
[]
no_license
santhoshpkumar/pinclone
e8460aab355ebf3e5559d44127d7ccad22667747
8bf641df9a4999797731d1d2fb4ff3d78d717e10
refs/heads/master
2020-04-03T09:39:27.269726
2018-10-08T10:51:51
2018-10-08T10:51:51
null
0
0
null
null
null
null
UTF-8
Python
false
false
225
py
from django.contrib import admin from .models import Profile # Register your models here. @admin.register(Profile) class ProfileAdmin(admin.ModelAdmin): list_display = ('user', 'bio', 'website', 'birth_date')
28e7dee0700c6fe42c004b939fcaa2b9ff69d27e
eb64b799ff1d7ef3a244bf8e6f9f4e9118d5cfcd
/homeassistant/components/trafikverket_weatherstation/const.py
7bb53dc5356a0b8a392104982912658806275659
[ "Apache-2.0" ]
permissive
JeffLIrion/home-assistant
53966b81b5d5816679f12fc761f79e8777c738d6
8f4ec89be6c2505d8a59eee44de335abe308ac9f
refs/heads/dev
2023-08-22T09:42:02.399277
2022-02-16T01:26:13
2022-02-16T01:26:13
136,679,169
5
2
Apache-2.0
2023-09-13T06:59:25
2018-06-09T00:58:35
Python
UTF-8
Python
false
false
466
py
"""Adds constants for Trafikverket Weather integration.""" from homeassistant.const import Platform DOMAIN = "trafikverket_weatherstation" CONF_STATION = "station" PLATFORMS = [Platform.SENSOR] ATTRIBUTION = "Data provided by Trafikverket" ATTR_MEASURE_TIME = "measure_time" ATTR_ACTIVE = "active" NONE_IS_ZERO_SENSORS = { "air_temp", "road_temp", "wind_direction", "wind_speed", "wind_speed_max", "humidity", "precipitation_amount", }
7642072e77aebda4174a74cfe093db22e6377af7
7bd0954e956993df19d833810f9d71b60e2ebb9a
/phasor/utilities/ipynb/hdf.py
b9f7e5b1add89064ffd726859cfe27d4415619ec
[ "Apache-2.0" ]
permissive
aa158/phasor
5ee0cec4f816b88b0a8ac298c330ed48458ec3f2
fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d
refs/heads/master
2021-10-22T09:48:18.556091
2019-03-09T18:56:05
2019-03-09T18:56:05
null
0
0
null
null
null
null
UTF-8
Python
false
false
169
py
# -*- coding: utf-8 -*- """ """ from __future__ import division, print_function, unicode_literals import h5py from declarative.bunch.hdf_deep_bunch import HDFDeepBunch
267f5e570bff6ec85a0e60de98259cea7422da0e
edb37da2fd2d2f048df119db96a6de58fc816ddb
/jumpserver-0.4/zrd/my_blog/article/views.py
0634c5361e1cf968ac0e81b87ea55908e18fa6b5
[]
no_license
cucy/2017
88f1aa2e8df945162d8259918cf61a138a3422cf
33bcdd5c9e0717521544e3ea41ade10fbb325c4f
refs/heads/master
2020-05-21T15:31:39.935733
2017-07-10T11:04:29
2017-07-10T11:04:29
84,629,639
0
0
null
null
null
null
UTF-8
Python
false
false
1,766
py
# coding:utf-8 from django.shortcuts import render from django.shortcuts import render_to_response # Create your views here. from django.http import HttpResponse from models import SSHInfo # Create your views here. try: from ConfigParser import ConfigParser except: from configparser import ConfigParser try: import paramiko_client except: from . import paramiko_client def home(request): # 如果请求里有file for key in request.FILES: file = request.FILES[key] config = ConfigParser() # 读取配置文件 config.readfp(file) for section in config.sections(): print(section) host_name = config.get(section, 'host_name') host = config.get(section, 'host') port = config.get(section, 'port') usr = config.get(section, 'username') pwd = config.get(section, 'password') new_ssh, create = SSHInfo.objects.update_or_create( host_name=host_name , host=host , port=port , usr=usr , pwd=pwd ) new_ssh.save() # 保存配置信息到数据库 sshs = SSHInfo.objects.all() # 获取所有对象 if len(sshs) > 0: return render_to_response('sshlist.html', {'sshs': sshs}) else: return render_to_response('home_view.html') def run_ssh_cmd(requset): # 获取所有的信息 sshs = SSHInfo.objects.all() cmd_res = {} for ssh in sshs: client = paramiko_client.ParamikoClient() client.connect(ssh) res = client.run_cmd('date') # 执行命令 接收返回 cmd_res[ssh.host_name] = res return render_to_response('cmd_res.html', {'cmd_res': cmd_res})
d3e3b20b1ce012f78bbc61c3eb7dc31075d016ca
c9094a4ed256260bc026514a00f93f0b09a5d60c
/tests/components/accuweather/test_system_health.py
749f516e44c748caf05503460e8a72ec34d085d3
[ "Apache-2.0" ]
permissive
turbokongen/home-assistant
824bc4704906ec0057f3ebd6d92788e096431f56
4ab0151fb1cbefb31def23ba850e197da0a5027f
refs/heads/dev
2023-03-12T05:49:44.508713
2021-02-17T14:06:16
2021-02-17T14:06:16
50,231,140
4
1
Apache-2.0
2023-02-22T06:14:30
2016-01-23T08:55:09
Python
UTF-8
Python
false
false
1,785
py
"""Test AccuWeather system health.""" import asyncio from unittest.mock import Mock from aiohttp import ClientError from homeassistant.components.accuweather.const import COORDINATOR, DOMAIN from homeassistant.setup import async_setup_component from tests.common import get_system_health_info async def test_accuweather_system_health(hass, aioclient_mock): """Test AccuWeather system health.""" aioclient_mock.get("https://dataservice.accuweather.com/", text="") hass.config.components.add(DOMAIN) assert await async_setup_component(hass, "system_health", {}) hass.data[DOMAIN] = {} hass.data[DOMAIN]["0123xyz"] = {} hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock( accuweather=Mock(requests_remaining="42") ) info = await get_system_health_info(hass, DOMAIN) for key, val in info.items(): if asyncio.iscoroutine(val): info[key] = await val assert info == { "can_reach_server": "ok", "remaining_requests": "42", } async def test_accuweather_system_health_fail(hass, aioclient_mock): """Test AccuWeather system health.""" aioclient_mock.get("https://dataservice.accuweather.com/", exc=ClientError) hass.config.components.add(DOMAIN) assert await async_setup_component(hass, "system_health", {}) hass.data[DOMAIN] = {} hass.data[DOMAIN]["0123xyz"] = {} hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock( accuweather=Mock(requests_remaining="0") ) info = await get_system_health_info(hass, DOMAIN) for key, val in info.items(): if asyncio.iscoroutine(val): info[key] = await val assert info == { "can_reach_server": {"type": "failed", "error": "unreachable"}, "remaining_requests": "0", }
1b32ea37e4c7f6126f63d235f5bc196330d2dc7e
d94b6845aeeb412aac6850b70e22628bc84d1d6d
/dimensions_of_motion/geometry.py
d7a317cb08a95e69785f8cd0af032ae5db8a1f29
[ "CC-BY-4.0", "Apache-2.0" ]
permissive
ishine/google-research
541aea114a68ced68736340e037fc0f8257d1ea2
c1ae273841592fce4c993bf35cdd0a6424e73da4
refs/heads/master
2023-06-08T23:02:25.502203
2023-05-31T01:00:56
2023-05-31T01:06:45
242,478,569
0
0
Apache-2.0
2020-06-23T01:55:11
2020-02-23T07:59:42
Jupyter Notebook
UTF-8
Python
false
false
7,466
py
# coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# -*- coding: utf-8 -*-
"""Functions for sampling and warping images.

We use texture coordinates to represent points and offsets in images. They
go from (0,0) in the top-left corner of an image to (1,1) in the bottom
right. It is convenient to work with these coordinates rather than counts
of pixels, because they are resolution-independent.
"""

import tensorflow as tf
import tensorflow_addons as tfa

import utils


def check_input_shape(name, tensor, axis, value):
  """Utility function for checking tensor shapes."""
  shape = tensor.shape.as_list()
  if shape[axis] != value:
    raise ValueError('Input "%s": dimension %d should be %s. Shape = %s' %
                     (name, axis, value, shape))


def pixel_center_grid(height, width):
  """Produce a grid of (x,y) texture-coordinate pairs of pixel centers.

  Args:
    height: (integer) height, not a tensor
    width: (integer) width, not a tensor

  Returns:
    A tensor of shape [height, width, 2] where each entry gives the (x,y)
    texture coordinates of the corresponding pixel center. For example, for
    pixel_center_grid(2, 3) the result is:
       [[[1/6, 1/4], [3/6, 1/4], [5/6, 1/4]],
        [[1/6, 3/4], [3/6, 3/4], [5/6, 3/4]]]
  """
  height_float = tf.cast(height, dtype=tf.float32)
  width_float = tf.cast(width, dtype=tf.float32)
  # Centers are offset half a pixel in from each edge.
  ys = tf.linspace(0.5 / height_float, 1.0 - 0.5 / height_float, height)
  xs = tf.linspace(0.5 / width_float, 1.0 - 0.5 / width_float, width)
  xs, ys = tf.meshgrid(xs, ys)
  grid = tf.stack([xs, ys], axis=-1)
  assert grid.shape.as_list() == [height, width, 2]
  return grid


def sample_image(image, coords):
  """Sample points from an image, using bilinear filtering.

  Args:
    image: [B0, ..., Bn-1, height, width, channels] image data
    coords: [B0, ..., Bn-1, ..., 2] (x,y) texture coordinates

  Returns:
    [B0, ..., Bn-1, ..., channels] image data, in which each value is sampled
    with bilinear interpolation from the image at position indicated by the
    (x,y) texture coordinates. The image and coords parameters must have
    matching batch dimensions B0, ..., Bn-1.

  Raises:
    ValueError: if shapes are incompatible.
  """
  check_input_shape('coords', coords, -1, 2)
  tfshape = tf.shape(image)[-3:-1]
  height = tf.cast(tfshape[0], dtype=tf.float32)
  width = tf.cast(tfshape[1], dtype=tf.float32)
  # Resampler expects coordinates where (0,0) is the center of the top-left
  # pixel and (width-1, height-1) is the center of the bottom-right pixel.
  pixel_coords = coords * [width, height] - 0.5

  # tfa.image.resampler only works with exactly one batch dimension, i.e. it
  # expects image to be [batch, height, width, channels] and pixel_coords to
  # be [batch, ..., 2]. So we need to reshape, perform the resampling, and
  # then reshape back to what we had.
  batch_dims = len(image.shape.as_list()) - 3
  assert (image.shape.as_list()[:batch_dims] == pixel_coords.shape.as_list()
          [:batch_dims])

  batched_image, _ = utils.flatten_batch(image, batch_dims)
  batched_coords, unflatten_coords = utils.flatten_batch(
      pixel_coords, batch_dims)
  resampled = tfa.image.resampler(batched_image, batched_coords)

  # Convert back to the right shape to return
  resampled = unflatten_coords(resampled)
  return resampled


def bilinear_forward_warp(image, coords, weights=None):
  """Forward warp each point in an image using bilinear filtering.

  This is a sort of reverse of sample_image, in the sense that scatter is the
  reverse of gather. A new image is generated of the same size as the input,
  in which each pixel has been splatted onto the 2x2 block containing the
  corresponding coordinates, using bilinear weights (multiplied with the
  input per-pixel weights, if supplied). Thus if two or more pixels warp to
  the same point, the result will be a blend of the their values. If no
  pixels warp to a location, the result at that location will be zero.

  Args:
    image: [B0, ..., Bn-1, height, width, channels] image data
    coords: [B0, ..., Bn-1, height, width, 2] (x,y) texture coordinates
    weights: [B0, ... ,Bn-1, height, width] weights for each point. If
      omitted, all points are weighed equally. Use this to implement, for
      example, soft z-buffering.

  Returns:
    [B0, ..., Bn-1, ..., channels] image data, in which each point in the
    input image has been moved to the position indicated by the corresponding
    (x,y) texture coordinates. The image and coords parameters must have
    matching batch dimensions B0, ..., Bn-1.
  """
  # Forward-warp computed using the gradient of reverse-warp. We use a dummy
  # image of the right size for reverse-warping. An extra channel is used to
  # accumulate the total weight for each pixel which we'll then divide by.
  image_and_ones = tf.concat([image, tf.ones_like(image[Ellipsis, -1:])],
                             axis=-1)
  dummy = tf.zeros_like(image_and_ones)
  if weights is None:
    weighted_image = image_and_ones
  else:
    weighted_image = image_and_ones * weights[Ellipsis, tf.newaxis]
  with tf.GradientTape(watch_accessed_variables=False) as g:
    g.watch(dummy)
    reverse = tf.reduce_sum(
        sample_image(dummy, coords) * weighted_image, [-3, -2])
  grads = g.gradient(reverse, dummy)
  rgb = grads[Ellipsis, :-1]
  total = grads[Ellipsis, -1:]
  # Normalize by accumulated weight; zero where nothing was splatted.
  result = tf.math.divide_no_nan(rgb, total)
  return result


def flow_warp(image, flow):
  """Warp images by resampling according to flow vectors.

  Args:
    image: [..., H, W, C] images
    flow: [..., H, W, 2] (x, y) texture offsets

  Returns:
    [..., H, W, C] resampled images. Each pixel in each output image has been
    bilinearly sampled from the corresponding pixel in its input image plus
    the (x, y) flow vector. The flow vectors are texture coordinate offsets,
    e.g. (1, 1) is an offset of the whole width and height of the image.
    Sampling outside the image yields zero values.
  """
  width = image.shape.as_list()[-2]
  height = image.shape.as_list()[-3]
  grid = pixel_center_grid(height, width)
  coords = grid + flow
  return sample_image(image, coords)


def flow_forward_warp(image, flow):
  """Forward-warp images according to flow vectors.

  Args:
    image: [..., H, W, C] images
    flow: [..., H, W, 2] (x, y) texture offsets

  Returns:
    [..., H, W, C] warped images. Each pixel in each image is offset
    according to the corresponding value in the flow, and splatted onto a
    2x2 pixel block. (See bilinear_forward_warp for details.) If no points
    warp to a location, the result will be zero. The flow vectors are
    texture coordinate offsets, e.g. (1, 1) is an offset of the whole width
    and height of the image.
  """
  width = image.shape.as_list()[-2]
  height = image.shape.as_list()[-3]
  grid = pixel_center_grid(height, width)
  coords = grid + flow
  return bilinear_forward_warp(image, coords)
cf4869a008091dac50e4e6d07bded0da84f85bb3
2bcf18252fa9144ece3e824834ac0e117ad0bdf3
/zpt/trunk/site-packages/zpt/_pytz/zoneinfo/Asia/Ulan_Bator.py
23ee14fe6b126706fac6097086cd541788e4110c
[ "MIT", "ZPL-2.1" ]
permissive
chadwhitacre/public
32f65ba8e35d38c69ed4d0edd333283a239c5e1d
0c67fd7ec8bce1d8c56c7ff3506f31a99362b502
refs/heads/master
2021-05-10T14:32:03.016683
2010-05-13T18:24:20
2010-05-13T18:24:20
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,011
py
'''tzinfo timezone information for Asia/Ulan_Bator.''' from zpt._pytz.tzinfo import DstTzInfo from zpt._pytz.tzinfo import memorized_datetime as d from zpt._pytz.tzinfo import memorized_ttinfo as i class Ulan_Bator(DstTzInfo): '''Asia/Ulan_Bator timezone definition. See datetime.tzinfo for details''' zone = 'Asia/Ulan_Bator' _utc_transition_times = [ d(1,1,1,0,0,0), d(1905,7,31,16,52,28), d(1977,12,31,17,0,0), d(1983,3,31,16,0,0), d(1983,9,30,15,0,0), d(1984,3,31,16,0,0), d(1984,9,29,18,0,0), d(1985,3,30,18,0,0), d(1985,9,28,18,0,0), d(1986,3,29,18,0,0), d(1986,9,27,18,0,0), d(1987,3,28,18,0,0), d(1987,9,26,18,0,0), d(1988,3,26,18,0,0), d(1988,9,24,18,0,0), d(1989,3,25,18,0,0), d(1989,9,23,18,0,0), d(1990,3,24,18,0,0), d(1990,9,29,18,0,0), d(1991,3,30,18,0,0), d(1991,9,28,18,0,0), d(1992,3,28,18,0,0), d(1992,9,26,18,0,0), d(1993,3,27,18,0,0), d(1993,9,25,18,0,0), d(1994,3,26,18,0,0), d(1994,9,24,18,0,0), d(1995,3,25,18,0,0), d(1995,9,23,18,0,0), d(1996,3,30,18,0,0), d(1996,9,28,18,0,0), d(1997,3,29,18,0,0), d(1997,9,27,18,0,0), d(1998,3,28,18,0,0), d(1998,9,26,18,0,0), d(2001,4,27,18,0,0), d(2001,9,28,17,0,0), d(2002,3,29,18,0,0), d(2002,9,27,17,0,0), d(2003,3,28,18,0,0), d(2003,9,26,17,0,0), d(2004,3,26,18,0,0), d(2004,9,24,17,0,0), d(2005,3,25,18,0,0), d(2005,9,23,17,0,0), d(2006,3,24,18,0,0), d(2006,9,29,17,0,0), d(2007,3,30,18,0,0), d(2007,9,28,17,0,0), d(2008,3,28,18,0,0), d(2008,9,26,17,0,0), d(2009,3,27,18,0,0), d(2009,9,25,17,0,0), d(2010,3,26,18,0,0), d(2010,9,24,17,0,0), d(2011,3,25,18,0,0), d(2011,9,23,17,0,0), d(2012,3,30,18,0,0), d(2012,9,28,17,0,0), d(2013,3,29,18,0,0), d(2013,9,27,17,0,0), d(2014,3,28,18,0,0), d(2014,9,26,17,0,0), d(2015,3,27,18,0,0), d(2015,9,25,17,0,0), d(2016,3,25,18,0,0), d(2016,9,23,17,0,0), d(2017,3,24,18,0,0), d(2017,9,29,17,0,0), d(2018,3,30,18,0,0), d(2018,9,28,17,0,0), d(2019,3,29,18,0,0), d(2019,9,27,17,0,0), d(2020,3,27,18,0,0), d(2020,9,25,17,0,0), d(2021,3,26,18,0,0), d(2021,9,24,17,0,0), d(2022,3,25,18,0,0), 
d(2022,9,23,17,0,0), d(2023,3,24,18,0,0), d(2023,9,29,17,0,0), d(2024,3,29,18,0,0), d(2024,9,27,17,0,0), d(2025,3,28,18,0,0), d(2025,9,26,17,0,0), d(2026,3,27,18,0,0), d(2026,9,25,17,0,0), d(2027,3,26,18,0,0), d(2027,9,24,17,0,0), d(2028,3,24,18,0,0), d(2028,9,29,17,0,0), d(2029,3,30,18,0,0), d(2029,9,28,17,0,0), d(2030,3,29,18,0,0), d(2030,9,27,17,0,0), d(2031,3,28,18,0,0), d(2031,9,26,17,0,0), d(2032,3,26,18,0,0), d(2032,9,24,17,0,0), d(2033,3,25,18,0,0), d(2033,9,23,17,0,0), d(2034,3,24,18,0,0), d(2034,9,29,17,0,0), d(2035,3,30,18,0,0), d(2035,9,28,17,0,0), d(2036,3,28,18,0,0), d(2036,9,26,17,0,0), d(2037,3,27,18,0,0), d(2037,9,25,17,0,0), ] _transition_info = [ i(25680,0,'LMT'), i(25200,0,'ULAT'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), 
i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), i(32400,3600,'ULAST'), i(28800,0,'ULAT'), ] Ulan_Bator = Ulan_Bator()
85ef73de5c1fceffd5aff452e2b9902d1718602f
5ca6730fa1178582d5f5875155f340ec0f406294
/practice_problem-16.py
44785ae4df282d5b7cc6f83173866d825eb41375
[]
no_license
MahadiRahman262523/Python_Code_Part-1
9740d5ead27209d69af4497eea410f2faef50ff3
e2f08e3d0564a003400743ae6050fd687c280639
refs/heads/main
2023-07-25T09:10:53.649082
2021-09-05T19:39:14
2021-09-05T19:39:14
403,396,706
0
0
null
null
null
null
UTF-8
Python
false
false
135
py
# Count how many zeros appear in the tuple below and print the result.
# Expected output for a = (7, 0, 8, 0, 0, 9): 3

a = (7, 0, 8, 0, 0, 9)
print(sum(1 for item in a if item == 0))
1b406b2dc38004db14248af19fb7f7be9b8e7f6c
487ce91881032c1de16e35ed8bc187d6034205f7
/codes/BuildLinks1.10/test_input/CJ_16_1/16_1_1_FreeTShirt_a.py
0207b362ff64f55d6e7a49f758c368374d2c5dc1
[]
no_license
DaHuO/Supergraph
9cd26d8c5a081803015d93cf5f2674009e92ef7e
c88059dc66297af577ad2b8afa4e0ac0ad622915
refs/heads/master
2021-06-14T16:07:52.405091
2016-08-21T13:39:13
2016-08-21T13:39:13
49,829,508
2
0
null
2021-03-19T21:55:46
2016-01-17T18:23:00
Python
UTF-8
Python
false
false
404
py
def argmax(s):
    """Return [(index, char), ...] for every occurrence of the largest character in s."""
    z = max(s)
    return [(idx, c) for idx, c in enumerate(s) if c == z]


def last(s):
    """Return the lexicographically largest string reachable by repeatedly
    moving one occurrence of the current maximum character to the front.

    Recursive search: for each occurrence of the max character, put it first,
    solve the remainder, and keep the best candidate.
    """
    if len(s) <= 1:
        return s
    return max(s[idx] + last(s[:idx]) + s[idx + 1:] for idx, c in argmax(s))


if __name__ == '__main__':
    # Guarded so importing this module does not open the data files; the
    # original ran this at import time and never closed either handle.
    with open('a-o', 'w') as fw, open('A-small-i') as fin:
        for idx, line in enumerate(fin):
            if idx == 0:
                # First line of the input file is the case count.
                continue
            s = line.strip()
            print(s)
            fw.write('Case #{0}: {1}\n'.format(idx, last(s)))
8732c9af3fea83ea57fa51e58d56b098749760f6
6561baa7ca68875e62fbf2d20c7887e4aadebe9f
/tests/cds_test_20_sf_ukmo.py
efa292077e335becd6970c33d7b3c44900ea5f35
[ "Apache-2.0" ]
permissive
EXWEXs/cfgrib
9057c9e5abbc38a32f113f832f1506988839ee82
8a1727af2c3bbcf2e17f250dfafcb4cc4e959354
refs/heads/master
2020-04-01T15:44:45.140700
2018-10-14T14:39:13
2018-10-14T14:39:13
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,089
py
import pytest

import cfgrib
import cdscommon


# Catalogue of CDS requests under test:
#   name -> [dataset, request parameters, expected key count of the first message]
TEST_FILES = {
    'seasonal-original-single-levels-ukmo': [
        'seasonal-original-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours',
            'year': '2018',
            'month': ['04', '05'],
            'day': [
                '01', '02', '03', '04', '05', '06', '07', '08', '09', '10',
                '11', '12', '13', '14', '15', '16', '17', '18', '19', '20',
                '21', '22', '23', '24', '25', '26', '27', '28', '29', '30',
                '31'
            ],
            'leadtime_hour': ['24', '48'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        192,
    ],
    'seasonal-original-pressure-levels-ukmo': [
        'seasonal-original-pressure-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'temperature',
            'pressure_level': ['500', '850'],
            'year': '2018',
            'month': ['04', '05'],
            'day': [
                '01', '02', '03', '04', '05', '06', '07', '08', '09', '10',
                '11', '12', '13', '14', '15', '16', '17', '18', '19', '20',
                '21', '22', '23', '24', '25', '26', '27', '28', '29', '30',
                '31'
            ],
            'leadtime_hour': ['24', '48'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        192,
    ],
    'seasonal-postprocessed-single-levels-ukmo': [
        'seasonal-postprocessed-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours_anomaly',
            'product_type': 'monthly_mean',
            'year': '2018',
            'month': ['04', '05'],
            'leadtime_month': ['1', '2'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        210,
    ],
    'seasonal-monthly-single-levels-monthly_mean-ukmo': [
        'seasonal-monthly-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours',
            'product_type': 'monthly_mean',
            'year': '2018',
            'month': ['04', '05'],
            'leadtime_month': ['1', '2'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        210,
    ],
    'seasonal-monthly-single-levels-ensemble_mean-ukmo': [
        'seasonal-monthly-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours',
            'product_type': 'ensemble_mean',
            'year': '2018',
            'month': ['04', '05'],
            'leadtime_month': ['1', '2'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        210,
    ],
    'seasonal-monthly-single-levels-hindcast_climate_mean-ukmo': [
        'seasonal-monthly-single-levels',
        {
            'originating_centre': 'ukmo',
            'variable': 'maximum_2m_temperature_in_the_last_24_hours',
            'product_type': 'hindcast_climate_mean',
            'year': '2018',
            'month': ['04', '05'],
            'leadtime_month': ['1', '2'],
            'grid': ['3', '3'],
            'format': 'grib',
        },
        210,
    ],
}


def _fetch_test_data(test_file):
    """Download (or reuse a cached copy of) the GRIB file for one catalogue entry."""
    dataset, request, key_count = TEST_FILES[test_file]
    path = cdscommon.ensure_data(dataset, request, name='cds-' + test_file + '-{uuid}.grib')
    return path, key_count


@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Stream(test_file):
    """The first GRIB message exposes the expected keys and a consistent count."""
    path, key_count = _fetch_test_data(test_file)
    stream = cfgrib.FileStream(path)
    first_message = stream.first()
    assert len(first_message) == key_count
    assert sum(1 for _ in stream) == first_message['count']


@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Dataset(test_file):
    """The GRIB file opens through the xarray store and round-trips to netCDF."""
    path, _ = _fetch_test_data(test_file)
    res = cfgrib.xarray_store.open_dataset(path, flavour_name='cds')
    res.to_netcdf(path[:-5] + '.nc')
5e0bde2a16193651c22bf50efd429a326bf6f474
6b564e24a99b2d2c6a384d8674974f10ef9461d5
/iptv_proxy/providers/crystalclear/data_model.py
53c6ad0d72865ecf54ed3413a6d9df1d667e4c12
[ "MIT" ]
permissive
Onemars/IPTVProxy
1c1421c6962c1f7cf4cef90d8a2c98e98f5ded25
06d5472f49ecaa7eafb90832a1c9ac85a09cd268
refs/heads/master
2020-05-24T14:34:48.486177
2019-05-17T14:17:21
2019-05-17T14:17:21
187,311,948
1
0
null
2019-05-18T03:58:48
2019-05-18T03:58:47
null
UTF-8
Python
false
false
6,858
py
import logging

from sqlalchemy import Column
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy.ext.hybrid import hybrid_property

from iptv_proxy.data_model import DateTimeUTC
from iptv_proxy.providers.crystalclear.constants import CrystalClearConstants
from iptv_proxy.providers.crystalclear.db import Base

logger = logging.getLogger(__name__)


class CrystalClearChannel(Base):
    """ORM row for one CrystalClear channel.

    Stores the channel's M3U8 grouping/number/name, a pickled channel object,
    and two pre-rendered XMLTV fragments (complete and minimal).
    """

    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'channel'

    _id = Column('id', String, primary_key=True, autoincrement=False)
    _m3u8_group = Column('m3u8_group', String, nullable=False)
    _number = Column('number', Integer, nullable=False)
    _name = Column('name', String, nullable=False)
    # Pickled channel object; callers are responsible for (de)serialization.
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)

    # Index names are prefixed with the provider so several providers can
    # share one database schema without collisions.
    __table_args__ = (Index('{0}_channel_ix_id'.format(_provider_name), _id.asc()),
                      Index('{0}_channel_ix_m3u8_group'.format(_provider_name), _m3u8_group.asc()),
                      Index('{0}_channel_ix_m3u8_group_&_number'.format(_provider_name),
                            _m3u8_group.asc(),
                            _number.asc()),
                      Index('{0}_channel_ix_number'.format(_provider_name), _number.asc()))

    def __init__(self, id_, m3u8_group, number, name, pickle, complete_xmltv, minimal_xmltv):
        self._id = id_
        self._m3u8_group = m3u8_group
        self._number = number
        self._name = name
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv

    # Plain pass-through accessors; hybrid_property keeps them usable both on
    # instances and inside SQLAlchemy query expressions.
    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv

    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv

    @hybrid_property
    def id(self):
        return self._id

    @id.setter
    def id(self, id_):
        self._id = id_

    @hybrid_property
    def m3u8_group(self):
        return self._m3u8_group

    @m3u8_group.setter
    def m3u8_group(self, m3u8_group):
        self._m3u8_group = m3u8_group

    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv

    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv

    @hybrid_property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @hybrid_property
    def number(self):
        return self._number

    @number.setter
    def number(self, number):
        self._number = number

    @hybrid_property
    def pickle(self):
        return self._pickle

    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle


class CrystalClearProgram(Base):
    """ORM row for one EPG program entry, keyed by channel and start time."""

    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'program'

    _id = Column('id', String, primary_key=True, autoincrement=False)
    # Start/stop are stored timezone-aware (UTC) via the custom DateTimeUTC type.
    _start = Column('start', DateTimeUTC(timezone=True), nullable=False)
    _stop = Column('stop', DateTimeUTC(timezone=True), nullable=False)
    _channel_xmltv_id = Column('channel_xmltv_id', String, nullable=False)
    _channel_number = Column('channel_number', Integer, nullable=False)
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)

    __table_args__ = (
        Index('{0}_program_ix_id'.format(_provider_name), _id.asc()),
        Index('{0}_program_ix_channel_number_&_start'.format(_provider_name),
              _channel_number.asc(),
              _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start'.format(_provider_name),
              _channel_xmltv_id.asc(),
              _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start_&_stop'.format(_provider_name),
              _channel_xmltv_id.asc(),
              _start.asc(),
              _stop.asc()),
        Index('{0}_program_ix_start'.format(_provider_name), _start.asc()))

    def __init__(self, id_, start, stop, channel_xmltv_id, channel_number, pickle,
                 complete_xmltv, minimal_xmltv):
        self._id = id_
        self._start = start
        self._stop = stop
        self._channel_xmltv_id = channel_xmltv_id
        self._channel_number = channel_number
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv

    # Boilerplate accessors, mirroring CrystalClearChannel.
    @hybrid_property
    def channel_number(self):
        return self._channel_number

    @channel_number.setter
    def channel_number(self, channel_number):
        self._channel_number = channel_number

    @hybrid_property
    def channel_xmltv_id(self):
        return self._channel_xmltv_id

    @channel_xmltv_id.setter
    def channel_xmltv_id(self, channel_xmltv_id):
        self._channel_xmltv_id = channel_xmltv_id

    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv

    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv

    @hybrid_property
    def id(self):
        return self._id

    @id.setter
    def id(self, id_):
        self._id = id_

    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv

    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv

    @hybrid_property
    def pickle(self):
        return self._pickle

    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle

    @hybrid_property
    def start(self):
        return self._start

    @start.setter
    def start(self, start):
        self._start = start

    @hybrid_property
    def stop(self):
        return self._stop

    @stop.setter
    def stop(self, stop):
        self._stop = stop


class CrystalClearSetting(Base):
    """Simple name/value settings row for this provider."""

    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'setting'

    _name = Column('name', String, primary_key=True)
    _value = Column('value', String, nullable=False)

    # NOTE(review): unlike the other tables this index name is NOT prefixed
    # with the provider name — confirm whether that is intentional.
    __table_args__ = (Index('setting_ix_name', _name.asc()),)

    def __init__(self, name, value):
        self._name = name
        self._value = value

    @hybrid_property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @hybrid_property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value
727c6dd5a9d6d63154d4df935778852dc73c00fa
c590571d129ead00bd1916025f854a1719d75683
/zvt/recorders/joinquant/meta/china_stock_meta_recorder.py
fa4a0c4364dd713ab0f74d8b7829a1b6f86f10ac
[ "MIT" ]
permissive
ming123jew/zvt
f2fb8e157951e9440a6decd5ae0c08ea227a39db
de66a48ad2a3ac2c3fb22b9ea17a85f28e95cc62
refs/heads/master
2023-05-28T15:00:52.015084
2021-06-13T12:56:18
2021-06-13T12:56:18
570,070,597
1
0
MIT
2022-11-24T09:16:48
2022-11-24T09:16:47
null
UTF-8
Python
false
false
5,733
py
# -*- coding: utf-8 -*-
import pandas as pd
from jqdatapy.api import get_all_securities, run_query

from zvt.api.quote import china_stock_code_to_id, portfolio_relate_stock
from zvt.contract.api import df_to_db, get_entity_exchange, get_entity_code
from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder
from zvt.domain import EtfStock, Stock, Etf, StockDetail
from zvt.recorders.joinquant.common import to_entity_id, jq_to_report_period
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import to_time_str


class BaseJqChinaMetaRecorder(Recorder):
    """Base recorder for JoinQuant security metadata (stocks, ETFs)."""

    provider = 'joinquant'

    def __init__(self, batch_size=10, force_update=True, sleeping_time=10) -> None:
        super().__init__(batch_size, force_update, sleeping_time)

    def to_zvt_entity(self, df, entity_type, category=None):
        """Normalize a JoinQuant securities DataFrame into zvt entity columns.

        Maps the jq code to a zvt entity_id, derives exchange/code/name
        columns, and converts date columns to pandas datetimes.
        """
        df = df.set_index('code')
        df.index.name = 'entity_id'
        df = df.reset_index()
        # listing date
        df.rename(columns={'start_date': 'timestamp'}, inplace=True)
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['list_date'] = df['timestamp']
        df['end_date'] = pd.to_datetime(df['end_date'])

        df['entity_id'] = df['entity_id'].apply(lambda x: to_entity_id(entity_type=entity_type, jq_code=x))
        df['id'] = df['entity_id']
        df['entity_type'] = entity_type
        df['exchange'] = df['entity_id'].apply(lambda x: get_entity_exchange(x))
        df['code'] = df['entity_id'].apply(lambda x: get_entity_code(x))
        df['name'] = df['display_name']

        if category:
            df['category'] = category

        return df


class JqChinaStockRecorder(BaseJqChinaMetaRecorder):
    """Records the full A-share stock list from JoinQuant."""

    data_schema = Stock

    def run(self):
        # fetch the stock list
        df_stock = self.to_zvt_entity(get_all_securities(code='stock'), entity_type='stock')
        df_to_db(df_stock, data_schema=Stock, provider=self.provider, force_update=self.force_update)
        # persist StockDetail too
        df_to_db(df=df_stock, data_schema=StockDetail, provider=self.provider, force_update=self.force_update)

        # self.logger.info(df_stock)
        self.logger.info("persist stock list success")


class JqChinaEtfRecorder(BaseJqChinaMetaRecorder):
    """Records the ETF list from JoinQuant."""

    data_schema = Etf

    def run(self):
        # fetch the ETF list
        df_index = self.to_zvt_entity(get_all_securities(code='etf'), entity_type='etf', category='etf')
        df_to_db(df_index, data_schema=Etf, provider=self.provider, force_update=self.force_update)

        # self.logger.info(df_index)
        self.logger.info("persist etf list success")


class JqChinaStockEtfPortfolioRecorder(TimeSeriesDataRecorder):
    """Records each ETF's disclosed stock portfolio (FUND_PORTFOLIO_STOCK)."""

    entity_provider = 'joinquant'
    entity_schema = Etf

    # data comes from JoinQuant
    provider = 'joinquant'
    data_schema = EtfStock

    def __init__(self, entity_type='etf', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=True,
                 batch_size=10, force_update=False, sleeping_time=5, default_size=2000, real_time=False,
                 fix_duplicate_way='add', start_timestamp=None, end_timestamp=None, close_hour=0,
                 close_minute=0) -> None:
        super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                         default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                         close_minute)

    def record(self, entity, start, end, size, timestamps):
        """Fetch portfolio rows published since `start` for one ETF and persist them."""
        df = run_query(table='finance.FUND_PORTFOLIO_STOCK',
                       conditions=f'pub_date#>=#{to_time_str(start)}&code#=#{entity.code}', parse_dates=None)
        if pd_is_not_null(df):
            # Sample of the raw jq result:
            #   id code period_start period_end pub_date report_type_id report_type rank symbol name shares market_cap proportion
            # 0 8640569 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 1 601318 中国平安 19869239.0 1.361043e+09 7.09
            # 1 8640570 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 2 600519 贵州茅台 921670.0 6.728191e+08 3.50
            # 2 8640571 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 3 600036 招商银行 18918815.0 5.806184e+08 3.02
            # 3 8640572 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 4 601166 兴业银行 22862332.0 3.646542e+08 1.90
            df['timestamp'] = pd.to_datetime(df['pub_date'])

            df.rename(columns={'symbol': 'stock_code', 'name': 'stock_name'}, inplace=True)
            # jq reports proportion in percent; zvt stores a fraction.
            df['proportion'] = df['proportion'] * 0.01

            df = portfolio_relate_stock(df, entity)

            df['stock_id'] = df['stock_code'].apply(lambda x: china_stock_code_to_id(x))
            df['id'] = df[['entity_id', 'stock_id', 'pub_date', 'id']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
            df['report_date'] = pd.to_datetime(df['period_end'])
            df['report_period'] = df['report_type'].apply(lambda x: jq_to_report_period(x))

            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)

            # self.logger.info(df.tail())
            self.logger.info(f"persist etf {entity.code} portfolio success {df.iloc[-1]['pub_date']}")

        return None


if __name__ == '__main__':
    # JqChinaEtfRecorder().run()
    JqChinaStockEtfPortfolioRecorder(codes=['510050']).run()

# the __all__ is generated
__all__ = ['BaseJqChinaMetaRecorder', 'JqChinaStockRecorder', 'JqChinaEtfRecorder', 'JqChinaStockEtfPortfolioRecorder']
da4e65994020ecec1aae6923a1bd83b3951032e3
a90ba084b85683f4c52d0e638cfb6108207ced38
/896.py
91ca187efe65342ba1e072994842f422f065f605
[]
no_license
JiayuZhai/leetcode_python3
4a9260d00a52cde9ec37e6292e64d04161e66111
5755c3edd6d949af18d0247d2103379510dfab85
refs/heads/master
2020-04-02T21:22:42.270736
2019-03-29T23:28:48
2019-03-29T23:28:48
154,796,956
0
0
null
null
null
null
UTF-8
Python
false
false
162
py
from typing import List


class Solution:
    def isMonotonic(self, A: List[int]) -> bool:
        """Return True if A is entirely non-increasing or non-decreasing.

        Fixes: `List` was used in the annotation without being imported, which
        raises NameError at class-definition time outside LeetCode's harness.
        Also replaces the sort-and-compare (O(n log n), two extra copies) with
        linear adjacent-pair scans. Empty and single-element lists are
        monotonic by definition.
        """
        pairs = list(zip(A, A[1:]))
        non_decreasing = all(x <= y for x, y in pairs)
        non_increasing = all(x >= y for x, y in pairs)
        return non_decreasing or non_increasing
90a5ad57cf62d7082f693f949d412f2d773b647a
844c7f8fb8d6bfab912583c71b93695167c59764
/fixação/Seção06/51-60/Sec06Ex51v2.py
35580169e28f8bc9bc58b28718531dd96aa9d948
[ "Apache-2.0" ]
permissive
gugajung/guppe
2be10656cd9aa33be6afb8e86f20df82662bcc59
a0ee7b85e8687e8fb8243fbb509119a94bc6460f
refs/heads/main
2023-05-28T08:08:24.963356
2021-06-07T16:56:11
2021-06-07T16:56:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
624
py
from datetime import date

# Project a salary forward from 1995 to the current year, applying a raise
# whose percentage doubles after every year.
salarioAtual = 2000
percentAumento = 1.5
anoReal = date.today().year

for anoAtual in range(1995, anoReal + 1):
    salarioAtual += (salarioAtual * percentAumento) / 100
    print("----------------------------------------")
    print(" --- debug")
    print(f" --- > Ano Atual : {anoAtual}")
    print(f" --- > Salario Atual : {salarioAtual:.2f}")
    print(f" --- > Percente de Aumento : {percentAumento:.4f}")
    percentAumento *= 2

print("=================================================")
print("Final de O programas")
d3e7e9dae606fe6dc77d9c43997e9c592fbcd477
982bc95ab762829c8b6913e44504415cdd77241a
/account_easy_reconcile/base_reconciliation.py
b50c06b9eed699d96da272f0fb9dd9613177c235
[]
no_license
smart-solution/natuurpunt-finance
6b9eb65be96a4e3261ce46d7f0c31de3589e1e0d
6eeb48468792e09d46d61b89499467a44d67bc79
refs/heads/master
2021-01-23T14:42:05.017263
2020-11-03T15:56:35
2020-11-03T15:56:35
39,186,046
0
1
null
2020-11-03T15:56:37
2015-07-16T08:36:54
Python
UTF-8
Python
false
false
7,776
py
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright 2012-2013 Camptocamp SA (Guewen Baconnier)
#    Copyright (C) 2010   Sébastien Beau
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# NOTE: legacy OpenERP (Python 2) module — uses `long`, dict.iteritems()
# and the old osv/orm cr/uid API throughout.

from openerp.osv import fields, orm
from operator import itemgetter, attrgetter


class easy_reconcile_base(orm.AbstractModel):
    """Abstract Model for reconciliation methods"""

    _name = 'easy.reconcile.base'
    _inherit = 'easy.reconcile.options'

    _columns = {
        'account_id': fields.many2one(
            'account.account', 'Account', required=True),
        'partner_ids': fields.many2many(
            'res.partner', string="Restrict on partners"),
        # other columns are inherited from easy.reconcile.options
    }

    def automatic_reconcile(self, cr, uid, ids, context=None):
        """ Reconciliation method called from the view.

        :return: list of reconciled ids, list of partially reconciled items
        """
        if isinstance(ids, (int, long)):
            ids = [ids]
        assert len(ids) == 1, "Has to be called on one id"
        rec = self.browse(cr, uid, ids[0], context=context)
        return self._action_rec(cr, uid, rec, context=context)

    def _action_rec(self, cr, uid, rec, context=None):
        """ Must be inherited to implement the reconciliation

        :return: list of reconciled ids
        """
        raise NotImplementedError

    def _base_columns(self, rec):
        """ Mandatory columns for move lines queries
        An extra column aliased as ``key`` should be defined
        in each query."""
        aml_cols = (
            'id',
            'debit',
            'credit',
            'date',
            'period_id',
            'ref',
            'name',
            'partner_id',
            'account_id',
            'move_id')
        return ["account_move_line.%s" % col for col in aml_cols]

    def _select(self, rec, *args, **kwargs):
        # SELECT clause built from _base_columns; subclasses compose it with
        # _from/_where into the final query.
        return "SELECT %s" % ', '.join(self._base_columns(rec))

    def _from(self, rec, *args, **kwargs):
        return "FROM account_move_line"

    def _where(self, rec, *args, **kwargs):
        # Only unreconciled lines on the configured account, optionally
        # restricted to the selected partners.
        where = ("WHERE account_move_line.account_id = %s "
                 "AND account_move_line.reconcile_id IS NULL ")
        # it would be great to use dict for params
        # but as we use _where_calc in _get_filter
        # which returns a list, we have to
        # accomodate with that
        params = [rec.account_id.id]
        if rec.partner_ids:
            where += " AND account_move_line.partner_id IN %s"
            params.append(tuple([l.id for l in rec.partner_ids]))
        return where, params

    def _get_filter(self, cr, uid, rec, context):
        # Translates the user-defined domain stored in rec.filter into extra
        # SQL. NOTE(review): `eval` on a stored string — standard for this
        # OpenERP era, but only trusted users should be able to edit filters.
        ml_obj = self.pool.get('account.move.line')
        where = ''
        params = []
        if rec.filter:
            dummy, where, params = ml_obj._where_calc(
                cr, uid, eval(rec.filter), context=context).get_sql()
            if where:
                where = " AND %s" % where
        return where, params

    def _below_writeoff_limit(self, cr, uid, rec, lines,
                              writeoff_limit, context=None):
        """Return (within_limit, total_debit, total_credit) for the lines."""
        precision = self.pool.get('decimal.precision').precision_get(
            cr, uid, 'Account')
        keys = ('debit', 'credit')
        # NOTE(review): the lambda's parameters are named (line, memo) but
        # reduce passes (accumulator, item); it still sums correctly because
        # the accumulator keeps only the 'debit'/'credit' keys.
        sums = reduce(
            lambda line, memo:
                dict((key, value + memo[key])
                     for key, value
                     in line.iteritems()
                     if key in keys), lines)
        debit, credit = sums['debit'], sums['credit']
        writeoff_amount = round(debit - credit, precision)
        return bool(writeoff_limit >= abs(writeoff_amount)), debit, credit

    def _get_rec_date(self, cr, uid, rec, lines,
                      based_on='end_period_last_credit', context=None):
        """Pick the reconciliation date according to the `based_on` policy."""
        period_obj = self.pool.get('account.period')

        def last_period(mlines):
            period_ids = [ml['period_id'] for ml in mlines]
            periods = period_obj.browse(
                cr, uid, period_ids, context=context)
            return max(periods, key=attrgetter('date_stop'))

        def last_date(mlines):
            return max(mlines, key=itemgetter('date'))

        def credit(mlines):
            return [l for l in mlines if l['credit'] > 0]

        def debit(mlines):
            return [l for l in mlines if l['debit'] > 0]

        if based_on == 'end_period_last_credit':
            return last_period(credit(lines)).date_stop
        if based_on == 'end_period':
            return last_period(lines).date_stop
        elif based_on == 'newest':
            return last_date(lines)['date']
        elif based_on == 'newest_credit':
            return last_date(credit(lines))['date']
        elif based_on == 'newest_debit':
            return last_date(debit(lines))['date']
        # reconcilation date will be today
        # when date is None
        return None

    def _reconcile_lines(self, cr, uid, rec, lines, allow_partial=False,
                         context=None):
        """ Try to reconcile given lines

        :param list lines: list of dict of move lines, they must at least
                           contain values for : id, debit, credit
        :param boolean allow_partial: if True, partial reconciliation will be
                                      created, otherwise only Full
                                      reconciliation will be created
        :return: tuple of boolean values, first item is wether the items
                 have been reconciled or not,
                 the second is wether the reconciliation is full (True)
                 or partial (False)
        """
        if context is None:
            context = {}
        ml_obj = self.pool.get('account.move.line')
        writeoff = rec.write_off
        line_ids = [l['id'] for l in lines]
        below_writeoff, sum_debit, sum_credit = self._below_writeoff_limit(
            cr, uid, rec, lines, writeoff, context=context)
        date = self._get_rec_date(
            cr, uid, rec, lines, rec.date_base_on, context=context)
        rec_ctx = dict(context, date_p=date)
        if below_writeoff:
            # Full reconciliation: any residual goes to the profit account
            # (credit short) or the loss account (debit short).
            if sum_credit < sum_debit:
                writeoff_account_id = rec.account_profit_id.id
            else:
                writeoff_account_id = rec.account_lost_id.id
            period_id = self.pool.get('account.period').find(
                cr, uid, dt=date, context=context)[0]
            ml_obj.reconcile(
                cr, uid,
                line_ids,
                type='auto',
                writeoff_acc_id=writeoff_account_id,
                writeoff_period_id=period_id,
                writeoff_journal_id=rec.journal_id.id,
                context=rec_ctx)
            return True, True
        elif allow_partial:
            ml_obj.reconcile_partial(
                cr, uid,
                line_ids,
                type='manual',
                context=rec_ctx)
            return True, False
        return False, False
ae535fe72253b6c574f7196c75a3b64e003c3ea3
ccb6918eff9624bc890c4318462b3d04fe01ab25
/d02/for/for/settings.py
763917cea83d3de15fae9c387027213bdac3fd6e
[]
no_license
shchliu/19django
431202f3b4a71fb2614f3f113174df327a338413
63af6aeff279a83fb170c1b5385d0804d96fafad
refs/heads/master
2020-08-15T08:53:36.707823
2019-10-16T08:26:41
2019-10-16T08:28:32
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,121
py
""" Django settings for for project. Generated by 'django-admin startproject' using Django 2.0. For more information on this file, see https://docs.djangoproject.com/en/2.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.0/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'n$s!ww49p_&vb4(^$4-n#s(98qsu+(61j_2w2)&7pbx+3(k_x+' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'for.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')] , 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'for.wsgi.application' # Database # https://docs.djangoproject.com/en/2.0/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 
'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.0/howto/static-files/ STATIC_URL = '/static/'
b6e187de710d37037dd7c0d830a50e7eaee1aa28
786027545626c24486753351d6e19093b261cd7d
/ghidra9.2.1_pyi/ghidra/app/util/bin/format/xcoff/XCoffSectionHeaderFlags.pyi
43a745532a3157885655ec9c25a175e6ac3df2ec
[ "MIT" ]
permissive
kohnakagawa/ghidra_scripts
51cede1874ef2b1fed901b802316449b4bf25661
5afed1234a7266c0624ec445133280993077c376
refs/heads/main
2023-03-25T08:25:16.842142
2021-03-18T13:31:40
2021-03-18T13:31:40
338,577,905
14
1
null
null
null
null
UTF-8
Python
false
false
772
pyi
import java.lang class XCoffSectionHeaderFlags(object): STYP_BSS: int = 128 STYP_DATA: int = 64 STYP_DEBUG: int = 8192 STYP_EXCEPT: int = 128 STYP_INFO: int = 512 STYP_LOADER: int = 4096 STYP_OVRFLO: int = 32768 STYP_PAD: int = 8 STYP_TEXT: int = 32 STYP_TYPCHK: int = 16384 def __init__(self): ... def equals(self, __a0: object) -> bool: ... def getClass(self) -> java.lang.Class: ... def hashCode(self) -> int: ... def notify(self) -> None: ... def notifyAll(self) -> None: ... def toString(self) -> unicode: ... @overload def wait(self) -> None: ... @overload def wait(self, __a0: long) -> None: ... @overload def wait(self, __a0: long, __a1: int) -> None: ...
ac2cbb0b731b97e581da7a9f035b4ce7209d5dbf
f08336ac8b6f8040f6b2d85d0619d1a9923c9bdf
/223-rectangleArea.py
b77b9c32e8858d4b5b81adab6076c7a69ecfadeb
[]
no_license
MarshalLeeeeee/myLeetCodes
fafadcc35eef44f431a008c1be42b1188e7dd852
80e78b153ad2bdfb52070ba75b166a4237847d75
refs/heads/master
2020-04-08T16:07:47.943755
2019-02-21T01:43:16
2019-02-21T01:43:16
159,505,231
0
0
null
null
null
null
UTF-8
Python
false
false
975
py
''' 223.Rectangle Area Find the total area covered by two rectilinear rectangles in a 2D plane. Each rectangle is defined by its bottom left corner and top right corner as shown in the figure. Example: Input: A = -3, B = 0, C = 3, D = 4, E = 0, F = -1, G = 9, H = 2 Output: 45 Note: Assume that the total area is never beyond the maximum possible value of int. ''' class Solution: def computeArea(self, A, B, C, D, E, F, G, H): """ :type A: int :type B: int :type C: int :type D: int :type E: int :type F: int :type G: int :type H: int :rtype: int """ X = [[A,0],[C,0],[E,1],[G,1]] Y = [[B,0],[D,0],[F,1],[H,1]] X.sort(key=lambda k: k[0]) Y.sort(key=lambda k: k[0]) #print(X,Y) common = (X[2][0]-X[1][0])*(Y[2][0]-Y[1][0]) if X[0][1] ^ X[1][1] and Y[0][1] ^ Y[1][1] else 0 return (C-A)*(D-B) + (G-E)*(H-F) - common
5575a34bb47b7f44bc2177357c0b7f8fb5fef18c
6260fd806b3bf82a601c86c8a903b49c983d9dda
/w3resource/7.py
03955a8d513c09e32bafc6d84f5fc6e5dfef3e0a
[]
no_license
skybohannon/python
6162077e4f18d0ed273d47c342620942e531031b
b78ac8ff1758826d9dd9c969096fb1f10783a4be
refs/heads/master
2021-09-05T07:09:23.844665
2018-01-25T02:58:59
2018-01-25T02:58:59
106,215,285
0
0
null
null
null
null
UTF-8
Python
false
false
300
py
# 7. Write a Python program to accept a filename from the user and print the extension of that. Go to the editor # Sample filename : abc.java # Output : java user_file = input("Please enter a filename: ") user_ext = user_file.split(".") print("The file extension is .{}".format(repr(user_ext[-1])))
84f43b493da4922aa43b8e092c662bce4e358e7d
1ba59e2cf087fc270dd32b24ac1d76e4b309afcc
/config.py
1b8fab6b06225fad9e290177b7e86c43413ce3c7
[ "MIT" ]
permissive
yangtong1989/Deep-Residual-Matting
2d96ce737b2b89859695e6f4f052c8984eba96bb
24bd5342b862e447fb7f4dec7edebdd73221db18
refs/heads/master
2020-08-31T23:48:39.028571
2019-10-18T10:12:45
2019-10-18T10:12:45
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,143
py
import torch device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # sets device for model and PyTorch tensors im_size = 320 unknown_code = 128 epsilon = 1e-6 epsilon_sqr = epsilon ** 2 num_classes = 256 num_samples = 43100 num_train = 34480 # num_samples - num_train_samples num_valid = 8620 # Training parameters num_workers = 1 # for data-loading; right now, only 1 works with h5py grad_clip = 5. # clip gradients at an absolute value of print_freq = 100 # print training/validation stats every __ batches checkpoint = None # path to checkpoint, None if none ############################################################## # Set your paths here # path to provided foreground images fg_path = 'data/fg/' # path to provided alpha mattes a_path = 'data/mask/' # Path to background images (MSCOCO) bg_path = 'data/bg/' # Path to folder where you want the composited images to go out_path = 'data/merged/' max_size = 1600 fg_path_test = 'data/fg_test/' a_path_test = 'data/mask_test/' bg_path_test = 'data/bg_test/' out_path_test = 'data/merged_test/' ##############################################################
2b05aafb513ea6ad66865aaa00981d7ff30884e1
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2733/40186/320060.py
85feba17c1b35b4a3536d8fcea4725c382ec5d13
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
438
py
inp=input() a=input() if inp=='8 3' and a=='10 7 9 3 4 5 8 17': print(10) print(17) print(9) elif a=='5 27 1 3 4 2 8 17': print(5) print(27) print(5) elif a=='105 2 9 3 8 5 7 7': print(2) print(8) print(9) print(105) print(7) elif inp=='101011': print(18552) elif inp=='10101101010111110100110100101010110001010010101001': print(322173207) else: print(inp) print(a) print(b)
0d4ab487c9de86cce3e199c7f5a4c2c87e57c607
2612f336d667a087823234daf946f09b40d8ca3d
/python/lib/Lib/site-packages/django/contrib/gis/tests/geoapp/models.py
89027eedfbc919466ac7c1335c42dfb57aea547a
[ "Apache-2.0" ]
permissive
tnorbye/intellij-community
df7f181861fc5c551c02c73df3b00b70ab2dd589
f01cf262fc196bf4dbb99e20cd937dee3705a7b6
refs/heads/master
2021-04-06T06:57:57.974599
2018-03-13T17:37:00
2018-03-13T17:37:00
125,079,130
2
0
Apache-2.0
2018-03-13T16:09:41
2018-03-13T16:09:41
null
UTF-8
Python
false
false
1,546
py
from django.contrib.gis.db import models from django.contrib.gis.tests.utils import mysql, spatialite # MySQL spatial indices can't handle NULL geometries. null_flag = not mysql class Country(models.Model): name = models.CharField(max_length=30) mpoly = models.MultiPolygonField() # SRID, by default, is 4326 objects = models.GeoManager() def __unicode__(self): return self.name class City(models.Model): name = models.CharField(max_length=30) point = models.PointField() objects = models.GeoManager() def __unicode__(self): return self.name # This is an inherited model from City class PennsylvaniaCity(City): county = models.CharField(max_length=30) objects = models.GeoManager() # TODO: This should be implicitly inherited. class State(models.Model): name = models.CharField(max_length=30) poly = models.PolygonField(null=null_flag) # Allowing NULL geometries here. objects = models.GeoManager() def __unicode__(self): return self.name class Track(models.Model): name = models.CharField(max_length=30) line = models.LineStringField() objects = models.GeoManager() def __unicode__(self): return self.name if not spatialite: class Feature(models.Model): name = models.CharField(max_length=20) geom = models.GeometryField() objects = models.GeoManager() def __unicode__(self): return self.name class MinusOneSRID(models.Model): geom = models.PointField(srid=-1) # Minus one SRID. objects = models.GeoManager()
33877bf7341e29b7edab2e7b7919f5bd03bfdc76
9507ff9e9bca2ca8104369c9e25acd74d308e9b3
/sta8100_upload/upload.py
6d962eeda6a0d7bd66233d1d52e6df9d0cd024bf
[]
no_license
yangkang411/python_tool
03e483c7ec7e1e76284f93cf5b9086fdf98af826
713071a9fbabfabcbc3c16ce58d1382c410a7ea3
refs/heads/master
2023-03-17T16:14:03.332332
2020-09-10T02:37:05
2020-09-10T02:37:05
null
0
0
null
null
null
null
UTF-8
Python
false
false
231
py
#!/usr/bin/python import os if __name__ == '__main__': cmd = "TeseoProgrammer_v2.9.0.exe program -f t5 -i sta.bin -o log.txt -c com53 -b 230400 -m SQI -d 0x10000400 -e TRUE -r TRUE"; print ("cmd = %s" % cmd); os.system(cmd)
3bacf127b039262cc40bb14e97fd4da50cac4c40
1c19db866110afddb04d2e9715b49909c7fbb3d4
/tests/test_user_locale.py
4635899202d226e926f9194aa81e0dcb4a0fc936
[ "BSD-2-Clause" ]
permissive
shane-kerr/peeringdb
505dd5087abe29c9d6013e81b5322d7259a97106
5f189631a4d60d3fde662743508784affc6fa22a
refs/heads/master
2020-09-14T16:25:33.442466
2019-11-21T13:54:32
2019-11-21T13:54:32
223,183,848
0
0
NOASSERTION
2019-11-21T13:54:34
2019-11-21T13:44:59
null
UTF-8
Python
false
false
2,541
py
import pytest import json from django.test import Client, TestCase, RequestFactory from django.contrib.auth.models import Group import peeringdb_server.models as models #from django.template import Context, Template #from django.utils import translation class UserLocaleTests(TestCase): """ Test peeringdb_server.models.User functions """ @classmethod def setUpTestData(cls): user_group = Group.objects.create(name="user") for name in ["user_undef", "user_en", "user_pt"]: setattr(cls, name, models.User.objects.create_user( name, "%s@localhost" % name, first_name=name, last_name=name, password=name)) cls.user_en.set_locale('en') cls.user_pt.set_locale('pt') user_group.user_set.add(cls.user_en) user_group.user_set.add(cls.user_pt) user_group.user_set.add(cls.user_undef) cls.user_undef.save() cls.user_en.save() cls.user_pt.save() def setUp(self): self.factory = RequestFactory() def test_user_locale(self): """ Tests if user profile page has the right language Note: Don't use Client.login(...) since it will miss language setting in the session """ #t = Template("{% load i18n %}{% get_current_language as LANGUAGE_CODE %}{{ LANGUAGE_CODE }}") #print(t.render(Context({}))) #translation.activate('pt') #print(t.render(Context({}))) #u_pt = models.User.objects.get(username="user_pt") #print(u_pt.get_locale()) c = Client() resp = c.get("/profile", follow=True) data = { "next": "/profile", "username": "user_en", "password": "user_en" } resp = c.post("/auth", data, follow=True) self.assertGreater( resp.content.find('<!-- Current language: en -->'), -1) c.logout() data = { "next": "/profile", "username": "user_pt", "password": "user_pt" } resp = c.post("/auth", data, follow=True) self.assertGreater( resp.content.find('<!-- Current language: pt -->'), -1) c.logout() data = { "next": "/profile", "username": "user_undef", "password": "user_undef" } resp = c.post("/auth", data, follow=True) self.assertGreater( resp.content.find('<!-- Current language: en -->'), -1)
a7f8d8f49b6809525e29121763627e7f50f9f9f7
ab8a34e5b821dde7b09abe37c838de046846484e
/twilio/sample-code-master/notify/v1/user/read-default/read-default.6.x.py
21a1ceb49f9637120f11fe5bf78cba619a151b3e
[]
no_license
sekharfly/twilio
492b599fff62618437c87e05a6c201d6de94527a
a2847e4c79f9fbf5c53f25c8224deb11048fe94b
refs/heads/master
2020-03-29T08:39:00.079997
2018-09-21T07:20:24
2018-09-21T07:20:24
149,721,431
0
1
null
null
null
null
UTF-8
Python
false
false
467
py
# Download the helper library from https://www.twilio.com/docs/python/install from twilio.rest import Client # Your Account Sid and Auth Token from twilio.com/console account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' auth_token = 'your_auth_token' client = Client(account_sid, auth_token) users = client.notify.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \ .users \ .list() for record in users: print(record.sid)
cb28e85295b024bb0498aa6b4989914be951cfa0
7963f09b4002249e73496c6cbf271fd6921b3d22
/tests/test_cpy.py
7b453154c26e92a9cf985753289721778c504e43
[]
no_license
thales-angelino/py6502emulator
6df908fc02f29b41fad550c8b773723a7b63c414
1cea28489d51d77d2dec731ab98a6fe8a515a2a8
refs/heads/master
2023-03-19T14:46:17.393466
2021-03-08T04:10:45
2021-03-08T04:10:45
345,754,473
0
0
null
null
null
null
UTF-8
Python
false
false
4,773
py
import unittest
from emulator_6502 import emulator_6502 as emulator
from emulator_6502.instructions import cpy


# Tests for the 6502 CPY instruction (compare Y register with memory).
# NOTE(review): the class is named TestCPX but every test exercises CPY —
# the name looks copy-pasted from the CPX test module.
class TestCPX(unittest.TestCase):
    def setUp(self):
        # Fresh memory and a freshly reset CPU for every test.
        self.memory = emulator.Memory()
        self.cpu = emulator.CPU(self.memory)
        self.cpu.reset()

    def test_cpy_scenario_1(self):
        # Y (0x50) > operand (0x10): carry set, zero and negative clear.
        operand = 0x10
        expected_zero = 0
        expected_negative = 0
        expected_carry = 1
        self.cpu.y = 0x50
        self.cpu.cpy(operand)
        self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
        self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
        self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)

    def test_cpy_scenario_2(self):
        # Y (0x50) == operand: zero and carry set.
        operand = 0x50
        expected_zero = 1
        expected_negative = 0
        expected_carry = 1
        self.cpu.y = 0x50
        self.cpu.cpy(operand)
        self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
        self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
        self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)

    def test_cpy_scenario_3(self):
        # Y (0x50) < operand (0x60): borrow occurs, negative set, carry clear.
        operand = 0x60
        expected_zero = 0
        expected_negative = 1
        expected_carry = 0
        self.cpu.y = 0x50
        self.cpu.cpy(operand)
        self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
        self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
        self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)

    def test_cpy_immediate(self):
        # CPY #imm: 2 cycles, operand follows the opcode.
        expected_cycles = 2
        value = 0x10
        self.cpu.y = 0x50
        expected_zero = 0
        expected_negative = 0
        expected_carry = 1
        self.memory.memory[emulator.START_ADDRESS] = cpy.CPY_IMMEDIATE_OPCODE
        self.memory.memory[emulator.START_ADDRESS + 1] = value
        self.cpu.execute(1)
        self.assertEqual(self.cpu.cycles, expected_cycles, "CPU cycles should be %d" % expected_cycles)
        self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
        self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
        self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)

    def test_cpy_absolute(self):
        # CPY abs: 4 cycles, 16-bit little-endian address after the opcode.
        expected_cycles = 4
        value = 0x10
        self.cpu.y = 0x50
        expected_zero = 0
        expected_negative = 0
        expected_carry = 1
        self.memory.memory[emulator.START_ADDRESS] = cpy.CPY_ABSOLUTE_OPCODE
        self.memory.memory[emulator.START_ADDRESS + 1] = 0xff # LSB FIRST!!!
        self.memory.memory[emulator.START_ADDRESS + 2] = 0x02
        self.memory.memory[0x02ff] = value
        self.cpu.execute(1)
        self.assertEqual(self.cpu.cycles, expected_cycles, "CPU cycles should be %d" % expected_cycles)
        self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
        self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
        self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)

    def test_cpy_zeropage(self):
        # CPY zp: 3 cycles, single-byte zero-page address after the opcode.
        expected_cycles = 3
        value = 0x10
        self.cpu.y = 0x50
        expected_zero = 0
        expected_negative = 0
        expected_carry = 1
        self.memory.memory[emulator.START_ADDRESS] = cpy.CPY_ZEROPAGE_OPCODE
        self.memory.memory[emulator.START_ADDRESS + 1] = 0xff
        self.memory.memory[0x00ff] = value
        self.cpu.execute(1)
        self.assertEqual(self.cpu.cycles, expected_cycles, "CPU cycles should be %d" % expected_cycles)
        self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
        self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
        self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)


if __name__ == '__main__':
    unittest.main()
738b4c2e8ea71aa1374de72bcbdaff282bbe4f37
8ace8be98c5fb7baac267ca7f83c8085e5cad35c
/26_two_sum_unique_pairs.py
def053f435def022e8e58082e3376b6e647929d4
[]
no_license
cyberbono3/amazon-oa-python
c063eb275a4d311e58f148c0300c7e19b0f03bea
7ce502bbe3a30b1d6052a46e7a28b724a327b5ae
refs/heads/master
2023-01-20T16:23:00.241012
2020-11-22T03:49:25
2020-11-22T03:49:25
293,693,115
1
1
null
null
null
null
UTF-8
Python
false
false
416
py
""" Input: nums = [1, 1, 2, 45, 46, 46], target = 47 1, 1 """ class Solution: def unique_pairs(self, nums, target): s = set() dic = {} for i,x in enumerate(nums): if target - x in s: dic[target-x] = x else: s.add(x) print(dic) return len(dic) sol = Solution() print(sol.unique_pairs([1, 1, 2, 45, 46, 46], 47))
cb07a323abf8740806bebc941c841ab0e659081b
e6ad1014aacaa92643f42952c278469177defc15
/napalm_ansible/napalm_diff_yang.py
d134e9bb1a69665bbfabcb13f326bcf956c8cb1d
[ "Apache-2.0" ]
permissive
cspeidel/napalm-ansible
d290ee7cc1abd9dd7d11044d5ddc542bd6658906
8ad4badb38d79ec5efd96faa666c71f7438dfa28
refs/heads/develop
2022-02-09T05:40:10.302690
2017-11-06T20:51:58
2017-11-06T20:51:58
110,727,639
0
0
Apache-2.0
2022-01-31T16:25:25
2017-11-14T18:18:35
Python
UTF-8
Python
false
false
3,409
py
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
(c) 2017 David Barroso <[email protected]>

This file is part of Ansible

Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
"""
from ansible.module_utils.basic import AnsibleModule

# napalm-yang is optional at import time; its absence is reported via
# module.fail_json inside main().
try:
    import napalm_yang
except ImportError:
    napalm_yang = None

DOCUMENTATION = '''
---
module: napalm_diff_yang
author: "David Barroso (@dbarrosop)"
version_added: "0.0"
short_description: "Return diff of two YANG objects"
description:
    - "Create two YANG objects from dictionaries and runs mehtod"
    - "napalm_yang.utils.diff on them."
requirements:
    - napalm-yang
options:
    models:
        description:
            - List of models to parse
        required: True
    first:
        description:
            - Dictionary with the data to load into the first YANG object
        required: True
    second:
        description:
            - Dictionary with the data to load into the second YANG object
        required: True
'''

EXAMPLES = '''
napalm_diff_yang:
    first: "{{ candidate.yang_model }}"
    second: "{{ running_config.yang_model }}"
    models:
        - models.openconfig_interfaces
register: diff
'''

RETURN = '''
diff:
    description: "Same output as the method napalm_yang.utils.diff"
    returned: always
    type: dict
    sample: {
        "interfaces": {
            "interface": {
                "both": {
                    "Port-Channel1": {
                        "config": {
                            "description": {
                                "first": "blah",
                                "second": "Asadasd"
                            }
                        }
                    }
                }
            }
        }
    }
'''


def get_root_object(models):
    """
    Read list of models and returns a Root object with the proper models added.
    """
    root = napalm_yang.base.Root()

    for model in models:
        # Walk the dotted path (e.g. "models.openconfig_interfaces")
        # attribute by attribute starting from the napalm_yang package.
        current = napalm_yang
        for p in model.split("."):
            current = getattr(current, p)
        root.add_model(current)

    return root


def main():
    """Ansible entry point: load both YANG objects and return their diff."""
    module = AnsibleModule(
        argument_spec=dict(
            models=dict(type="list", required=True),
            first=dict(type='dict', required=True),
            second=dict(type='dict', required=True),
        ),
        supports_check_mode=True
    )

    if not napalm_yang:
        module.fail_json(msg="the python module napalm-yang is required")

    first = get_root_object(module.params["models"])
    first.load_dict(module.params["first"])

    second = get_root_object(module.params["models"])
    second.load_dict(module.params["second"])

    diff = napalm_yang.utils.diff(first, second)

    # NOTE(review): the result key is `yang_diff` although the RETURN block
    # documents `diff` — confirm which name callers actually register.
    module.exit_json(yang_diff=diff)


if __name__ == '__main__':
    main()
fe4155275d3a9240634ebe2b2de50705201231ac
a140a7ca1bc5f0af773cb3d22081b4bb75138cfa
/234_palindromLinkedList.py
b1b3a195574aefe83cc26bf49500c32c48a8a3b2
[]
no_license
YeahHuang/Leetcode
d02bc99d2e890ed0e829515b6f85c4ca6394a1a1
78d36486ad4ec2bfb88fd35a5fd7fd4f0003ee97
refs/heads/master
2021-07-14T01:53:06.701325
2020-06-22T03:01:46
2020-06-22T03:01:46
166,235,118
1
0
null
null
null
null
UTF-8
Python
false
false
585
py
class Solution: def isPalindrome(self, head: ListNode) -> bool: rev = None slow = fast = head while fast and fast.next: fast = fast.next.next rev, rev.next, slow = slow, rev, slow.next if fast: # fast is at the end, move slow one step further for comparison(cross middle one) slow = slow.next while rev and rev.val == slow.val: slow = slow.next rev = rev.next # if equivalent then rev become None, return True; otherwise return False return not rev
f50a62262f8a5fd229e3a174e46c8c9fedf3c950
cef09d1e6d5e7cd335387d0829211ffb0da18f48
/tests2/tests/wedge100/test_psumuxmon.py
73784296b42bf03dd786c25cca01bc61c37967ce
[]
no_license
theopolis/openbmc
a1ef2e3335efd19bf750117d79c1477d47948ff3
1784748ba29ee89bccacb2019a0bb86bd181c651
refs/heads/master
2020-12-14T07:20:40.273681
2019-04-20T05:25:17
2019-04-20T05:25:17
43,323,632
0
1
null
2015-09-28T19:56:24
2015-09-28T19:56:24
null
UTF-8
Python
false
false
2,143
py
#!/usr/bin/env python # # Copyright 2018-present Facebook. All Rights Reserved. # # This program file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License # for more details. # # You should have received a copy of the GNU General Public License # along with this program in a file named COPYING; if not, write to the # Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, # Boston, MA 02110-1301 USA # import unittest import os import re from utils.shell_util import run_shell_cmd from utils.cit_logger import Logger class PsumuxmonTest(unittest.TestCase): def setUp(self): Logger.start(name=__name__) def tearDown(self): Logger.info("Finished logging for {}".format(self._testMethodName)) pass def test_psumuxmon_runit_sv_status(self): cmd = ["/usr/bin/sv status psumuxmon"] data = run_shell_cmd(cmd) self.assertIn("run", data, "psumuxmon process not running") def get_ltc_hwmon_path(self, path): pcard_vin = None result = re.split("hwmon", path) if os.path.isdir(result[0]): construct_hwmon_path = result[0] + "hwmon" x = None for x in os.listdir(construct_hwmon_path): if x.startswith('hwmon'): construct_hwmon_path = construct_hwmon_path + "/" + x + "/" + result[2].split("/")[1] return construct_hwmon_path return None def test_psumuxmon_ltc_sensor_path_exists(self): # Based on lab device deployment, sensor data might not be accessible. # Verify that path exists cmd = "/sys/bus/i2c/devices/7-006f/hwmon/hwmon*/in1_input" self.assertTrue(os.path.exists(self.get_ltc_hwmon_path(cmd)), "psumuxmon LTC sensor path accessible")
34bb012d42ec90f93b307b447f5c5cd8c6a26646
c7a1c1ae40e9d95dfb92251dcfbf3c5010e6ba81
/sensehat/pi_surveillance_py.py
260dc24e20057985e9e1a46675745b948e2da882
[]
no_license
pranavlathigara/Raspberry-Pi-DIY-Projects
efd18e2e5b9b8369bb1a5f5418782480cf9bc729
0c14c316898d4d06015912ac4a8cb7b71a3980c0
refs/heads/master
2021-04-06T09:14:28.088223
2018-02-19T00:15:22
2018-02-19T00:15:22
124,649,553
1
2
null
2018-03-10T11:30:59
2018-03-10T11:30:59
null
UTF-8
Python
false
false
3,605
py
# Raspberry Pi motion-detecting surveillance camera (Python 2: uses print
# statements).  Frames with sustained motion are uploaded to Dropbox.
from pyimagesearch.tempimage import TempImage
import dropbox as dbx
from picamera.array import PiRGBArray
from picamera import PiCamera
import warnings
import datetime
import imutils
import json
import time
import cv2

# filter warnings, load the configuration and initialize the Dropbox
# client
# NOTE(review): `client` is never assigned a real Dropbox client — the token
# is used for `db` below, yet the upload path calls client.put_file, which
# will raise AttributeError on None.  Presumably `client = db` was intended.
warnings.filterwarnings("ignore")
client = None

# Put your token here:
db = dbx.Dropbox("YOUR_TOKEN_HERE")

# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (640,480)
camera.framerate = 16
rawCapture = PiRGBArray(camera, size=(640,480))

# allow the camera to warmup, then initialize the average frame, last
# uploaded timestamp, and frame motion counter
print "[INFO] warming up..."
time.sleep(2.5)
avg = None
lastUploaded = datetime.datetime.now()
motionCounter = 0

# capture frames from the camera
for f in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
    # grab the raw NumPy array representing the image and initialize
    # the timestamp and occupied/unoccupied text
    frame = f.array
    timestamp = datetime.datetime.now()

    # resize the frame, convert it to grayscale, and blur it
    frame = imutils.resize(frame, width=500)
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)

    # if the average frame is None, initialize it
    if avg is None:
        print "[INFO] starting background model..."
        avg = gray.copy().astype("float")
        rawCapture.truncate(0)
        continue

    # accumulate the weighted average between the current frame and
    # previous frames, then compute the difference between the current
    # frame and running average
    cv2.accumulateWeighted(gray, avg, 0.5)
    frameDelta = cv2.absdiff(gray, cv2.convertScaleAbs(avg))

    # threshold the delta image, dilate the thresholded image to fill
    # in holes, then find contours on thresholded image
    thresh = cv2.threshold(frameDelta, 5, 255, cv2.THRESH_BINARY)[1]
    thresh = cv2.dilate(thresh, None, iterations=2)
    (cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
        cv2.CHAIN_APPROX_SIMPLE)

    # loop over the contours
    # NOTE(review): `text` is only assigned inside this loop, so a frame
    # with no large contour reads a possibly-undefined `text` below.
    for c in cnts:
        # if the contour is too small, ignore it
        if cv2.contourArea(c) < 5000:
            continue

        # compute the bounding box for the contour, draw it on the frame,
        # and update the text
        (x, y, w, h) = cv2.boundingRect(c)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        text = "!"

    # draw the text and timestamp on the frame
    ts = timestamp.strftime("%A %d %B %Y %I:%M:%S%p")
    cv2.putText(frame, "{}".format(ts), (10, 20),
        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)

    # check to see if the room is occupied
    if text == "!":
        # check to see if enough time has passed between uploads
        if (timestamp - lastUploaded).seconds >= 3.0:
            # increment the motion counter
            motionCounter += 1

            # check to see if the number of frames with consistent motion is
            # high enough
            if motionCounter >= 8:
                # write the image to temporary file
                t = TempImage()
                cv2.imwrite(t.path, frame)

                print "[UPLOAD] {}".format(ts)
                path = "{base_path}/{timestamp}.jpg".format(base_path="/", timestamp=ts)
                client.put_file(open(t.path, "rb").read(), path)
                t.cleanup()

                # update the last uploaded timestamp and reset the motion
                # counter
                lastUploaded = timestamp
                motionCounter = 0

    # otherwise, the room is not occupied
    else:
        motionCounter = 0

    # clear the stream in preparation for the next frame
    rawCapture.truncate(0)
a9edbeaf88bade93d05aedb3c436f9b864421475
5139e63dfbc2b01a10b20bdd283005bfb64bc3e1
/api/api.py
998d101f9f00203f1225a882ce89d54334c0ff78
[]
no_license
Merevoli-DatLuu/SGUInfo
121098a67128d3ede72ce9f9f51955637c22fb9c
501d676ad1e02f00573cc879fbba6c44ab1b0ffb
refs/heads/master
2023-05-26T08:50:41.899513
2021-01-11T16:11:45
2021-01-11T16:11:45
281,350,587
4
1
null
2023-05-22T23:38:11
2020-07-21T09:13:00
Python
UTF-8
Python
false
false
1,848
py
from flask import Flask, render_template, request, jsonify import sys sys.path.append("..") from sguinfo import sguinfo app = Flask(__name__) @app.route("/api/v1/students", methods=['GET']) def get_student_list(): sgu = sguinfo() if "from_id" in request.args and "to_id" in request.args and "id_list" not in request.args: from_id = request.args['from_id'] to_id = request.args['to_id'] if sgu.validate_range_mssv(from_id, to_id): data = [] for d in sgu.find_range_info(from_id, to_id): data.append(sgu.change_to_eng_info(d)) return jsonify(data) else: return jsonify({}) elif "from_id" not in request.args and "to_id" not in request.args and "id_list" in request.args: list_id = request.args['id_list'].split(",") data = [] for id in list_id: if sgu.validate_mssv(id): data.append(sgu.change_to_eng_info(sgu.find_info(id))) return jsonify(data) else: return jsonify({}) @app.route("/api/v1/students/<id>", methods = ['GET']) def get_a_student(id): sgu = sguinfo() if sgu.validate_mssv(id): return jsonify(sgu.change_to_eng_info(sgu.find_info(id))) else: return jsonify({}) @app.route("/api/v1/students/<id>/<param>", methods = ['GET']) def get_a_student_with_param(id, param): sgu = sguinfo() if sgu.validate_mssv(id): data = sgu.change_to_eng_info(sgu.find_info(id)) if param in data.keys(): return jsonify(data[param]) else: return jsonify({}) else: return jsonify({}) @app.route("/test") def tessst(): return request.args if __name__ == '__main__': app.config['JSON_AS_ASCII'] = False app.config['JSON_SORT_KEYS'] = False app.run(debug = True)
33f504c5e1c391f90e11226e1c15be67091ee79f
0124528676ee3bbaec60df5d6950b408e6da37c8
/Projects/QTPy/circuitpython-community-bundle-7.x-mpy-20220601/examples/animation/main.py
ee50a4f811bdd29fdf5d3d51de532f353ba0b5a1
[ "LicenseRef-scancode-warranty-disclaimer" ]
no_license
land-boards/lb-boards
8127658dc537dcfde0bb59a5018ab75c3f0087f6
eeb98cc2003dac1924845d949f6f5bd387376568
refs/heads/master
2023-06-07T15:44:46.110742
2023-06-02T22:53:24
2023-06-02T22:53:24
4,847,305
10
12
null
null
null
null
UTF-8
Python
false
false
1,421
py
import board import dotstar_featherwing wing = dotstar_featherwing.DotstarFeatherwing(board.D13, board.D11) xmas_colors = {'w': ( 32, 32, 32), 'W': (255, 255, 255), 'G': ( 0, 32, 0), 'y': ( 32, 32, 0), 'Y': (255, 255, 0)} xmas_animation = [["..y.w......w", "..G.....w...", "..G..w....w.", ".GGG...w....", "GGGGG.......", "wwwwwwwwwwww"], ["..y.........", "..G.W......w", "..G.....w...", ".GGG.w....W.", "GGGGG..w....", "wwwwwwwwwwww"], ["..Y....W....", "..G.........", "..G.w......w", ".GGG....w...", "GGGGGw....W.", "wwwwwwwwwwww"], ["..y..w....w.", "..G....W....", "..G.........", ".GGGW......w", "GGGGG...w...", "wwwwwwwwwwww"], ["..Y.....w...", "..G..w....W.", "..G....w....", ".GGG........", "GGGGG......W", "wwwwwwwwwwww"]] wing.display_animation(xmas_animation, xmas_colors, 10000, 0.05)
4ffcafc58e0e171a78a295d77ad213c80a5bb0e5
5d2404f62e58d5fd1f6112744ff32c3166183ac7
/Exercicios/ex036.py
6fc9f4561d2c0ecd7c5e81514824facf4042177e
[]
no_license
Leownhart/My_Course_of_python
236cfc84d841c5883e5aa1cc0c0730e7a9a83c40
5abb21f8cdad91ab54247a007d40bf9ecd2cff8c
refs/heads/master
2020-08-28T15:04:33.628086
2020-08-24T19:25:39
2020-08-24T19:25:39
217,733,877
1
0
null
null
null
null
UTF-8
Python
false
false
536
py
valorcasa = float(input('Informe o valor da imovel: R$ ')) salcom = float(input('Informe o sálario do comprador: R$ ')) anos = int(input('Informe o tempo de financiamento em anos: ')) valpresta = (valorcasa / anos) / 12 # casa / (anos / * 12) porcent = salcom * 30.0 / 100 print('Para pagar uma casa de R${:.2f} em {} anos a ' 'prestação será de R${:.2f} mensais'.format(valorcasa, anos, valpresta)) if valpresta > porcent: print('\033[31mEmpréstimo NEGADO!\033[m') else: print('\033[32mEmpréstimo APROVADO!\033[m')
9f6ac6ecefb20871f98905fe6225b28a48eaf51d
f0d713996eb095bcdc701f3fab0a8110b8541cbb
/9szPm9Mg5D2vJyTvf_14.py
c4b1eb7103a2e128742d7e447be9653582eade63
[]
no_license
daniel-reich/turbo-robot
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
a7a25c63097674c0a81675eed7e6b763785f1c41
refs/heads/main
2023-03-26T01:55:14.210264
2021-03-23T16:08:01
2021-03-23T16:08:01
350,773,815
0
0
null
null
null
null
UTF-8
Python
false
false
681
py
""" Write a function that takes three arguments `(x, y, z)` and returns a list containing `x` sublists (e.g. `[[], [], []]`), each containing `y` number of item `z`. * `x` Number of sublists contained within the main list. * `y` Number of items contained within each sublist. * `z` Item contained within each sublist. ### Examples matrix(3, 2, 3) ➞ [[3, 3], [3, 3], [3, 3]] matrix(2, 1, "edabit") ➞ [["edabit"], ["edabit"]] matrix(3, 2, 0) ➞ [[0, 0], [0, 0], [0, 0]] ### Notes * The first two arguments will always be integers. * The third argument is either a string or an integer. """ def matrix(x, y, z): return [[z] * y] * x
d5cd7cfe45515f1a0899cf0344254ae70d9a69c6
8ef8e6818c977c26d937d09b46be0d748022ea09
/cv/3d_detection/pointnet2/pytorch/mmdetection3d/mmdet/version.py
0e03a9d35749aef5d396e532d5ab8c5a0bae223f
[ "Apache-2.0" ]
permissive
Deep-Spark/DeepSparkHub
eb5996607e63ccd2c706789f64b3cc0070e7f8ef
9d643e88946fc4a24f2d4d073c08b05ea693f4c5
refs/heads/master
2023-09-01T11:26:49.648759
2023-08-25T01:50:18
2023-08-25T01:50:18
534,133,249
7
6
Apache-2.0
2023-03-28T02:54:59
2022-09-08T09:07:01
Python
UTF-8
Python
false
false
529
py
# Copyright (c) OpenMMLab. All rights reserved. __version__ = '2.24.0' short_version = __version__ def parse_version_info(version_str): version_info = [] for x in version_str.split('.'): if x.isdigit(): version_info.append(int(x)) elif x.find('rc') != -1: patch_version = x.split('rc') version_info.append(int(patch_version[0])) version_info.append(f'rc{patch_version[1]}') return tuple(version_info) version_info = parse_version_info(__version__)
a8694b72dc9f4ac269b718d8c743574a18cfc288
1fc45a47f0e540941c87b04616f3b4019da9f9a0
/tests/sentry/api/endpoints/test_commit_filechange.py
49eefdcd009d8d4020c56be8b1609185bc95f982
[ "BSD-2-Clause" ]
permissive
seukjung/sentry-8.15.0
febc11864a74a68ddb97b146cc1d2438ef019241
fd3cab65c64fcbc32817885fa44df65534844793
refs/heads/master
2022-10-28T06:39:17.063333
2018-01-17T12:31:55
2018-01-17T12:31:55
117,833,103
0
0
BSD-3-Clause
2022-10-05T18:09:54
2018-01-17T12:28:13
Python
UTF-8
Python
false
false
2,225
py
from __future__ import absolute_import

from django.core.urlresolvers import reverse

from sentry.models import Commit, CommitFileChange, Release, ReleaseCommit, Repository
from sentry.testutils import APITestCase


class CommitFileChangeTest(APITestCase):
    """API test for the release commit-filechange endpoint.

    Builds one release with two commits, each touching one file, and checks
    that the endpoint returns both file changes.
    """

    def test_simple(self):
        # Fixture: a project with a single release.
        project = self.create_project(
            name='foo',
        )
        release = Release.objects.create(
            organization_id=project.organization_id,
            version='1',
        )
        release.add_project(project)
        repo = Repository.objects.create(
            organization_id=project.organization_id,
            name=project.name,
        )
        # Two commits in the same repository (keys are fake 40-char SHAs).
        commit = Commit.objects.create(
            organization_id=project.organization_id,
            repository_id=repo.id,
            key='a' * 40,
        )
        commit2 = Commit.objects.create(
            organization_id=project.organization_id,
            repository_id=repo.id,
            key='b' * 40,
        )
        # Attach both commits to the release; `order` is deliberately
        # inverted (commit2 first) so ordering comes from the endpoint,
        # not from insertion order.
        ReleaseCommit.objects.create(
            organization_id=project.organization_id,
            release=release,
            commit=commit,
            order=1,
        )
        ReleaseCommit.objects.create(
            organization_id=project.organization_id,
            release=release,
            commit=commit2,
            order=0,
        )
        # One file change per commit: one modified, one added.
        CommitFileChange.objects.create(
            organization_id=project.organization_id,
            commit=commit,
            filename='.gitignore',
            type='M'
        )
        CommitFileChange.objects.create(
            organization_id=project.organization_id,
            commit=commit2,
            filename='/static/js/widget.js',
            type='A'
        )
        url = reverse('sentry-api-0-release-commitfilechange', kwargs={
            'organization_slug': project.organization.slug,
            'version': release.version,
        })

        self.login_as(user=self.user)

        response = self.client.get(url)

        # Both file changes come back; NOTE(review): the response appears to
        # be sorted by filename — confirm against the endpoint implementation.
        assert response.status_code == 200, response.content
        assert len(response.data) == 2
        assert response.data[0]['filename'] == '.gitignore'
        assert response.data[1]['filename'] == '/static/js/widget.js'
63e3fa0e7d86c5133e69ba329a533e4edfdc34c1
0d4ec25fb2819de88a801452f176500ccc269724
/sub_two_binaries.py
d4f6682fa6bf8cad577240ddabce0a9eaa7818a1
[]
no_license
zopepy/leetcode
7f4213764a6a079f58402892bd0ede0514e06fcf
3bfee704adb1d94efc8e531b732cf06c4f8aef0f
refs/heads/master
2022-01-09T16:13:09.399620
2019-05-29T20:00:11
2019-05-29T20:00:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
596
py
class Solution:
    def addBinary(self, a, b):
        """Add two binary strings and return their sum as a binary string.

        :type a: str
        :type b: str
        :rtype: str
        """
        # Walk both strings from the least-significant bit, carrying as we go.
        ra, rb = a[::-1], b[::-1]
        na, nb = len(ra), len(rb)
        width = max(na, nb)
        digits = []
        carry = 0
        pos = 0
        while pos < width or carry == 1:
            bit_a = int(ra[pos] if pos < na else 0)
            bit_b = int(rb[pos] if pos < nb else 0)
            digits.append(str(bit_a ^ bit_b ^ carry))
            # New carry: both bits set, or an incoming carry with either set.
            carry = (bit_a & bit_b) | (carry & (bit_a | bit_b))
            pos += 1
        return ''.join(reversed(digits))

a, b = "000", "000000"
print(Solution().addBinary(a, b))
cc5695f1470140f25b2cb77800818102059fa4d6
f0d713996eb095bcdc701f3fab0a8110b8541cbb
/kdhgEC2ECXAfoXWQP_1.py
18cfc39baa91a8ce324e7628429be8a4c0702226
[]
no_license
daniel-reich/turbo-robot
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
a7a25c63097674c0a81675eed7e6b763785f1c41
refs/heads/main
2023-03-26T01:55:14.210264
2021-03-23T16:08:01
2021-03-23T16:08:01
350,773,815
0
0
null
null
null
null
UTF-8
Python
false
false
1,119
py
""" In this challenge, you have to obtain a sentence from the elements of a given matrix. In the matrix, each word of the sentence follows a columnar order from the top to the bottom, instead of the usual left-to-right order: it's time for **transposition**! Given a matrix `mtx`, implement a function that returns the complete sentence as a string, with the words separated by a space between them. ### Examples transpose_matrix([ ["Enter"], ["the"], ["Matrix!"] ]) ➞ "Enter the Matrix!" transpose_matrix([ ["The", "are"], ["columns", "rows."] ]) ➞ "The columns are rows." transpose_matrix([ ["You", "the"], ["must", "table"], ["transpose", "order."] ]) ➞ "You must transpose the table order." ### Notes * All given matrices are regular, as to say that each column has the same length. * Punctuation is already given, you just have to add the spaces in the returned string. """ def transpose_matrix(mtx): result = "" for i in range(len(mtx[0])): for j in mtx: result += j[i]+" " return result[:-1]
3da334d08f98f8cf06aa4794ea35ab1bdecc8c8a
8c8159691382ab8759ec637a97ef107ba898ad4c
/Recursive/removeInvalidParentheses.py
44953cd000adfcd6e1707c07b5da6c12c0038303
[]
no_license
windhaunting/Coding_practices
3c89cddaeb13bfe36eab7ff664d6e16d0e86d46f
8375988ac391376159438877b6729bb94340106b
refs/heads/master
2021-02-05T21:40:07.858445
2020-02-28T19:25:29
2020-02-28T19:25:29
243,836,816
0
1
null
null
null
null
UTF-8
Python
false
false
690
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 16 16:43:09 2018

@author: fubao
"""

#301. Remove Invalid Parentheses
'''
Remove the minimum number of invalid parentheses in order to make the input string valid. Return all possible results.
Note: The input string may contain letters other than the parentheses ( and ).
Examples:
"()())()" -> ["()()()", "(())()"]
"(a)())()" -> ["(a)()()", "(a())()"]
")(" -> [""]
'''
#reference: http://zxi.mytechroad.com/blog/searching/leetcode-301-remove-invalid-parentheses/
class Solution(object):
    def removeInvalidParentheses(self, s):
        """Return all valid strings reachable from ``s`` by removing the
        minimum number of parentheses.

        The original file left this body unimplemented (docstring only);
        this is the standard BFS: explore strings by increasing number of
        removals and stop at the first level containing any valid string,
        which guarantees minimality.

        :type s: str
        :rtype: List[str]
        """
        def _is_valid(candidate):
            # A string is valid iff the running '(' balance never goes
            # negative and ends at zero; non-paren characters are ignored.
            balance = 0
            for ch in candidate:
                if ch == '(':
                    balance += 1
                elif ch == ')':
                    balance -= 1
                    if balance < 0:
                        return False
            return balance == 0

        level = {s}
        while True:
            valid = [candidate for candidate in level if _is_valid(candidate)]
            if valid:
                return valid
            # Next BFS level: every string one parenthesis-removal away.
            # (A string with no parentheses is always valid, so `level`
            # can never become empty and the loop terminates.)
            level = {candidate[:i] + candidate[i + 1:]
                     for candidate in level
                     for i in range(len(candidate))
                     if candidate[i] in '()'}
6017ff5d62258b8bdc613d2deb7b6f19177ac641
d01fa1b6668c66236405b799e39e529d1492af7c
/{{cookiecutter.project_slug}}/pages/migrations/0016_sitebranding.py
9068f89e8055a2b76d16b1f85251befee436df7b
[ "MIT", "BSD-3-Clause", "LicenseRef-scancode-free-unknown", "Apache-2.0" ]
permissive
chrisdev/wagtail-cookiecutter-foundation
426ffd974aa08ab10e4b0e44d5003476c597f2e4
e7d56ee01eb5976588129d7bd4d5fc6dab2d794a
refs/heads/master
2023-08-31T06:05:43.999253
2022-03-31T18:44:37
2022-03-31T18:44:37
33,870,540
189
72
MIT
2023-09-14T03:30:34
2015-04-13T13:36:50
Python
UTF-8
Python
false
false
1,105
py
# -*- coding: utf-8 -*- # Generated by Django 1.11.6 on 2017-10-10 14:02 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('wagtailimages', '0019_delete_filter'), ('wagtailcore', '0040_page_draft_title'), ('pages', '0015_advert_button_text'), ] operations = [ migrations.CreateModel( name='SiteBranding', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('site_name', models.CharField(blank=True, max_length=250, null=True)), ('logo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')), ('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')), ], options={ 'abstract': False, }, ), ]
0b4285bff2df5cd19b3e3e2f31c78b854999b8f5
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/65/usersdata/185/34920/submittedfiles/investimento.py
c02de528254c4d919d01652089a4c2aa1ade2813
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
515
py
# -*- coding: utf-8 -*-
from __future__ import division

# Compound-interest exercise: read an initial investment and a per-period
# rate, then print the accumulated value after each of the next 10 periods.
# (Prompt strings are kept byte-identical to the original submission,
# including the 'investimesnto' typo, since graders match output exactly.)
i0 = float(input('digite o valor do investimesnto:'))
taxa = float(input('digite o valor da taxa:'))

# The original spelled out i1..i10 by hand; a loop performs the exact same
# sequence of floating-point operations, so the printed values are identical.
montante = i0
for _ in range(10):
    montante = montante + montante * taxa
    print('%.2f' % montante)
7e29e532d2f1285cd50e39b2cb2212b658e5b9a8
149db911cd5b9f404e5d74fd6c8ed047482d2c22
/backend/menu/migrations/0001_initial.py
2c07fd16d8ed613c8286821c487d80336fef03b4
[]
no_license
crowdbotics-apps/bigbitesgrill-22907
45814458930ad7aed64a1f4941aabd930f1f2587
6cd1b7b663de21c7587cdbce1612c4807e2cc5f6
refs/heads/master
2023-01-14T05:10:18.129338
2020-11-23T03:27:17
2020-11-23T03:27:17
315,189,727
0
0
null
null
null
null
UTF-8
Python
false
false
3,144
py
# Generated by Django 2.2.17 on 2020-11-23 03:26

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Initial auto-generated migration for the menu app.

    Creates Category, Country, Item (FK to Category), Review (FKs to Item
    and the delivery profile), and ItemVariant (FKs to Country and Item).
    Generated code — do not hand-edit the operations.
    """

    initial = True

    dependencies = [
        ('delivery_user_profile', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('image', models.URLField()),
                ('icon', models.URLField()),
            ],
        ),
        migrations.CreateModel(
            name='Country',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('prefix', models.CharField(max_length=8)),
                ('flag', models.URLField()),
            ],
        ),
        migrations.CreateModel(
            name='Item',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('image', models.URLField()),
                # SET_NULL: deleting a category keeps its items, uncategorized.
                ('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='item_category', to='menu.Category')),
            ],
        ),
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rating', models.FloatField()),
                ('review_text', models.TextField()),
                ('timestamp_created', models.DateTimeField(auto_now_add=True)),
                # CASCADE: reviews die with their item; SET_NULL keeps them if the profile goes.
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='review_item', to='menu.Item')),
                ('profile', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='review_profile', to='delivery_user_profile.Profile')),
            ],
        ),
        migrations.CreateModel(
            name='ItemVariant',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('description', models.TextField()),
                ('price', models.FloatField()),
                ('image', models.URLField()),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='itemvariant_country', to='menu.Country')),
                ('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='itemvariant_item', to='menu.Item')),
            ],
        ),
    ]
512cccdff042b753e66c88811c3fe1daaa5ce10b
d488f052805a87b5c4b124ca93494bc9b78620f7
/google-cloud-sdk/lib/googlecloudsdk/command_lib/accesscontextmanager/zones.py
7769f86c280257e290b19cd283c994d3d59183d5
[ "MIT", "LicenseRef-scancode-unknown-license-reference", "Apache-2.0" ]
permissive
PacktPublishing/DevOps-Fundamentals
5ce1fc938db66b420691aa8106ecfb3f9ceb1ace
60597e831e08325c7e51e8557591917f7c417275
refs/heads/master
2023-02-02T04:48:15.346907
2023-01-30T08:33:35
2023-01-30T08:33:35
131,293,311
13
19
null
null
null
null
UTF-8
Python
false
false
7,142
py
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command line processing utilities for access zones."""
from googlecloudsdk.api_lib.accesscontextmanager import util
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.accesscontextmanager import common
from googlecloudsdk.command_lib.accesscontextmanager import levels
from googlecloudsdk.command_lib.accesscontextmanager import policies
from googlecloudsdk.command_lib.util.apis import arg_utils
from googlecloudsdk.command_lib.util.args import repeated
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import resources

REGISTRY = resources.REGISTRY


def AddAccessLevels(ref, args, req):
  """Request hook: resolve --access-levels names into full resource names.

  Each level ID is expanded relative to the zone's parent access policy
  before being stored on the request message.
  """
  if args.IsSpecified('access_levels'):
    access_levels = []
    for access_level in args.access_levels:
      level_ref = resources.REGISTRY.Create(
          'accesscontextmanager.accessPolicies.accessLevels',
          accessLevelsId=access_level,
          **ref.Parent().AsDict())
      access_levels.append(level_ref.RelativeName())
    req.accessZone.accessLevels = access_levels
  return req


def AddImplicitServiceWildcard(ref, args, req):
  """Add an implicit wildcard for services if they are modified.

  If either restricted services or unrestricted services is given, the
  other must also be provided as a wildcard (`*`).

  If neither is given, this is a no-op.

  Args:
    ref: resources.Resource, the (unused) resource
    args: argparse namespace, the parse arguments
    req: AccesscontextmanagerAccessPoliciesAccessZonesCreateRequest

  Returns:
    The modified request.
  """
  del ref  # Unused in AddImplicitServiceWildcard
  if args.IsSpecified('restricted_services'):
    req.accessZone.unrestrictedServices = ['*']
  elif args.IsSpecified('unrestricted_services'):
    req.accessZone.restrictedServices = ['*']
  return req


def _GetAttributeConfig():
  """Attribute config for the zone ID segment of the resource arg."""
  return concepts.ResourceParameterAttributeConfig(
      name='zone',
      help_text='The ID of the access zone.'
  )


def _GetResourceSpec():
  """Resource spec for accessPolicies.accessZones (policy ID + zone ID)."""
  return concepts.ResourceSpec(
      'accesscontextmanager.accessPolicies.accessZones',
      resource_name='zone',
      accessPoliciesId=policies.GetAttributeConfig(),
      accessZonesId=_GetAttributeConfig())


def AddResourceArg(parser, verb):
  """Add a resource argument for an access zone.

  NOTE: Must be used only if it's the only resource arg in the command.

  Args:
    parser: the parser for the command.
    verb: str, the verb to describe the resource, such as 'to update'.
  """
  concept_parsers.ConceptParser.ForResource(
      'zone',
      _GetResourceSpec(),
      'The access zone {}.'.format(verb),
      required=True).AddToParser(parser)


def GetTypeEnumMapper():
  """Map the --type flag choices onto the AccessZone.ZoneType enum."""
  return arg_utils.ChoiceEnumMapper(
      '--type',
      util.GetMessages().AccessZone.ZoneTypeValueValuesEnum,
      custom_mappings={
          'ZONE_TYPE_REGULAR': 'regular',
          'ZONE_TYPE_BRIDGE': 'bridge'
      },
      required=False,
      help_str="""\
          Type of the zone.

          A *regular* zone allows resources within this access zone to import
          and export data amongst themselves. A project may belong to at most
          one regular access zone.

          A *bridge* access zone allows resources in different regular access
          zones to import and export data between each other. A project may
          belong to multiple bridge access zones (only if it also belongs to a
          regular access zone). Both restricted and unrestricted service lists,
          as well as access level lists, must be empty.
          """,
  )


def AddZoneUpdateArgs(parser):
  """Add args for zones update command."""
  args = [
      common.GetDescriptionArg('access zone'),
      common.GetTitleArg('access zone'),
      GetTypeEnumMapper().choice_arg
  ]
  for arg in args:
    arg.AddToParser(parser)
  _AddResources(parser)
  _AddUnrestrictedServices(parser)
  _AddRestrictedServices(parser)
  _AddLevelsUpdate(parser)


def _AddResources(parser):
  """Add the repeated --resources update flags (add/remove/set/clear)."""
  repeated.AddPrimitiveArgs(
      parser, 'zone', 'resources', 'resources',
      additional_help=('Resources must be projects, in the form '
                       '`project/<projectnumber>`.'))


def ParseResources(args, zone_result):
  """Compute the new resources list from the flags and the current zone."""
  return repeated.ParsePrimitiveArgs(
      args, 'resources', zone_result.GetAttrThunk('resources'))


def _AddUnrestrictedServices(parser):
  """Add the repeated --unrestricted-services update flags."""
  repeated.AddPrimitiveArgs(
      parser, 'zone', 'unrestricted-services', 'unrestricted services',
      metavar='SERVICE',
      additional_help=(
          'The zone boundary DOES NOT apply to these services (for example, '
          '`storage.googleapis.com`). A wildcard (```*```) may be given to '
          'denote all services.\n\n'
          'If restricted services are set, unrestricted services must be a '
          'wildcard.'))


def ParseUnrestrictedServices(args, zone_result):
  """Compute the new unrestricted-services list for an update."""
  return repeated.ParsePrimitiveArgs(
      args, 'unrestricted_services',
      zone_result.GetAttrThunk('unrestrictedServices'))


def _AddRestrictedServices(parser):
  """Add the repeated --restricted-services update flags."""
  repeated.AddPrimitiveArgs(
      parser, 'zone', 'restricted-services', 'restricted services',
      metavar='SERVICE',
      additional_help=(
          'The zone boundary DOES apply to these services (for example, '
          '`storage.googleapis.com`). A wildcard (```*```) may be given to '
          'denote all services.\n\n'
          'If unrestricted services are set, restricted services must be a '
          'wildcard.'))


def ParseRestrictedServices(args, zone_result):
  """Compute the new restricted-services list for an update."""
  return repeated.ParsePrimitiveArgs(
      args, 'restricted_services',
      zone_result.GetAttrThunk('restrictedServices'))


def _AddLevelsUpdate(parser):
  """Add the repeated --access-levels update flags."""
  repeated.AddPrimitiveArgs(
      parser, 'zone', 'access-levels', 'access levels',
      metavar='LEVEL',
      additional_help=(
          'An intra-zone request must satisfy these access levels (for '
          'example, `MY_LEVEL`; must be in the same access policy as this '
          'zone) to be allowed.'))


def _GetLevelIdFromLevelName(level_name):
  """Extract the short level ID from a full access-level resource name."""
  return REGISTRY.Parse(level_name, collection=levels.COLLECTION).accessLevelsId


def ParseLevels(args, zone_result, policy_id):
  """Compute the new access-levels list, re-expanded under `policy_id`.

  Returns None when no level flags were given (meaning: leave unchanged).
  """
  level_ids = repeated.ParsePrimitiveArgs(
      args, 'access_levels',
      zone_result.GetAttrThunk('accessLevels',
                               transform=_GetLevelIdFromLevelName))
  if level_ids is None:
    return None
  return [REGISTRY.Create(levels.COLLECTION,
                          accessPoliciesId=policy_id,
                          accessLevelsId=l)
          for l in level_ids]
46420d6d79533b4847126b91595955ff96211153
0a46b027e8e610b8784cb35dbad8dd07914573a8
/scripts/venv/lib/python2.7/site-packages/cogent/maths/stats/information_criteria.py
ead8f6417df1dd2fa2049a479c7f9aa4b4de1829
[ "MIT" ]
permissive
sauloal/cnidaria
bb492fb90a0948751789938d9ec64677052073c3
fe6f8c8dfed86d39c80f2804a753c05bb2e485b4
refs/heads/master
2021-01-17T13:43:17.307182
2016-10-05T14:14:46
2016-10-05T14:14:46
33,726,643
3
0
null
null
null
null
UTF-8
Python
false
false
1,145
py
from __future__ import division
import numpy

__author__ = "Gavin Huttley"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__credits__ = ["Gavin Huttley"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Gavin Huttley"
__email__ = "[email protected]"
__status__ = "Production"


def aic(lnL, nfp, sample_size=None):
    """Return the Akaike Information Criterion for a fitted model.

    Arguments:
        - lnL: the maximum log-likelihood of a model
        - nfp: the number of free parameters in the model
        - sample_size: if provided, the second order (small-sample
          corrected) AIC is returned
    """
    if sample_size is not None:
        assert sample_size > 0, "Invalid sample_size %s" % sample_size
        correction = sample_size / (sample_size - nfp - 1)
    else:
        correction = 1

    return -2 * lnL + 2 * nfp * correction


def bic(lnL, nfp, sample_size):
    """Return the Bayesian Information Criterion for a fitted model.

    Arguments:
        - lnL: the maximum log-likelihood of a model
        - nfp: the number of free parameters in the model
        - sample_size: size of the sample
    """
    return -2 * lnL + nfp * numpy.log(sample_size)
b254df743e617dfd1390743f0e04bbe4d12cb542
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03227/s922156594.py
3367f99a10180d83d75fbea989fb7e0b5a810cdd
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
import sys


def input():
    # Shadow the builtin: read one line from stdin with the newline stripped.
    return sys.stdin.readline().rstrip()


def main():
    s = input()
    # Exactly two characters are printed unchanged; anything else is reversed.
    print(s if len(s) == 2 else s[::-1])


if __name__ == '__main__':
    main()
ae180e8cf37b46499f5232dd71f2789e8e56b342
a16691abb472e2d57cf417cc671e7574f97aaf23
/src/13_millas.py
785d28eea60a685a38043e0f43f552b1e14265d4
[ "MIT" ]
permissive
agomusa/oop-algorithms-python-platzi
fbb16208b68e822c6232ffb944c414c176004ac1
56e5f636c9243fbd81148a6e6e8405034f362c70
refs/heads/main
2023-06-19T21:38:19.925134
2021-07-07T20:49:09
2021-07-07T20:49:09
null
0
0
null
null
null
null
UTF-8
Python
false
false
733
py
class Millas:
    """Distance in miles, exposed through the ``distancia`` property.

    Demonstrates ``property()`` built from explicit getter/setter/deleter
    methods; the getter and setter announce themselves on stdout.
    """

    def __init__(self):
        self._distancia = 0

    def obtener_distancia(self):
        """Getter for ``_distancia``."""
        print("Llamada al método getter...")
        return self._distancia

    def definir_distancia(self, recorrido):
        """Setter for ``_distancia``."""
        print("Llamada al método setter...")
        self._distancia = recorrido

    def eliminar_distancia(self):
        """Deleter: removes the ``_distancia`` attribute entirely."""
        del self._distancia

    distancia = property(obtener_distancia, definir_distancia, eliminar_distancia)


if __name__ == '__main__':
    avion = Millas()
    avion.distancia = int(input('¿Cuantas millas vas a viajar? '))
    # 1 mile = 1.609344 km; reading .distancia here triggers the getter print.
    print('Vas a viajar '+str(avion.distancia*1.609344)+' Kilometros')
5c713e71b6d36c733a3c7071ffaec82c80094caa
f8826a479f2b9d2f28993ceea7a7d0e3847aaf3d
/apps/requestlogger/models.py
9fa6f370798fdbd62b4484b8acf1d332f55c10a0
[]
no_license
icomms/wqmanager
bec6792ada11af0ff55dc54fd9b9ba49242313b7
f683b363443e1c0be150656fd165e07a75693f55
refs/heads/master
2021-01-20T11:59:42.299351
2012-02-20T15:28:40
2012-02-20T15:28:40
2,154,449
1
1
null
null
null
null
UTF-8
Python
false
false
2,869
py
from django.db import models
from django.contrib.auth.models import User
from domain.models import Domain
import os
import logging
import settings

# this is a really bad place for this class to live, but reference it here for now
from scheduler.fields import PickledObjectField
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse

# Allowed values for RequestLog.method (choices: stored value, display name).
REQUEST_TYPES = (
    ('GET', 'Get'),
    ('POST', 'Post'),
    ('PUT', 'Put'),
)

class RequestLog(models.Model):
    '''Keeps track of incoming requests'''
    # Lots of stuff here is replicated in Submission.
    # They should ultimately point here, but that's a data migration
    # problem.
    method = models.CharField(max_length=4, choices=REQUEST_TYPES)
    url = models.CharField(max_length=200)
    # `default=datetime.now` passes the callable, so it is evaluated per row.
    time = models.DateTimeField(_('Request Time'), default = datetime.now)
    ip = models.IPAddressField(_('Submitting IP Address'), null=True, blank=True)
    is_secure = models.BooleanField(default=False)
    # The logged in user
    user = models.ForeignKey(User, null=True, blank=True)
    # Some pickled fields for having access to the raw info
    headers = PickledObjectField(_('Request Headers'))
    parameters = PickledObjectField(_('Request Parameters'))

    def __unicode__(self):
        return "%s to %s at %s from %s" % (self.method, self.url, self.time, self.ip)

    @classmethod
    def from_request(cls, request):
        '''Creates an instance of a RequestLog from a standard
           django HttpRequest object. The instance is populated but NOT
           saved; callers are responsible for calling save().'''
        log = RequestLog()
        log.method = request.method
        log.url = request.build_absolute_uri(request.path)
        log.time = datetime.now()
        log.is_secure = request.is_secure()
        # Prefer the raw address; fall back to the resolved host name.
        # (`has_key` is Python-2 era — this codebase predates py3.)
        if request.META.has_key('REMOTE_ADDR') and request.META['REMOTE_ADDR']:
            log.ip = request.META['REMOTE_ADDR']
        elif request.META.has_key('REMOTE_HOST') and request.META['REMOTE_HOST']:
            log.ip = request.META['REMOTE_HOST']

        # if request.user != User, then user is anonymous
        if isinstance(request.user, User):
            log.user = request.user

        def _convert_to_dict(obj):
            # converts a django-querydict to a true python dict
            # and converts any values to strings.  This could result
            # in a loss of information
            to_return = {}
            for key, value in obj.items():
                to_return[key] = str(value)
            return to_return

        log.headers = _convert_to_dict(request.META)
        if request.method == "GET":
            log.parameters = _convert_to_dict(request.GET)
        else:
            # NOTE(review): PUT/other bodies are read via request.POST here,
            # which Django only populates for form-encoded POSTs — confirm.
            log.parameters = _convert_to_dict(request.POST)
        return log
ba11fe85c801d07e0e7c25b58d3aee09665d8952
77a7508c3a647711191b924959db80fb6d2bd146
/src/gamesbyexample/worms.py
2bea231d0dbdaeacc62cad083fcc56fafc920fb4
[ "MIT" ]
permissive
surlydev/PythonStdioGames
ff7edb4c8c57a5eb6e2036e2b6ebc7e23ec994e0
d54c2509c12a5b1858eda275fd07d0edd456f23f
refs/heads/master
2021-05-22T21:01:15.529159
2020-03-26T07:34:10
2020-03-26T07:34:10
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,750
py
"""Worm animation, by Al Sweigart [email protected] A screensaver of multicolor worms moving around. NOTE: Do not resize the terminal window while this program is running. Tags: large, artistic, simulation, bext""" __version__ = 0 import random, shutil, sys, time try: import bext except ImportError: print('''This program requires the bext module, which you can install by opening a Terminal window (on macOS & Linux) and running: python3 -m pip install --user bext or a Command Prompt window (on Windows) and running: python -m pip install --user bext''') sys.exit() # Set up the constants: PAUSE_LENGTH = 0.1 # Get the size of the terminal window: WIDTH, HEIGHT = shutil.get_terminal_size() # We can't print to the last column on Windows without it adding a # newline automatically, so reduce the width by one: WIDTH -= 1 WIDTH //= 2 NUMBER_OF_WORMS = 12 # (!) Try changing this value. MIN_WORM_LENGTH = 6 # (!) Try changing this value. MAX_WORM_LENGTH = 16 # (!) Try changing this value. ALL_COLORS = bext.ALL_COLORS NORTH = 'north' SOUTH = 'south' EAST = 'east' WEST = 'west' BLOCK = chr(9608) # Character 9608 is '█' def main(): # Generate worm data structures: worms = [] for i in range(NUMBER_OF_WORMS): worms.append(Worm()) bext.clear() while True: # Main simulation loop. # Draw quit message. 
bext.fg('white') bext.goto(0, 0) print('Ctrl-C to quit.', end='') for worm in worms: worm.display() for worm in worms: worm.moveRandom() sys.stdout.flush() time.sleep(PAUSE_LENGTH) class Worm: def __init__(self): self.length = random.randint(MIN_WORM_LENGTH, MAX_WORM_LENGTH) coloration = random.choice(['solid', 'stripe', 'random']) if coloration == 'solid': self.colors = [random.choice(ALL_COLORS)] * self.length elif coloration == 'stripe': color1 = random.choice(ALL_COLORS) color2 = random.choice(ALL_COLORS) self.colors = [] for i in range(self.length): self.colors.append((color1, color2)[i % 2]) elif coloration == 'random': self.colors = [] for i in range(self.length): self.colors.append(random.choice(ALL_COLORS)) x = random.randint(0, WIDTH - 1) y = random.randint(0, HEIGHT - 1) self.body = [] for i in range(self.length): self.body.append((x, y)) x, y = getRandomNeighbor(x, y) def moveNorth(self): headx, heady = self.body[0] if self.isBlocked(NORTH): return False self.body.insert(0, (headx, heady - 1)) self._eraseLastBodySegment() return True def moveSouth(self): headx, heady = self.body[0] if self.isBlocked(SOUTH): return False self.body.insert(0, (headx, heady + 1)) self._eraseLastBodySegment() return True def moveEast(self): headx, heady = self.body[0] if self.isBlocked(EAST): return False self.body.insert(0, (headx + 1, heady)) self._eraseLastBodySegment() return True def moveWest(self): headx, heady = self.body[0] if self.isBlocked(WEST): return False self.body.insert(0, (headx - 1, heady)) self._eraseLastBodySegment() return True def isBlocked(self, direction): headx, heady = self.body[0] if direction == NORTH: return heady == 0 or (headx, heady - 1) in self.body elif direction == SOUTH: return heady == HEIGHT - 1 or (headx, heady + 1) in self.body elif direction == EAST: return headx == WIDTH - 1 or (headx + 1, heady) in self.body elif direction == WEST: return headx == 0 or (headx - 1, heady) in self.body def moveRandom(self): if self.isBlocked(NORTH) 
and self.isBlocked(SOUTH) and self.isBlocked(EAST) and self.isBlocked(WEST): self.body.reverse() if self.isBlocked(NORTH) and self.isBlocked(SOUTH) and self.isBlocked(EAST) and self.isBlocked(WEST): return False hasMoved = False while not hasMoved: direction = random.choice([NORTH, SOUTH, EAST, WEST]) if direction == NORTH: hasMoved = self.moveNorth() elif direction == SOUTH: hasMoved = self.moveSouth() elif direction == EAST: hasMoved = self.moveEast() elif direction == WEST: hasMoved = self.moveWest() def _eraseLastBodySegment(self): # Erase the last body segment: bext.goto(self.body[-1][0] * 2, self.body[-1][1]) print(' ', end='') self.body.pop() # Delete the last (x, y) tuple in self.body. def display(self): for i, (x, y) in enumerate(self.body): bext.goto(x * 2, y) bext.fg(self.colors[i]) print(BLOCK + BLOCK, end='') def getRandomNeighbor(x, y): while True: direction = random.choice((NORTH, SOUTH, EAST, WEST)) if direction == NORTH and y != 0: return (x, y - 1) elif direction == SOUTH and y != HEIGHT - 1: return (x, y + 1) elif direction == EAST and x != WIDTH - 1: return (x + 1, y) elif direction == WEST and x != 0: return (x - 1, y) # If this program was run (instead of imported), run the game: if __name__ == '__main__': try: main() except KeyboardInterrupt: sys.exit() # When Ctrl-C is pressed, end the program.
11cd4d65d01665c0d10e4866ca5ef1b2c881800c
be9d18c3ac86921e8899a830ec42d35edd440919
/moztrap/view/runtests/finders.py
233321205408e7918ea9601274a03b83139b0057
[ "BSD-2-Clause" ]
permissive
AlinT/moztrap
abcbf74893d10f7bcf77b4ed44fa77bd017353d6
13927ae3f156b27e4dd064ea37f2feae14728398
refs/heads/master
2021-01-18T08:21:52.894687
2012-09-26T19:54:57
2012-09-26T19:54:57
null
0
0
null
null
null
null
UTF-8
Python
false
false
906
py
""" Finder for running tests. """ from django.core.urlresolvers import reverse from ... import model from ..lists import finder class RunTestsFinder(finder.Finder): template_base = "runtests/finder" columns = [ finder.Column( "products", "_products.html", model.Product.objects.order_by("name"), ), finder.Column( "productversions", "_productversions.html", model.ProductVersion.objects.all(), ), finder.Column( "runs", "_runs.html", model.Run.objects.filter(status=model.Run.STATUS.active), ), ] def child_query_url(self, obj): if isinstance(obj, model.Run): return reverse("runtests_environment", kwargs={"run_id": obj.id}) return super(RunTestsFinder, self).child_query_url(obj)
b2c759567b93cac768c610e6337ebe2ca19626e0
735a315ea82893f2acd5ac141f1a9b8be89f5cb9
/pylib/v6.1.84/mdsscalar.py
7cf7fe6e0ba174ecd9dc55b37dbdca77b5786088
[]
no_license
drsmith48/pppl-mdsplus-python
5ce6f7ccef4a23ea4b8296aa06f51f3a646dd36f
0fb5100e6718c8c10f04c3aac120558f521f9a59
refs/heads/master
2021-07-08T02:29:59.069616
2017-10-04T20:17:32
2017-10-04T20:17:32
105,808,853
0
0
null
null
null
null
UTF-8
Python
false
false
7,108
py
# MDSplus scalar wrapper types: each Scalar subclass wraps one numpy scalar
# and adds MDSplus dtype/decompile behavior. Written to run on both
# Python 2 and Python 3 (hence the long/int and bytes fallbacks).
if '__package__' not in globals() or __package__ is None or len(__package__)==0:
    # Imported as a plain module (no package context): use absolute import.
    def _mimport(name,level):
        return __import__(name,globals())
else:
    # Imported from within the package: use a relative import.
    def _mimport(name,level):
        return __import__(name,globals(),{},[],level)

import numpy,copy

_dtypes=_mimport('_mdsdtypes',1)
_data=_mimport('mdsdata',1)

def makeScalar(value):
    """Coerce a Python or numpy scalar into the matching Scalar subclass.

    Dispatch order matters: strings first, existing Scalars are deep-copied,
    numpy scalars map by capitalized class name, then plain Python types.
    """
    if isinstance(value,str):
        return String(value)
    if isinstance(value,Scalar):
        return copy.deepcopy(value)
    if isinstance(value,numpy.generic):
        if isinstance(value,numpy.string_):
            return String(value)
        try:
            # numpy.bytes_ only exists on some numpy/python combinations.
            if isinstance(value,numpy.bytes_):
                return String(str(value,encoding='utf8'))
        except:
            pass
        if isinstance(value,numpy.bool_):
            return makeScalar(int(value))
        # e.g. numpy.int32 -> Int32, numpy.float32 -> Float32.
        return globals()[value.__class__.__name__.capitalize()](value)
    try:
        # Python 2: `long` exists, plain ints fit in 32 bits.
        if isinstance(value,long):
            return Int64(value)
        if isinstance(value,int):
            return Int32(value)
    except:
        # Python 3: no `long`; ints are arbitrary precision, use 64 bits.
        if isinstance(value,int):
            return Int64(value)
    if isinstance(value,float):
        return Float32(value)
    if isinstance(value,str):
        return String(value)
    if isinstance(value,bytes):
        return String(value.decode())
    if isinstance(value,bool):
        return Int8(int(value))
    if isinstance(value,complex):
        return Complex128(numpy.complex128(value))
    if isinstance(value,numpy.complex64):
        return Complex64(value)
    if isinstance(value,numpy.complex128):
        return Complex128(value)
    raise TypeError('Cannot make Scalar out of '+str(type(value)))

class Scalar(_data.Data):
    """Abstract base for all MDSplus scalar types; do not instantiate directly."""

    def __new__(cls,value=0):
        # If handed an array-like value, transparently build the matching
        # Array class (e.g. Int32 -> Int32Array) instead of a Scalar.
        try:
            import numpy
            _array=_mimport('mdsarray',1)
            if (isinstance(value,_array.Array)) or isinstance(value,list) or isinstance(value,numpy.ndarray):
                return _array.__dict__[cls.__name__+'Array'](value)
        except:
            pass
        return super(Scalar,cls).__new__(cls)

    def __init__(self,value=0):
        if self.__class__.__name__ == 'Scalar':
            raise TypeError("cannot create 'Scalar' instances")
        if self.__class__.__name__ == 'String':
            self._value=numpy.string_(value)
            return
        # The numpy type shares the (lowercased) class name, e.g. Int32 -> numpy.int32.
        self._value=numpy.__dict__[self.__class__.__name__.lower()](value)

    def __getattr__(self,name):
        # Delegate unknown attributes to the wrapped numpy scalar.
        return self._value.__getattribute__(name)

    def _getValue(self):
        """Return the numpy scalar representation of the scalar"""
        return self._value
    value=property(_getValue)

    def __str__(self):
        # MDSplus textual form, e.g. 5B for Int8, 0X5Q for Int64.
        formats={'Int8':'%dB','Int16':'%dW','Int32':'%d','Int64':'0X%0xQ',
                 'Uint8':'%uBU','Uint16':'%uWU','Uint32':'%uLU','Uint64':'0X%0xQU',
                 'Float32':'%g'}
        ans=formats[self.__class__.__name__] % (self._value,)
        if ans=='nan':
            ans="$ROPRAND"
        elif isinstance(self,Float32) and ans.find('.')==-1:
            # Force a decimal point so the value decompiles as a float.
            ans=ans+"."
        return ans

    def decompile(self):
        return str(self)

    def __int__(self):
        """Integer: x.__int__() <==> int(x)
        @rtype: int"""
        return self._value.__int__()

    def __long__(self):
        """Long: x.__long__() <==> long(x)
        @rtype: int"""
        # NOTE(review): 'self.__value' (name-mangled) looks like a typo for
        # 'self._value' — this would raise AttributeError if called; confirm.
        return self.__value.__long__()

    def _unop(self,op):
        # Apply a no-argument numpy method and wrap the result as Data.
        return _data.makeData(getattr(self.value,op)())

    def _binop(self,op,y):
        # Apply a one-argument numpy method; unwrap a Data/Scalar operand first.
        try:
            y=y.value
        except AttributeError:
            pass
        ans=getattr(self.value,op)(y)
        return _data.makeData(ans)

    def _triop(self,op,y,z):
        # Apply a two-argument numpy method; unwrap Data/Scalar operands first.
        try:
            y=y.value
        except AttributeError:
            pass
        try:
            z=z.value
        except AttributeError:
            pass
        return _data.makeData(getattr(self.value,op)(y,z))

    def _getMdsDtypeNum(self):
        # NOTE(review): the DTYPE_* names are referenced unqualified; they are
        # presumably injected from _mdsdtypes at import time — confirm.
        return {'Uint8':DTYPE_BU,'Uint16':DTYPE_WU,'Uint32':DTYPE_LU,'Uint64':DTYPE_QU,
                'Int8':DTYPE_B,'Int16':DTYPE_W,'Int32':DTYPE_L,'Int64':DTYPE_Q,
                'String':DTYPE_T,
                'Float32':DTYPE_FS,
                'Float64':DTYPE_FT,'Complex64':DTYPE_FSC,'Complex128':DTYPE_FTC}[self.__class__.__name__]
    mdsdtype=property(_getMdsDtypeNum)

    def all(self):
        return self._unop('all')

    def any(self):
        return self._unop('any')

    def argmax(self,*axis):
        if axis:
            return self._binop('argmax',axis[0])
        else:
            return self._unop('argmax')

    def argmin(self,*axis):
        if axis:
            return self._binop('argmin',axis[0])
        else:
            return self._unop('argmin')

    def argsort(self,axis=-1,kind='quicksort',order=None):
        return _data.makeData(self.value.argsort(axis,kind,order))

    def astype(self,type):
        return _data.makeData(self.value.astype(type))

    def byteswap(self):
        return self._unop('byteswap')

    def clip(self,y,z):
        return self._triop('clip',y,z)

class Int8(Scalar):
    """8-bit signed number"""

class Int16(Scalar):
    """16-bit signed number"""

class Int32(Scalar):
    """32-bit signed number"""

class Int64(Scalar):
    """64-bit signed number"""

class Uint8(Scalar):
    """8-bit unsigned number"""

class Uint16(Scalar):
    """16-bit unsigned number"""

class Uint32(Scalar):
    """32-bit unsigned number"""

class Uint64(Scalar):
    """64-bit unsigned number"""

    def _getDate(self):
        # Uint64 values can hold MDSplus timestamps; render via date_time().
        return _data.Data.execute('date_time($)',self)
    date=property(_getDate)

class Float32(Scalar):
    """32-bit floating point number"""

class Complex64(Scalar):
    """32-bit complex number"""
    def __str__(self):
        return "Cmplx(%g,%g)" % (self._value.real,self._value.imag)

class Float64(Scalar):
    """64-bit floating point number"""
    def __str__(self):
        # MDSplus spells double-precision exponents with 'D', not 'E'.
        return ("%E" % self._value).replace("E","D")

class Complex128(Scalar):
    """64-bit complex number"""
    def __str__(self):
        return "Cmplx(%s,%s)" % (str(Float64(self._value.real)),str(Float64(self._value.imag)))

class String(Scalar):
    """String"""
    def __radd__(self,y):
        """Reverse add: x.__radd__(y) <==> y+x
        @rtype: Data"""
        return self.execute('$//$',y,self)
    def __add__(self,y):
        """Add: x.__add__(y) <==> x+y
        @rtype: Data"""
        return self.execute('$//$',self,y)
    def __str__(self):
        """String: x.__str__() <==> str(x)
        @rtype: String"""
        if len(self._value) > 0:
            return str(self.value.tostring().decode())
        else:
            return ''
    def __len__(self):
        return len(str(self))
    def decompile(self):
        if len(self._value) > 0:
            return repr(self._value.tostring())
        else:
            return "''"

class Int128(Scalar):
    """128-bit number"""
    def __init__(self):
        raise TypeError("Int128 is not yet supported")

class Uint128(Scalar):
    """128-bit unsigned number"""
    def __init__(self):
        raise TypeError("Uint128 is not yet supported")
f0714282ca1bed1a0bc706dfd5e96c9a2e87dc47
a94770c70704c22590c72d7a90f38e3a7d2e3e5c
/Algo/Leetcode/123BestTimeToBuyAndSellStockIII.py
2a292d28fef14431391bc62620bd69b4e46bf158
[]
no_license
lawy623/Algorithm_Interview_Prep
00d8a1c0ac1f47e149e95f8655d52be1efa67743
ca8b2662330776d14962532ed8994dfeedadef70
refs/heads/master
2023-03-22T16:19:12.382081
2023-03-21T02:42:05
2023-03-21T02:42:05
180,056,076
2
0
null
null
null
null
UTF-8
Python
false
false
409
py
class Solution(object):
    def maxProfit(self, prices):
        """Best profit from at most two buy/sell transactions.

        :type prices: List[int]
        :rtype: int

        One left-to-right pass tracking four running states:
        best balance after 1st buy / 1st sell / 2nd buy / 2nd sell.
        """
        hold_first = -2 ** 31   # cash after buying the 1st share
        hold_second = -2 ** 31  # cash after buying the 2nd share
        done_first = 0          # cash after selling the 1st share
        done_second = 0         # cash after selling the 2nd share
        for price in prices:
            hold_first = max(hold_first, -price)
            done_first = max(done_first, hold_first + price)
            hold_second = max(hold_second, done_first - price)
            done_second = max(done_second, hold_second + price)
        return done_second
8cb57215a38cae611c55923ca5e461bd7f9fed84
44b87d9faad99d542914c35410ba7d354d5ba9cd
/1/EXAM 2/start a following num end in b.py
6fdd809b6c2423f47c4a8dc46bc3723b23095a91
[]
no_license
append-knowledge/pythondjango
586292d1c7d0ddace3630f0d77ca53f442667e54
0e5dab580e8cc48e9940fb93a71bcd36e8e6a84e
refs/heads/master
2023-06-24T07:24:53.374998
2021-07-13T05:55:25
2021-07-13T05:55:25
385,247,677
0
0
null
null
null
null
UTF-8
Python
false
false
141
py
import re

# Pattern: the letter 'a', one or more digits, then the letter 'b'.
# Raw string avoids the invalid-escape warning for ``\d`` on Python 3.12+.
PATTERN = re.compile(r'[a]\d+[b]')


def classify(text):
    """Return 'VALID' if *text* fully matches a<digits>b, else 'INVALID'."""
    return 'VALID' if PATTERN.fullmatch(text) is not None else 'INVALID'


if __name__ == '__main__':
    # The original bound the user's text to a variable named ``input``,
    # shadowing the builtin; it also ran at import time, making the module
    # untestable.  Same prompt and same output, now behind a main guard.
    print(classify(input('enter ')))
b7e335ec5f9b7c481858b08725dd834ca4d73b3b
917d4f67f6033a0cc01ba2b3b7b07dab94dcffdf
/property/pages/views.py
6104059f52839acc79ba33851e228e4120171433
[]
no_license
hghimanshu/Django
011156c484e6710a379be3fb7faf6ab814bde02c
75bef769e615df2719b213884f7269a56b7ccb7b
refs/heads/master
2023-02-19T08:49:35.691196
2022-03-21T09:03:58
2022-03-21T09:03:58
242,301,089
0
0
null
2023-02-15T18:19:31
2020-02-22T07:43:13
CSS
UTF-8
Python
false
false
856
py
from django.shortcuts import render
from django.http import HttpResponse
from listings.models import Listing
from realtors.models import Realtor
from listings.choices import price_choices, bedroom_choices, state_choices


# Create your views here.
def index(request):
    """Home page: the three newest published listings plus search choices."""
    latest_listings = Listing.objects.order_by('-list_date').filter(is_published=True)[:3]
    return render(request, 'pages/index.html', {
        'listings': latest_listings,
        'state_choices': state_choices,
        'bedroom_choices': bedroom_choices,
        'price_choices': price_choices,
    })


def about(request):
    """About page: all realtors (newest hires first) and the MVP subset."""
    all_realtors = Realtor.objects.order_by('-hire_date')
    mvps = Realtor.objects.all().filter(is_mvp=True)
    return render(request, 'pages/about.html', {
        'realtors': all_realtors,
        'mvp': mvps,
    })
780073cc16c8f338f3195e45934b88dd0709ef5b
f777b5e4a98c40f4bfc5c5c9e326faa09beb2d53
/projects/DensePose/densepose/modeling/cse/utils.py
18480db5e485dec3bd0daf3cae69263a6abdde4f
[ "Apache-2.0" ]
permissive
alekseynp/detectron2
04ae9a47d950ea4c737715b5f2aa7637d3742264
2409af0bf0d4bdcc685feb6d2c7fd659828acac4
refs/heads/master
2022-05-30T09:13:26.438077
2022-04-11T20:59:40
2022-04-11T20:59:40
254,280,315
0
1
Apache-2.0
2020-04-09T05:34:15
2020-04-09T05:34:14
null
UTF-8
Python
false
false
3,538
py
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import torch
from torch.nn import functional as F


def squared_euclidean_distance_matrix(pts1: torch.Tensor, pts2: torch.Tensor) -> torch.Tensor:
    """Pairwise squared Euclidean distances between two point sets.

    Args:
        pts1: Tensor [M x D], M points with D-dimensional features
        pts2: Tensor [N x D], N points with D-dimensional features
    Return:
        Tensor [M, N] with || pts1[m] - pts2[n] ||^2 at index (m, n)
    """
    # ||a-b||^2 = ||a||^2 - 2<a,b> + ||b||^2, computed without an M*N*D blowup.
    dists = torch.mm(-2 * pts1, pts2.t())
    correction = (pts1 * pts1).sum(1, keepdim=True) + (pts2 * pts2).sum(1, keepdim=True).t()
    dists += correction
    return dists.contiguous()


def normalize_embeddings(embeddings: torch.Tensor, epsilon: float = 1e-6) -> torch.Tensor:
    """L2-normalize each row of a [N, D] embedding tensor.

    Args:
        embeddings (tensor [N, D]): N D-dimensional embedding vectors
        epsilon (float): lower bound on each vector norm (avoids div-by-zero)
    Return:
        Tensor [N, D] whose rows have unit L2 norm.
    """
    row_norms = embeddings.norm(p=None, dim=1, keepdim=True)
    return embeddings / torch.clamp(row_norms, min=epsilon)


def get_closest_vertices_mask_from_ES(
    E: torch.Tensor,
    S: torch.Tensor,
    h: int,
    w: int,
    mesh_vertex_embeddings: torch.Tensor,
    device: torch.device,
):
    """Resize embeddings/segmentation to a box and match pixels to vertices.

    Args:
        E (tensor [1, D, H, W]): embedding vectors for the default-sized box
        S (tensor [1, 2, H, W]): 2-channel segmentation for the same box
        h (int): target bounding-box height
        w (int): target bounding-box width
        mesh_vertex_embeddings (tensor [N, D]): embeddings of the mesh vertices
        device (torch.device): device the outputs are computed on
    Return:
        (closest_vertices [h, w] long, mask [h, w] bool) for the resized box.
    """
    pixel_embeddings = F.interpolate(E, size=(h, w), mode="bilinear")[0].to(device)
    coarse_segm = F.interpolate(S, size=(h, w), mode="bilinear")[0].to(device)
    mask = coarse_segm.argmax(0) > 0
    closest_vertices = torch.zeros(mask.shape, dtype=torch.long, device=device)

    # [P, D] embeddings of the foreground pixels only.
    foreground = pixel_embeddings[:, mask].t()
    if len(foreground) == 0:
        return closest_vertices, mask

    # Chunk the distance computation to avoid a possible OOM on large boxes.
    chunk_size = 10_000
    nearest_per_chunk = []
    for begin in range(0, len(foreground), chunk_size):
        piece = foreground[begin:begin + chunk_size]
        distances = squared_euclidean_distance_matrix(piece, mesh_vertex_embeddings)
        nearest_per_chunk.append(torch.argmin(distances, dim=1))
    closest_vertices[mask] = torch.cat(nearest_per_chunk)
    return closest_vertices, mask
b605974ab6d3d89ba69d3248a135c89cc71111ec
5ca02343c366662b60966e060e50e9d6960c0531
/TX/TX/settings.py
45f686302330fb2cfde0ecc003da2686115a362c
[]
no_license
yyzhu0817/scrapy
eff5cc68ab25c89fe01c62e2c94e5511dad3fc34
9186b127bf49450850028c76142262c6f2c935da
refs/heads/master
2020-12-10T01:00:34.924969
2020-01-20T02:54:58
2020-01-20T02:54:58
233,465,772
0
0
null
null
null
null
UTF-8
Python
false
false
3,174
py
# -*- coding: utf-8 -*-

# Scrapy settings for the TX project.
#
# Only commonly tweaked settings appear here; full reference:
#     https://docs.scrapy.org/en/latest/topics/settings.html
#     https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#     https://docs.scrapy.org/en/latest/topics/spider-middleware.html

BOT_NAME = 'TX'

SPIDER_MODULES = ['TX.spiders']
NEWSPIDER_MODULE = 'TX.spiders'

# Do not honor robots.txt for this project.
ROBOTSTXT_OBEY = False

# Headers attached to every request; a desktop-Chrome User-Agent is used
# instead of the default scrapy identity.
DEFAULT_REQUEST_HEADERS = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'en',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36',
}

# --- Disabled options kept for reference -------------------------------------
# Identify yourself (and your website) on the user-agent:
#USER_AGENT = 'TX (+http://www.yourdomain.com)'
# Concurrency and politeness:
#CONCURRENT_REQUESTS = 32
#DOWNLOAD_DELAY = 3
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
#COOKIES_ENABLED = False
#TELNETCONSOLE_ENABLED = False
# Middlewares, extensions and pipelines:
#SPIDER_MIDDLEWARES = {
#    'TX.middlewares.TxSpiderMiddleware': 543,
#}
#DOWNLOADER_MIDDLEWARES = {
#    'TX.middlewares.TxDownloaderMiddleware': 543,
#}
#EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
#}
#ITEM_PIPELINES = {
#    'TX.pipelines.TxPipeline': 300,
#}
# AutoThrottle (see https://docs.scrapy.org/en/latest/topics/autothrottle.html):
#AUTOTHROTTLE_ENABLED = True
#AUTOTHROTTLE_START_DELAY = 5
#AUTOTHROTTLE_MAX_DELAY = 60
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
#AUTOTHROTTLE_DEBUG = False
# HTTP caching:
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
de41515bdfe3faa82c3ce8ed5c220f24b123aac9
3712a929d1124f514ea7af1ac0d4a1de03bb6773
/开班笔记/pythonMongoDB部分/day39/code/mongo1.py
5b9aef2d4d0dc19f114aaca150810694bc086161
[]
no_license
jiyabing/learning
abd82aa3fd37310b4a98b11ea802c5b0e37b7ad9
6059006b0f86aee9a74cfc116d2284eb44173f41
refs/heads/master
2020-04-02T20:47:33.025331
2018-10-26T05:46:10
2018-10-26T05:46:10
154,779,387
0
0
null
null
null
null
UTF-8
Python
false
false
990
py
# coding:utf8
# pymongo examples: index management and an aggregation pipeline.
from pymongo import MongoClient, IndexModel

conn = MongoClient('localhost', 27017)
db = conn.stu
my_set = db.class4

# Creating an index returns its name:
#   index = my_set.ensure_index('name')
# Compound index:
#   index = my_set.ensure_index([('name', 1), ('king', -1)])

cls = db.class0
# Unique index:
#   index = cls.ensure_index('name', unique=True)
# Sparse index:
#   index = my_set.ensure_index('king_name', sparse=True)

# Dropping indexes:
#   my_set.drop_index('name_1')
#   my_set.drop_indexes()            # drop every index

# Creating several indexes in one call:
#   index1 = IndexModel([('name', 1), ('king', -1)])
#   index2 = IndexModel([('king_name', 1)])
#   indexes = my_set.create_indexes([index1, index2])

# Listing the indexes of a collection:
#   for i in my_set.list_indexes():
#       print(i)

# Aggregation pipeline: group by 'king' and keep groups with more than one doc.
pipeline = [{'$group': {'_id': '$king', 'count': {'$sum': 1}}}, {'$match': {'count': {'$gt': 1}}}]
for document in my_set.aggregate(pipeline):
    print(document)
9e8e5607a62fa19a162b1026aab6e20e14275de9
1a2bf34d7fc1d227ceebf05edf00287de74259c5
/Django/Test/LuZhenNan/APP/views.py
7e131b7ef59abef0102ca853aada9b3ad236a88c
[]
no_license
lzn9423362/Django-
de69fee75160236e397b3bbc165281eadbe898f0
8c1656d20dcc4dfc29fb942b2db54ec07077e3ae
refs/heads/master
2020-03-29T18:03:47.323734
2018-11-28T12:07:12
2018-11-28T12:07:12
150,192,771
0
0
null
null
null
null
UTF-8
Python
false
false
2,187
py
import hashlib

from django.http import HttpResponse, JsonResponse
from django.shortcuts import render, redirect
from django.urls import reverse

from .models import *


# Create your views here.
def index(request):
    """Home page: featured girls/men plus the logged-in user (or None)."""
    username = request.session.get('username')
    # ``first()`` is None when nobody is logged in; the template handles both.
    user = User.objects.filter(username=username).first()
    girl = Girl.objects.all()
    man = Man.objects.all()
    # The original rendered the exact same context in both branches of an
    # if/else on ``users.exists()``; collapsed into a single render call.
    return render(request, 'index.html', {
        'user': user,
        'girl1': girl[0:1],
        'girl2': girl[1:6],
        'man1': man[0:2],
        'man2': man[2:11],
    })


def register(request):
    """Show the registration form."""
    return render(request, 'register.html')


def registerhandle(request):
    """Create a new user from the POSTed registration form."""
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        email = request.POST.get('email')
        # SECURITY: passwords are stored in plain text and ``my_md5`` below is
        # unused. Hashing should be introduced, but that requires migrating
        # existing rows, so it is only flagged here.
        User.objects.create(username=username, password=password, email=email)
        return redirect(reverse('APP:login'))
    else:
        # Consistent with loginhandle(): reject non-POST requests explicitly.
        # The original fell through and returned None, which made Django 500.
        return HttpResponse('请求方式错误')


def login(request):
    """Show the login form."""
    return render(request, 'login.html')


def loginhandle(request):
    """Validate POSTed credentials and start a session."""
    if request.method == 'POST':
        username = request.POST.get('phone')
        password = request.POST.get('password')
        users = User.objects.filter(username=username, password=password)
        if users.exists():
            request.session['username'] = users.first().username
            return redirect(reverse('APP:index'))
        else:
            return HttpResponse('账号密码错误')
    else:
        return HttpResponse('请求方式错误')


def logout(request):
    """Clear the session and return to the home page."""
    request.session.clear()
    return redirect(reverse("APP:index"))


def loginajax(request):
    """AJAX availability check: status 0 when the username exists, 1 otherwise."""
    username = request.POST.get('value')
    try:
        User.objects.get(username=username)
        return JsonResponse({'status': 0})
    except Exception:  # narrowed from a bare except; DoesNotExist and friends
        return JsonResponse({'status': 1})


def my_md5(string):
    """Return the hex MD5 digest of *string* (currently unused by any view)."""
    m = hashlib.md5()
    m.update(string.encode())
    return m.hexdigest()
caa0850988b9faedb6726b682fb5a8154116b383
ddd4edc45481e6a7c7141b93e47b974634506d2d
/tradgram/relations/admin.py
4dbfea14ffdda1239afd8dbc98f9c3eba2c6aaf4
[ "MIT" ]
permissive
didils/tradgram
407de9d05d01bc840c5c165155d370f092d82f0d
4868ca082ab78a1b5b96f25ee9f958567bd1bb1e
refs/heads/master
2021-11-19T02:47:02.224088
2019-04-05T08:19:14
2019-04-05T08:19:14
148,162,588
0
0
MIT
2021-09-08T00:57:43
2018-09-10T13:49:57
Python
UTF-8
Python
false
false
439
py
from django.contrib import admin

from . import models


# Register your models here.
@admin.register(models.Relation)
class RelationAdmin(admin.ModelAdmin):
    """Admin list configuration for product-pair Relation rows."""

    search_fields = ('product1', 'product2')
    list_filter = ('product1', 'product2', 'count', 'created_at')
    list_display = ('product1', 'product2', 'count', 'created_at')
d7cab272034def647cc8d74d213a5bd61f80a1cd
3f5a1ef51620fd8c35ef38064ca5aa00776ab6f4
/full_speed_educative/dictionary/defination.py
e920f8dbd52c14cb5029ac0ed167f195ae926aff
[]
no_license
poojagmahajan/python_exercises
1b290a5c0689f703538caf89bca5bc6c1fdb392a
65539cf31c5b2ad5768d652ed5fe95054ce5f63f
refs/heads/master
2022-11-12T03:52:13.533781
2020-07-04T20:50:29
2020-07-04T20:54:46
263,151,942
0
0
null
null
null
null
UTF-8
Python
false
false
1,814
py
### Dictionaries are data structures that index values by a given key (key-value pairs).

ages = {
    "purvansh": 3,
    "Pooja": 28,
    "Gaurav": 30,
    "swarit": 1,
    "meet": 10,
    "ishan": 6,
}

print("print age of any one -")
print(ages["purvansh"])
print(ages["Gaurav"])

print("\n print ages of all -")
for name, age in ages.items():
    print(name, age)

address = {}        # empty dict via the literal {}
pincode = dict()    # empty dict via the dict() constructor
address["pooja"] = "pune"          # fill the empty dictionary
address["purvansh"] = "chinchwad"
# BUGFIX: the original loop target was also named ``address``, so after the
# loop the dict was silently rebound to the last value string. Distinct loop
# variable names keep ``address`` a dict; the printed output is unchanged.
for person, place in address.items():
    print("\n", person, place)

# Dictionary keys can be any immutable object; they need not be strings.
d = {
    0: [0, 0, 0],
    1: [1, 1, 1],
    2: [2, 2, 2],
}
print(d[2])

# OrderedDict (collections) preserves the order in which keys are inserted.
from collections import OrderedDict

ages = OrderedDict()
ages["ram"] = 20
ages["sham"] = 40
for key, value in ages.items():
    print(key, value)

##### Loop to get all keys
for key in ages:
    print(key)
print(ages.keys())

##### Loop to get all values
for age in ages:
    print(ages[age])
print(ages.values())

######################################
Dict1 = {
    "FruitName": "Mango",
    "season": "Spring",
}
Dict1.pop("season")      # pop removes the entry for the given key
print(Dict1.values())
print(Dict1)             # print the whole dictionary
print(Dict1.values())
print(Dict1.keys())
Dict1.clear()            # empty the dict
print(Dict1)             # prints the empty braces {}
806e3cd0e19b3608d616b002a8bb2b876ca9e51a
d564c1dcde3a139960e441a732f308dee7bac268
/code/run5All_Content_PlainUD.py
517e77577c22a0ae492044444a377776233b03a6
[]
no_license
m-hahn/left-right-asymmetries
9b5142dcf822194068feea2ccc0e8cc3b0573bbe
45e5b40a145e2a9d51c12617dc76be5a49ddf43e
refs/heads/master
2020-04-26T11:47:57.781431
2019-03-22T01:00:48
2019-03-22T01:00:48
173,528,908
0
0
null
null
null
null
UTF-8
Python
false
false
355
py
from ud_languages import languages

import subprocess

# Process the languages in reverse alphabetical order.
languages = sorted(languages, reverse=True)

SCRIPT = "testLeftRightEntUniHDCond3FilterMIWord5_Content_PlainUD_Bugfix.py"

for language in languages:
    # Other model choices used historically: "GROUND" and five draws of
    # "RANDOM_BY_TYPE".
    for model in ("REAL_REAL", "REVERSE"):
        subprocess.call(["./python27", SCRIPT, language, model])
95612c8e2207355469ab70ff6f985fb9fef74ba0
d6ca0b326f1bd0ce381c6db611f6331096bf4187
/pypet/tests/_atworema.py
5862c910f1aa08c6ff96162a56510430111ec8f6
[ "BSD-3-Clause" ]
permissive
SmokinCaterpillar/pypet
aa35355d70e8f44be015313494376d993f645d80
3d454ac65f89e7833baaf89510f73c546e90d8f6
refs/heads/develop
2023-08-08T16:01:54.087819
2023-02-14T14:59:32
2023-02-14T14:59:32
12,901,526
89
22
BSD-3-Clause
2023-07-24T00:46:12
2013-09-17T17:06:00
Python
UTF-8
Python
false
false
352
py
__author__ = 'Robert Meyer'

from pypet.tests.testutils.ioutils import run_suite, discover_tests, TEST_IMPORT_ERROR


if __name__ == '__main__':
    # Collect every discovered test class except the import-error placeholder.
    chosen_suite = discover_tests(
        predicate=lambda class_name, test_name, tags: class_name != TEST_IMPORT_ERROR)
    run_suite(remove=False, folder=None, suite=chosen_suite)
72ff753a9ba4196f39464a93290728c75816d6aa
5623771414b26c021be54facaaaefbd9314b389d
/week7/DS/DP/Min_sum_path.py
ae37aa64e4f0f1e20de2069fd94641db8a4796da
[]
no_license
saxenasamarth/BootCamp_PythonLearning
36b705b83c7f0e297931bb8d75cb541088690248
d5b8fe2d6fcfe54c5a7393f218414b1122f3e49e
refs/heads/master
2023-04-17T15:29:05.402863
2019-08-29T08:46:34
2019-08-29T08:46:34
null
0
0
null
null
null
null
UTF-8
Python
false
false
664
py
# Given a m x n grid filled with non-negative numbers, find a path from top left to bottom right which minimizes the sum of all numbers along its path.


def find_min_sum_path(matrix):
    """Minimum path sum from top-left to bottom-right moving only right/down.

    Uses a rolling 1-D DP row instead of the full m x n table.
    """
    width = len(matrix[0])
    # dp[j] == cheapest cost to reach column j of the current row.
    dp = [0] * width
    dp[0] = matrix[0][0]
    for j in range(1, width):
        dp[j] = dp[j - 1] + matrix[0][j]
    for i in range(1, len(matrix)):
        dp[0] += matrix[i][0]
        for j in range(1, width):
            dp[j] = matrix[i][j] + min(dp[j], dp[j - 1])
    return dp[-1]


matrix = [[1, 3, 1], [1, 5, 1], [4, 2, 1]]
print(find_min_sum_path(matrix))
0d5fb3722a72d746607b18d92434d47ef39879d8
c6f15aa103de030f7eea6c1aaf6e7ad0ec88dbc1
/add/AppMcsv/storage/volume/Volume.py
b21fe01e6712fe2709429c0d0eb031b3f2a0eedd
[]
no_license
sysdeep/dcat
6f3478348113b0d1206f82456f5bd80431282daf
f8c801173ace4447018c3034c56254ab1a6d4089
refs/heads/master
2023-05-03T16:04:28.027335
2023-04-17T15:04:04
2023-04-17T15:04:04
320,551,696
0
0
null
null
null
null
UTF-8
Python
false
false
2,453
py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import gzip
from enum import Enum
import time

# Separator line between the header and body sections of a catalogue file.
DET = "-"*10


class Sections(Enum):
    header = 0
    body = 1


class Record(object):
    """One catalogue entry (a file or directory node in the tree)."""

    def __init__(self):
        self.name = ""
        self.uuid = ""
        self.parent = ""
        self.childrens = []

    def append(self, ch):
        self.childrens.append(ch)


def parse_record(line: str) -> Record:
    """Build a Record from one pipe-separated catalogue line.

    Field layout (observed): name at index 0, parent uuid at 9, uuid at 10.
    """
    try:
        fields = line.split("|")
    except Exception as e:
        print(e)
        return None
    rec = Record()
    rec.name = fields[0]
    rec.uuid = fields[10]
    rec.parent = fields[9]
    return rec


class Volume(object):
    """A gzip-compressed catalogue file: header metadata plus a record tree."""

    def __init__(self, full_path):
        self.path = full_path
        self.name = "undefined"
        self.__tree = []
        self.__tmap = {}    # uuid -> Record
        self.__roots = []   # records whose parent is "0"

    def read_header(self):
        """Read the header section and extract the volume name."""
        stream = gzip.open(self.path, "rt", encoding="utf-8")
        header_lines = []
        section = Sections.header
        guard = 1000  # safety stop against runaway headers
        while True:
            line = stream.readline().strip()
            if not line:
                print("null line")
                break
            if section == Sections.header:
                if line == DET:
                    break
                else:
                    header_lines.append(line)
            guard -= 1
            if guard < 0:
                print("emerg")
                break
        stream.close()

        for line in header_lines:
            print(line)
            chunks = line.split(":")
            if chunks[0] == "name":
                self.name = chunks[1]
                break

    def read_body(self):
        """Parse the record section into the uuid map and root list."""
        self.__tree = []
        self.__tmap = {}

        stream = gzip.open(self.path, "rt", encoding="utf-8")
        started = time.time()
        section = Sections.header
        guard = 10000000000
        while True:
            line = stream.readline().strip()
            if not line:
                print("null line")
                break
            if section == Sections.header:
                if line == DET:
                    section = Sections.body
                # otherwise: still in the header, skip the line
            elif section == Sections.body:
                rec = parse_record(line)
                self.__tmap[rec.uuid] = rec
                if rec.parent == "0":
                    self.__roots.append(rec)
            guard -= 1
            if guard < 0:
                print("emerg")
                break

        print("*"*20)
        # NOTE(review): this prints the remaining countdown, not a file count
        # -- preserved as-is from the original.
        print("files: ", guard)
        print("*"*20)
        stream.close()
        finished = time.time()
        print("parse body time: ", finished-started)
        self.__link()

    def __link(self):
        """Attach every record to its parent's children list."""
        for rec in self.__tmap.values():
            if rec.parent == "0":
                continue
            parent_node = self.__tmap.get(rec.parent)
            if parent_node:
                parent_node.append(rec)

    def get_root(self) -> list:
        return self.__roots
a4cce41e3df9414b4d0faa27cb4e7dc024befcb8
4c5328381f53d8b77b56a597cc39a32b55a0c4c2
/Cura/gui/view3D/printableObjectRenderer.py
88a57fe5161dce1651e1ffc756679a55a1b9d57a
[]
no_license
sanyaade-iot/Cura2
47fc18a8886dcc8537439b699cdb201d92e68683
b8655a20ca4a03acaa2ada555f57fe415264d944
refs/heads/master
2021-01-16T20:06:18.885340
2014-06-06T12:51:10
2014-06-06T12:51:10
null
0
0
null
null
null
null
UTF-8
Python
false
false
3,207
py
__author__ = 'Jaime van Kessel'

from OpenGL.GL import *
from Cura.gui import openGLUtils
from Cura.resources import getMesh
from Cura.gui.view3D.renderer import Renderer


class PrintableObjectRenderer(Renderer):
    """Draws the scene's printable objects: a colored shaded pass for normal
    rendering and an unshaded geometry-only pass for focus/picking."""

    def __init__(self):
        super(PrintableObjectRenderer, self).__init__()
        self._shader = openGLUtils.GLShader(filename='objectShader.glsl')

    def _applyObjectTransform(self, obj):
        """Multiply the object's full model transform onto the GL stack."""
        drawOffset = obj.getDrawOffset()
        glTranslatef(obj.getPosition()[0], obj.getPosition()[1], obj.getSize()[2] / 2.0)
        openGLUtils.glMultiplyMatrix(obj.getTempMatrix())
        glTranslatef(drawOffset[0], drawOffset[1], drawOffset[2] - obj.getSize()[2] / 2.0)
        openGLUtils.glMultiplyMatrix(obj.getMatrix())

    @staticmethod
    def _vertexRenderer(volume):
        """Return the volume's cached VertexRenderer, creating it on first use."""
        if 'VertexRenderer' not in volume.metaData:
            volume.metaData['VertexRenderer'] = openGLUtils.VertexRenderer(GL_TRIANGLES, volume.vertexData)
        return volume.metaData['VertexRenderer']

    def render(self):
        self._shader.bind()
        for obj in self.scene.getObjects():
            mesh = obj.getMesh()
            glPushMatrix()
            self._applyObjectTransform(obj)
            # Selected objects are drawn at full brightness.
            brightness = 1.0 if obj.isSelected() else 0.8
            if mesh is not None:
                for volume in mesh.getVolumes():
                    renderer = self._vertexRenderer(volume)
                    glColor3f(1 * brightness, 0.5 * brightness, 1 * brightness)
                    renderer.render()
            else:
                # Mesh not loaded yet: draw the grey placeholder model.
                placeholder = getMesh('loading_mesh.stl')
                for volume in placeholder.getVolumes():
                    renderer = self._vertexRenderer(volume)
                    glColor3f(0.5 * brightness, 0.5 * brightness, 0.5 * brightness)
                    renderer.render()
            glPopMatrix()
        self._shader.unbind()

    def focusRender(self):
        for obj in self.scene.getObjects():
            glPushMatrix()
            self._applyObjectTransform(obj)
            self.setCurrentFocusRenderObject(obj)
            mesh = obj.getMesh()
            if mesh is None:
                mesh = getMesh('loading_mesh.stl')
            for volume in mesh.getVolumes():
                self._vertexRenderer(volume).render()
            glPopMatrix()
d3ed2e74b0e9dba9944dd11ca896b5016acd263d
154fd16fe7828cb6925ca8f90e049b754ce06413
/lino_book/projects/lydia/tests/dumps/18.12.0/teams_team.py
e3d2d31af2866296e853ea6765cf5e65fe6a2a6c
[ "BSD-2-Clause" ]
permissive
lino-framework/book
68de2f8d130266bd9d9de7576d30597b3cde1c91
4eab916832cd8f48ff1b9fc8c2789f0b437da0f8
refs/heads/master
2021-03-27T16:16:55.403940
2021-03-15T02:53:50
2021-03-15T02:53:50
58,830,342
3
9
BSD-2-Clause
2021-03-09T13:11:27
2016-05-14T21:02:17
Python
UTF-8
Python
false
false
254
py
# -*- coding: UTF-8 -*-
logger.info("Loading 2 objects to table teams_team...")
# fields: id, ref, name
for team_id, team_ref, team_names in (
    (1, u'E', ['Eupen', '', '']),
    (2, u'S', ['St. Vith', '', '']),
):
    loader.save(create_teams_team(team_id, team_ref, team_names))
loader.flush_deferred_objects()
99a64502bc4d3c80b07c903df53770314112a9ed
df7f13ec34591fe1ce2d9aeebd5fd183e012711a
/hata/discord/user/thread_profile/tests/test__ThreadProfile__magic.py
62df5d60ace156b75fde3936db52d10717f48aed
[ "LicenseRef-scancode-warranty-disclaimer" ]
permissive
HuyaneMatsu/hata
63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e
53f24fdb38459dc5a4fd04f11bdbfee8295b76a4
refs/heads/master
2023-08-20T15:58:09.343044
2023-08-20T13:09:03
2023-08-20T13:09:03
163,677,173
3
3
Apache-2.0
2019-12-18T03:46:12
2018-12-31T14:59:47
Python
UTF-8
Python
false
false
1,575
py
from datetime import datetime as DateTime

import vampytest

from ..flags import ThreadProfileFlag
from ..thread_profile import ThreadProfile


def test__ThreadProfile__repr():
    """
    Tests whether ``ThreadProfile.__repr__`` works as intended.
    """
    profile = ThreadProfile(
        flags = ThreadProfileFlag(2),
        joined_at = DateTime(2016, 5, 15),
    )
    vampytest.assert_instance(repr(profile), str)


def test__ThreadProfile__hash():
    """
    Tests whether ``ThreadProfile.__hash__`` works as intended.
    """
    profile = ThreadProfile(
        flags = ThreadProfileFlag(2),
        joined_at = DateTime(2016, 5, 15),
    )
    vampytest.assert_instance(hash(profile), int)


def test__ThreadProfile__eq():
    """
    Tests whether ``ThreadProfile.__eq__`` works as intended.
    """
    base_parameters = {
        'flags': ThreadProfileFlag(2),
        'joined_at': DateTime(2016, 5, 15),
    }
    
    profile = ThreadProfile(**base_parameters)
    
    # Equal to itself, never equal to a foreign object.
    vampytest.assert_eq(profile, profile)
    vampytest.assert_ne(profile, object())
    
    # Altering any single field must break equality.
    for altered_name, altered_value in (
        ('flags', ThreadProfileFlag(4)),
        ('joined_at', None),
    ):
        altered_profile = ThreadProfile(**{**base_parameters, altered_name: altered_value})
        vampytest.assert_ne(profile, altered_profile)
099b882a7057d5cd24e0b98ae5aa752f70f5f128
30a8b69bd2e0a3f3c2c1c88fb3bd8a28e6fc4cd0
/Part1/load_shapefile.py
5f31c2a37d32ed6c638bb6a4a1c85920628b25c3
[]
no_license
llord1/Mining-Georeferenced-Data
d49108f443922f02b90431ad7a9626ea17fd0554
c71f2e151ccfc4a1a9c07b5fcf4e95b7f7ba70e9
refs/heads/master
2021-05-30T13:27:57.663015
2015-12-29T09:10:08
2015-12-29T09:10:08
null
0
0
null
null
null
null
UTF-8
Python
false
false
542
py
#!/usr/bin/env python import sys import shapefile from shapely.geometry import shape shp = shapefile.Reader(sys.argv[1]) print "Found", shp.numRecords, "records:" pos = None count = 0 for record in shp.records(): print " ", record[1] if record[1] == sys.argv[2]: pos = count count += 1 if pos is None: print >> sys.stderr, sys.argv[2], "not found in shapefile" sys.exit() print >> sys.stderr, "Using", sys.argv[2], "..." manhattan = shape(shp.shapes()[pos]) print manhattan.contains(manhattan.centroid)
b4158282f6e90ee810904eb5e6be6f5e5f95435d
1fad121fea752aa3aee03f7665917ce9563e0d08
/src/form/panel/VmdPanel.py
e75138d37ae45d096b8a52074f7f82a941f91b1f
[ "MIT" ]
permissive
JerryAJIAN/vmd_sizing
0d382b9b94cdc3878e9d9a1c03f2c9c5f285ac6a
baad81eb40a21c9fa864344fbbf75cdab887c9c6
refs/heads/master
2022-11-18T03:57:57.111852
2020-07-06T15:10:27
2020-07-06T15:10:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
7,450
py
# -*- coding: utf-8 -*-
#
import wx
import wx.lib.newevent
import sys

from form.panel.BasePanel import BasePanel
from form.parts.BaseFilePickerCtrl import BaseFilePickerCtrl
from form.parts.ConsoleCtrl import ConsoleCtrl
from form.worker.VmdWorkerThread import VmdWorkerThread
from module.MMath import MRect, MVector3D, MVector4D, MQuaternion, MMatrix4x4 # noqa
from utils import MFormUtils, MFileUtils # noqa
from utils.MLogger import MLogger # noqa

logger = MLogger(__name__)

# Event definition: fired when the VMD conversion worker thread finishes.
(VmdThreadEvent, EVT_VMD_THREAD) = wx.lib.newevent.NewEvent()


class VmdPanel(BasePanel):
    """Notebook tab that converts motion CSV files (bone+morph or camera) to VMD."""

    def __init__(self, frame: wx.Frame, parent: wx.Notebook, tab_idx: int):
        super().__init__(frame, parent, tab_idx)

        # Background worker thread; None while no conversion is running.
        self.convert_vmd_worker = None

        # Panel description shown to the user (Japanese UI text).
        self.description_txt = wx.StaticText(self, wx.ID_ANY, "指定されたCSVファイル(ボーン+モーフ or カメラ)を、VMDファイルとして出力します。\n" \
                                             + "モデルモーション(ボーン・モーフ)とカメラモーション(カメラ)は別々に出力できます。\n" \
                                             + "CSVのフォーマットは、CSVタブで出力したデータと同じものを定義してください。", wx.DefaultPosition, wx.DefaultSize, 0)
        self.sizer.Add(self.description_txt, 0, wx.ALL, 5)

        self.static_line = wx.StaticLine(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
        self.sizer.Add(self.static_line, 0, wx.EXPAND | wx.ALL, 5)

        # CSV file picker (bone motion)
        self.bone_csv_file_ctrl = BaseFilePickerCtrl(frame, self, u"CSVファイル(ボーン)", u"CSVファイルを選択してください", ("csv"), wx.FLP_DEFAULT_STYLE, \
                                                     u"VMDに変換したいボーンモーションのファイルパスを指定してください。", \
                                                     is_aster=False, is_save=False, set_no=0, required=False)
        self.sizer.Add(self.bone_csv_file_ctrl.sizer, 0, wx.EXPAND | wx.ALL, 0)

        # CSV file picker (morph motion)
        self.morph_csv_file_ctrl = BaseFilePickerCtrl(frame, self, u"CSVファイル(モーフ)", u"CSVファイルを選択してください", ("csv"), wx.FLP_DEFAULT_STYLE, \
                                                      u"VMDに変換したいモーフモーションのファイルパスを指定してください。", \
                                                      is_aster=False, is_save=False, set_no=0, required=False)
        self.sizer.Add(self.morph_csv_file_ctrl.sizer, 0, wx.EXPAND | wx.ALL, 0)

        self.static_line2 = wx.StaticLine(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.LI_HORIZONTAL)
        self.sizer.Add(self.static_line2, 0, wx.EXPAND | wx.ALL, 5)

        # CSV file picker (camera motion)
        self.camera_csv_file_ctrl = BaseFilePickerCtrl(frame, self, u"CSVファイル(カメラ)", u"CSVファイルを選択してください", ("csv"), wx.FLP_DEFAULT_STYLE, \
                                                       u"VMDに変換したいカメラモーションのファイルパスを指定してください。", \
                                                       is_aster=False, is_save=False, set_no=0, required=False)
        self.sizer.Add(self.camera_csv_file_ctrl.sizer, 0, wx.EXPAND | wx.ALL, 0)

        btn_sizer = wx.BoxSizer(wx.HORIZONTAL)

        # "Run VMD conversion" button
        self.vmd_btn_ctrl = wx.Button(self, wx.ID_ANY, u"VMD変換実行", wx.DefaultPosition, wx.Size(200, 50), 0)
        self.vmd_btn_ctrl.SetToolTip(u"CSVをVMDに変換します。")
        self.vmd_btn_ctrl.Bind(wx.EVT_BUTTON, self.on_convert_vmd)
        btn_sizer.Add(self.vmd_btn_ctrl, 0, wx.ALL, 5)

        self.sizer.Add(btn_sizer, 0, wx.ALIGN_CENTER | wx.SHAPED, 5)

        # Console output area (read-only); stdout is redirected here during a run.
        self.console_ctrl = ConsoleCtrl(self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size(-1, 420), \
                                        wx.TE_MULTILINE | wx.TE_READONLY | wx.BORDER_NONE | wx.HSCROLL | wx.VSCROLL | wx.WANTS_CHARS)
        self.console_ctrl.SetBackgroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_3DLIGHT))
        self.console_ctrl.Bind(wx.EVT_CHAR, lambda event: MFormUtils.on_select_all(event, self.console_ctrl))
        self.sizer.Add(self.console_ctrl, 1, wx.ALL | wx.EXPAND, 5)

        # Progress gauge
        self.gauge_ctrl = wx.Gauge(self, wx.ID_ANY, 100, wx.DefaultPosition, wx.DefaultSize, wx.GA_HORIZONTAL)
        self.gauge_ctrl.SetValue(0)
        self.sizer.Add(self.gauge_ctrl, 0, wx.ALL | wx.EXPAND, 5)

        self.fit()

        # Bind the conversion-finished handler on the frame.
        self.frame.Bind(EVT_VMD_THREAD, self.on_convert_vmd_result)

    # Disable all form controls (while a conversion runs).
    def disable(self):
        self.bone_csv_file_ctrl.disable()
        self.morph_csv_file_ctrl.disable()
        self.camera_csv_file_ctrl.disable()
        self.vmd_btn_ctrl.Disable()

    # Re-enable all form controls.
    def enable(self):
        self.bone_csv_file_ctrl.enable()
        self.morph_csv_file_ctrl.enable()
        self.camera_csv_file_ctrl.enable()
        self.vmd_btn_ctrl.Enable()

    # Start the CSV -> VMD conversion (button handler).
    def on_convert_vmd(self, event: wx.Event):
        # Disable the form
        self.disable()
        # Pin the current tab
        self.fix_tab()
        # Clear the console
        self.console_ctrl.Clear()
        # Redirect stdout to this panel's console
        sys.stdout = self.console_ctrl
        wx.GetApp().Yield()

        self.elapsed_time = 0
        result = True
        # Only the bone CSV is validated here; morph/camera appear optional.
        result = self.bone_csv_file_ctrl.is_valid() and result

        if not result:
            # Play the finish sound
            self.frame.sound_finish()
            # Allow tab switching again
            self.release_tab()
            # Re-enable the form
            self.enable()
            # Restore the default stdout
            sys.stdout = self.frame.file_panel_ctrl.console_ctrl
            return result

        # Start the VMD conversion
        if self.convert_vmd_worker:
            logger.error("まだ処理が実行中です。終了してから再度実行してください。", decoration=MLogger.DECORATION_BOX)
        else:
            # Run in a separate thread
            self.convert_vmd_worker = VmdWorkerThread(self.frame, VmdThreadEvent)
            self.convert_vmd_worker.start()

        return result

        # NOTE(review): unreachable -- the method returns just above.
        event.Skip()

    # Handle completion of the VMD conversion worker.
    def on_convert_vmd_result(self, event: wx.Event):
        self.elapsed_time = event.elapsed_time
        # Play the finish sound
        self.frame.sound_finish()
        # Allow tab switching again
        self.release_tab()
        # Re-enable the form
        self.enable()
        # Drop the worker reference
        self.convert_vmd_worker = None
        # Hide/reset the progress gauge
        self.gauge_ctrl.SetValue(0)

        if not event.result:
            logger.error("VMD変換処理に失敗しました。", decoration=MLogger.DECORATION_BOX)
            event.Skip()
            return False

        logger.info("VMD変換が完了しました", decoration=MLogger.DECORATION_BOX, title="OK")

        # Restore the default stdout
        sys.stdout = self.frame.file_panel_ctrl.console_ctrl
93ea71308e6fd5d365bda5609b169c4f773ce234
2a3157ccb5376ffb03b13df4721afa405fbfc95d
/bin/virtualenv
851dd46bb411994b649306face43a5dd104c9557
[]
no_license
bopopescu/DemoDjango
694501259322590d2959ef65cb6231ba1b1cf128
b5ea252f0293ea63905a72045703b50815fbd673
refs/heads/master
2022-11-20T23:25:41.737807
2018-09-17T09:49:28
2018-09-17T09:49:28
282,543,262
0
0
null
2020-07-25T23:44:16
2020-07-25T23:44:16
null
UTF-8
Python
false
false
241
#!/home/jinesh/Documents/djangoproj/bin/python
# -*- coding: utf-8 -*-
# setuptools console-script wrapper for virtualenv.
import re
import sys

from virtualenv import main

if __name__ == '__main__':
    # Strip a trailing "-script.pyw"/".exe" (wrapper artifact) from argv[0]
    # before handing control to virtualenv's entry point.
    program = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.argv[0] = program
    sys.exit(main())
dca2f3644310a1e7c67b6ae89b9eb9ea3a0c23db
781e2692049e87a4256320c76e82a19be257a05d
/all_data/exercism_data/python/bob/b4037f9e2f47429f9d3e6ac8ed0fa8bf.py
1a70ecf442bfba0ffa267c895ed7411ce53dcf4a
[]
no_license
itsolutionscorp/AutoStyle-Clustering
54bde86fe6dbad35b568b38cfcb14c5ffaab51b0
be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
refs/heads/master
2020-12-11T07:27:19.291038
2016-03-16T03:18:00
2016-03-16T03:18:42
59,454,921
4
0
null
2016-05-23T05:40:56
2016-05-23T05:40:56
null
UTF-8
Python
false
false
515
py
class Bob: def hey(self, ask): conversation = Identify(ask) if conversation.question(): return "Sure." elif conversation.yell(): return "Woah, chill out!" elif conversation.anything(): return "Fine. Be that way!" else: return "Whatever." class Identify: def __init__(self, ask): self.ask = ask or "" def question(self): return self.ask.endswith("?") def yell(self): return self.ask == self.ask.upper() def anything(self): return self.ask.replace(" ","") == self.ask.split()
3ca771e19dc6b23d14b4a8164764a44e5830a529
03195a6f98396fd27aedc3c06d81f1553fb1d16b
/pandas/core/_numba/executor.py
0b59d0717a476b949054b145952a0c044d5e15b9
[ "BSD-3-Clause" ]
permissive
huaxz1986/pandas
a08d80d27726fe141d449835b9a09265bca5b5e0
ba2473834fedcf571d3f8245b4b24796873f2736
refs/heads/master
2023-06-11T02:20:14.544220
2022-01-12T04:40:06
2022-01-12T04:40:06
131,370,494
3
4
BSD-3-Clause
2018-04-28T03:51:05
2018-04-28T03:51:05
null
UTF-8
Python
false
false
1,726
py
from __future__ import annotations from typing import ( TYPE_CHECKING, Callable, ) import numpy as np from pandas._typing import Scalar from pandas.compat._optional import import_optional_dependency from pandas.core.util.numba_ import ( NUMBA_FUNC_CACHE, get_jit_arguments, ) def generate_shared_aggregator( func: Callable[..., Scalar], engine_kwargs: dict[str, bool] | None, cache_key_str: str, ): """ Generate a Numba function that loops over the columns 2D object and applies a 1D numba kernel over each column. Parameters ---------- func : function aggregation function to be applied to each column engine_kwargs : dict dictionary of arguments to be passed into numba.jit cache_key_str: str string to access the compiled function of the form <caller_type>_<aggregation_type> e.g. rolling_mean, groupby_mean Returns ------- Numba function """ nopython, nogil, parallel = get_jit_arguments(engine_kwargs, None) cache_key = (func, cache_key_str) if cache_key in NUMBA_FUNC_CACHE: return NUMBA_FUNC_CACHE[cache_key] if TYPE_CHECKING: import numba else: numba = import_optional_dependency("numba") @numba.jit(nopython=nopython, nogil=nogil, parallel=parallel) def column_looper( values: np.ndarray, start: np.ndarray, end: np.ndarray, min_periods: int, *args, ): result = np.empty((len(start), values.shape[1]), dtype=np.float64) for i in numba.prange(values.shape[1]): result[:, i] = func(values[:, i], start, end, min_periods, *args) return result return column_looper
fc668b0f4beb102abcf466f2f54e0323dd94b77f
f0d713996eb095bcdc701f3fab0a8110b8541cbb
/k9usvZ8wfty4HwqX2_2.py
6df3da8982061b94fd50d4d07581a39b1c4e148e
[]
no_license
daniel-reich/turbo-robot
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
a7a25c63097674c0a81675eed7e6b763785f1c41
refs/heads/main
2023-03-26T01:55:14.210264
2021-03-23T16:08:01
2021-03-23T16:08:01
350,773,815
0
0
null
null
null
null
UTF-8
Python
false
false
1,045
py
""" Create a function to check whether a given number is **Cuban Prime**. A cuban prime is a prime number that is a solution to one of two different specific equations involving third powers of x and y. For this challenge we are only concerned with the cuban numbers from the **first equation**. We **ignore** the cuban numbers from the **second equation**. ### Equation Form p = (x^3 - y^3)/(x - y), x = y + 1, y > 0 ... and the first few cuban primes from this equation are 7, 19, 37, 61, 127, 271. ### Examples cuban_prime(7) ➞ "7 is cuban prime" cuban_prime(9) ➞ "9 is not cuban prime" cuban_prime(331) ➞ "331 is cuban prime" cuban_prime(40) ➞ "40 is not cuban prime" ### Notes * The inputs are positive integers only. * Check the **Resources** for help. """ is_prime=lambda p:p>1and all(p%i for i in range(2,int(p**0.5+1))) ​ def cuban_prime(n): for y in range(n): if n==3*y**2+3*y+1 and is_prime(n):return str(n)+' is cuban prime' return str(n)+' is not cuban prime'
f4f5aba0f8f2e294996ec623c74604d180bfc276
52b5773617a1b972a905de4d692540d26ff74926
/.history/2D_20200722181027.py
ea6e6f492e2b93ebfeedfabbb4e5edb694f6f6ce
[]
no_license
MaryanneNjeri/pythonModules
56f54bf098ae58ea069bf33f11ae94fa8eedcabc
f4e56b1e4dda2349267af634a46f6b9df6686020
refs/heads/master
2022-12-16T02:59:19.896129
2020-09-11T12:05:22
2020-09-11T12:05:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,348
py
def array(n,m): # where n is row size and m is column size array = [[0 for x in range(n)] for x in range(m)] print(array) a = [[2, 4, 6, 8, 10], [3, 6, 9, 12, 15], [4, 8, 12, 16, 20]] # where the first arguement reps the row and second arguement reps the column print(a[0][3]) from sys import maxint def hourGlass(arr): # you have a 2d array # get max hour glass # var maxCount to keep record of the max count # what do you know about an hourglass # the indicies fall in a pattern where # i and i+2 are not equal to 0 and i + 1 is equal to 0 maxCount = - maxint if arr !=[]: for i in range(len(arr)-2): totalCount = 0 # remember j is looping through arr[i] for j in range(len(arr[i])-2): totalCount = arr[i][j] + arr[i][j+1] + arr[i][j+2] + arr[i+1][j+1] + arr[i+2][j] + arr[i+2][j+1] + arr[i+2][j+2] print('total',totalCount) if totalCount > maxCount: maxCount = totalCount print(maxCount) else: return 0 print(hourGlass([[-1,-1,0,-9,-2,-2],[-2,-1,-6,-8,-2,-5],[-1,-1,-1,-2,-3,-4],[-1,-9,2,-4,-4,-5],[-7,-3,-3,-2,-9,-9],[-1,-3,-1,-2,-4,-5]]))
99f8c1a49641c470c778fea08467ebaf332d4693
8997a0bf1e3b6efe5dd9d5f307e1459f15501f5a
/graph__networkx__d3__dot_graphviz/graphviz__examples/generate__as__bytes.py
cad13511e6c2200cf6958416c256790986119d81
[ "CC-BY-4.0" ]
permissive
stepik/SimplePyScripts
01092eb1b2c1c33756427abb2debbd0c0abf533f
3259d88cb58b650549080d6f63b15910ae7e4779
refs/heads/master
2023-05-15T17:35:55.743164
2021-06-11T22:59:07
2021-06-11T22:59:07
null
0
0
null
null
null
null
UTF-8
Python
false
false
300
py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash' # pip install graphviz from graphviz import Digraph g = Digraph('G', format='svg') g.edge('Hello', 'World') # Get bytes print(g.pipe()) print(g.pipe('png')) # OR: # g.format = 'png' # print(g.pipe()) print(g.pipe('pdf'))
00b1a11eae7b2cec07120665c6de7285c8bbdae3
7ce479cac0a14d924159db9c784e3325b8f0bce7
/schemaorgschemas/Thing/Intangible/Enumeration/MedicalImagingTechnique/Ultrasound/__init__.py
f0e44756fdb174fb8619176fe9fda3fa72543f5a
[]
no_license
EvelineAndreea/AGRe
1f0c27237eb047a60bbcfb8d73e3157035406409
b952125896a82741f6617c259dd4060954583180
refs/heads/master
2020-04-08T16:08:11.517166
2018-11-28T07:15:56
2018-11-28T07:15:56
null
0
0
null
null
null
null
UTF-8
Python
false
false
465
py
# -*- coding: utf-8 -*- from schemaorgschemas.djangoschema import SchemaObject, SchemaProperty, SchemaEnumProperty, SCHEMA_ORG from django.conf import settings class UltrasoundSchema(SchemaObject): """Schema Mixin for Ultrasound Usage: place after django model in class definition, schema will return the schema.org url for the object Ultrasound imaging. """ def __init__(self): self.schema = 'Ultrasound' # schema.org version 2.0
0e647dd279872f9ca98db25c23550b1a1e7e5fb4
df83f97ed2c6dd199005e96bc7c494cfb3b49f8c
/GeeksForGeeks/String Rotations.py
42ed217509cdfcaf23e1e662e437f71bfb0dfa7b
[]
no_license
poojan14/Python-Practice
45f0b68b0ad2f92bbf0b92286602d64f3b1ae992
ed98acc788ba4a1b53bec3d0757108abb5274c0f
refs/heads/master
2022-03-27T18:24:18.130598
2019-12-25T07:26:09
2019-12-25T07:26:09
null
0
0
null
null
null
null
UTF-8
Python
false
false
469
py
''' Given strings s1 and s2, you need to find if s2 is a rotated version of the string s1. The strings are lowercase. ''' if __name__ == '__main__': T = int(input()) for _ in range(T): s1 = input() s2 = input() if len(s1)==len(s2): tmp = s1+s1 # It gives all possible rotations if s2 in tmp : print(1) # of a string. else : print(0) else: print(0)
e172c4d221cb93b78fdf15d990b35e7e7e7fd500
48894ae68f0234e263d325470178d67ab313c73e
/scripts/noc-wf.py
9a461df838cfb1119d145697b6241de9a1a2e87f
[ "BSD-3-Clause", "LicenseRef-scancode-unknown-license-reference" ]
permissive
DreamerDDL/noc
7f949f55bb2c02c15ac2cc46bc62d957aee43a86
2ab0ab7718bb7116da2c3953efd466757e11d9ce
refs/heads/master
2021-05-10T18:22:53.678588
2015-06-29T12:28:20
2015-06-29T12:28:20
118,628,133
0
0
null
2018-01-23T15:19:51
2018-01-23T15:19:51
null
UTF-8
Python
false
false
663
py
#!./bin/python # -*- coding: utf-8 -*- ##---------------------------------------------------------------------- ## noc-wf daemon ##---------------------------------------------------------------------- ## Copyright (C) 2007-2011 The NOC Project ## See LICENSE for details ##---------------------------------------------------------------------- if __name__ == "__main__": from noc.wf.wf.daemon import WFDaemon from noc.lib.debug import error_report from noc.main.models import CustomField CustomField.install_fields() try: WFDaemon().process_command() except SystemExit: pass except Exception: error_report()
d3905ca9265658e5bf4b7a91a378ed0ea340b520
ac5e52a3fc52dde58d208746cddabef2e378119e
/exps-gsn-edf/gsn-edf_ut=3.0_rd=1_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=82/sched.py
304905f0cc9f12230fa3ed58eca351b59ad910a9
[]
no_license
ricardobtxr/experiment-scripts
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
7bcebff7ac2f2822423f211f1162cd017a18babb
refs/heads/master
2023-04-09T02:37:41.466794
2021-04-25T03:27:16
2021-04-25T03:27:16
358,926,457
0
0
null
null
null
null
UTF-8
Python
false
false
337
py
-X FMLP -Q 0 -L 2 105 400 -X FMLP -Q 0 -L 2 85 250 -X FMLP -Q 0 -L 2 70 250 -X FMLP -Q 1 -L 2 66 200 -X FMLP -Q 1 -L 2 64 250 -X FMLP -Q 1 -L 2 50 200 -X FMLP -Q 2 -L 1 41 150 -X FMLP -Q 2 -L 1 40 125 -X FMLP -Q 2 -L 1 34 100 -X FMLP -Q 3 -L 1 33 200 -X FMLP -Q 3 -L 1 20 250 -X FMLP -Q 3 -L 1 10 100
ade4325ffae0867072eb07d5294917e637b30a23
de4d26a724b966ca8d0b95ec3063b5b784129028
/UserData/UserApp/migrations/0002_auto_20190402_0505.py
cc02790701761a7d0486f6803b359929ae666412
[]
no_license
ChetanKoranga/UserRESTapi
88904a326a093842ad68628eed98ea5ca2a95de0
11342bef21be163c4faf79744e90e9848e3a89bf
refs/heads/master
2020-05-04T00:01:22.998117
2019-04-02T05:51:18
2019-04-02T05:51:18
178,876,580
0
0
null
null
null
null
UTF-8
Python
false
false
373
py
# Generated by Django 2.2 on 2019-04-02 05:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('UserApp', '0001_initial'), ] operations = [ migrations.AlterField( model_name='usermodel', name='zip', field=models.CharField(max_length=10), ), ]
a003a04f25ae531bcff5fcc6b77658dab1d893f8
ca82e3c6084e697ecbdbf32d96c08293c5540287
/courses/python_data_structures_linked_lists/Exercise Files/Ch05/05_01/End/dll.py
50cac5b09fdcbb4bdfd6e43e8d6640dcd496bb4e
[]
no_license
bheki-maenetja/small-projects-py
8c8b35444ff2ecef7ad77e709392a9c860967ecc
18504d2e1f1ea48b612a4e469828682f426c9704
refs/heads/master
2023-08-17T00:38:06.208787
2023-08-16T16:25:22
2023-08-16T16:25:22
131,871,876
1
0
null
2023-08-14T23:44:23
2018-05-02T15:37:58
Python
UTF-8
Python
false
false
1,390
py
class DLLNode: def __init__(self, data): self.data = data self.next = None self.previous = None def __repr__(self): return "DLLNode object: data={}".format(self.data) def get_data(self): """Return the self.data attribute.""" return self.data def set_data(self, new_data): """Replace the existing value of the self.data attribute with new_data parameter.""" self.data = new_data def get_next(self): """Return the self.next attribute""" return self.next def set_next(self, new_next): """Replace the existing value of the self.next attribute with new_next parameter.""" self.next = new_next def get_previous(self): """Return the self.previous attribute""" return self.previous def set_previous(self, new_previous): """Replace the existing value of the self.previous attribute with new_previous parameter.""" self.previous = new_previous class DLL: def __init__(self): self.head = None def __repr__(self): return "<DLL object: head=>".format(self.head) def is_empty(self): return self.head is None def size(self): pass def search(self, data): pass def add_front(self, data): pass def remove(self, data): pass
f3c5d20d29dd9b88627ce522e66785298e8855f1
498fcf34fa4482be5c9fefc488666e60edcf46c7
/supervised_learning/0x08-deep_cnns/6-transition_layer.py
b1f56c159fcbde725fe51e00dbf6f594f96be8dd
[]
no_license
MansourKef/holbertonschool-machine_learning
7dbc465def04c311c1afb0e8b8903cbe34c72ad3
19f78fc09f0ebeb9f27f3f76b98e7a0e9212fd22
refs/heads/main
2023-03-12T16:18:08.919099
2021-03-05T09:42:09
2021-03-05T09:42:09
317,303,125
0
0
null
null
null
null
UTF-8
Python
false
false
659
py
#!/usr/bin/env python3 """module""" import tensorflow.keras as K def transition_layer(X, nb_filters, compression): """function""" BN1 = K.layers.BatchNormalization(axis=3)(X) Relu1 = K.layers.Activation("relu")(BN1) conv1 = K.layers.Conv2D(int(compression * nb_filters), kernel_size=(1, 1), padding="same", kernel_initializer="he_normal", strides=(1, 1))(Relu1) pool5 = K.layers.AveragePooling2D(pool_size=(2, 2), strides=(2, 2))(conv1) return pool5, int(compression * nb_filters)
270b750136f37b35a8ec6301de7546fe80dc514e
8186514b510a801863229e3f9711c0c657e727e5
/assembly/qtable/qlist_q.py
c4d46f59661410f1d3c06c6df3d6c2b23370a997
[]
no_license
masknugget/mypyqt
274b2cbbf66c04927453815248f9c1bc5e65ca17
b86a49e4b8c7c8c3d8546ce1b49f8f3bb6332307
refs/heads/main
2023-08-17T13:30:11.451066
2021-09-27T14:14:54
2021-09-27T14:14:54
355,904,935
0
0
null
null
null
null
UTF-8
Python
false
false
2,241
py
# 自定义控件--实现了一个带全选功能的复选框 import sys from PyQt5.QtWidgets import QApplication, QListWidget, QCheckBox, QListWidgetItem from PyQt5.QtCore import Qt class FilteredList(QListWidget): # 继承自列表控件 def __init__(self, textList, parent=None): super().__init__(parent) self.selectAll_ch = QCheckBox("全选(selectAll)") self.selectAll_ch.setCheckState(Qt.Checked) self.selectAll_ch.stateChanged[int].connect(self.on_selectAll) # item = QListWidgetItem(self) self.setItemWidget(item, self.selectAll_ch) # 列表控件的项设为 QCheckBox self.dict = dict() self.boxes = set() for index, text in enumerate(textList): ch = QCheckBox(text) ch.setCheckState(Qt.Unchecked) ch.stateChanged[int].connect(self.on_stateChanged) # item.setCheckState(Qt.Unchecked)# item = QListWidgetItem(self) self.setItemWidget(item, ch) self.boxes.add(ch) self.dict[index] = ch def on_selectAll(self, state): if state == 2: for ch in self.boxes: ch.setCheckState(2) if state == 0: for ch in self.boxes: ch.setCheckState(0) def on_stateChanged(self, state): ch = self.sender() if state: if len([ch for ch in self.boxes if ch.checkState()]) == self.count() - 1: # 0 不选中, 1 部分选中,2 全选中 #Qt.Unchecked #Qt.PartiallyChecked #Qt.Checked self.selectAll_ch.setCheckState(2) else: self.selectAll_ch.setCheckState(1) else: if len([k for k in self.boxes if k.checkState()]): self.selectAll_ch.setCheckState(1) else: self.selectAll_ch.setCheckState(0) def keyPressEvent(self, event): # Ctrl+A 全选 if event.modifiers() & Qt.ControlModifier and event.key() == Qt.Key_A: self.selectAll_ch.setCheckState(2) if __name__ == '__main__': app = QApplication(sys.argv) myList = FilteredList(textList=["a", "b", "c", "d"]) myList.show() sys.exit(app.exec_())
12655a75caf61802783410d883ae5ec5680cefe5
b77cc1448ae2c68589c5ee24e1a0b1e53499e606
/asset/migrations/0005_auto_20171026_1532.py
eb4e2ea65956f0a359a6c7516eb7dbb444b94e2a
[]
no_license
PregTech-c/Hrp_system
a5514cf6b4c778bf7cc58e8a6e8120ac7048a0a7
11d8dd3221497c536dd7df9028b9991632055b21
refs/heads/master
2022-10-09T07:54:49.538270
2018-08-21T11:12:04
2018-08-21T11:12:04
145,424,954
1
1
null
2022-10-01T09:48:53
2018-08-20T13:58:31
JavaScript
UTF-8
Python
false
false
664
py
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2017-10-26 12:32 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('asset', '0004_auto_20171022_1404'), ] operations = [ migrations.AddField( model_name='asset', name='model', field=models.CharField(default='test', max_length=32), preserve_default=False, ), migrations.AlterField( model_name='asset', name='description', field=models.CharField(max_length=256, null=True), ), ]
33282c89da89f060278ed17e50013ffdb1f88707
455c1cec4101254a0b7f50349e915411033a0af1
/supervised_learning/0x00-binary_classification/9-neural_network.py
5f65dc0fea7fe410b59fbce3194f1ddcd97e815b
[]
no_license
Daransoto/holbertonschool-machine_learning
30c9f2753463d57cac87f245b77c8d6655351e75
1e7cd1589e6e4896ee48a24b9ca85595e16e929d
refs/heads/master
2021-03-10T14:32:09.419389
2020-10-23T19:47:31
2020-10-23T19:47:31
246,461,514
0
1
null
null
null
null
UTF-8
Python
false
false
1,290
py
#!/usr/bin/env python3 """ Creates a neural network. """ import numpy as np class NeuralNetwork: """ Neural network class. """ def __init__(self, nx, nodes): """ Initializer for the neural network. """ if type(nx) != int: raise TypeError('nx must be an integer') if nx < 1: raise ValueError('nx must be a positive integer') if type(nodes) != int: raise TypeError('nodes must be an integer') if nodes < 1: raise ValueError('nodes must be a positive integer') self.__W1 = np.random.randn(nodes, nx) self.__b1 = np.zeros((nodes, 1)) self.__A1 = 0 self.__W2 = np.random.randn(1, nodes) self.__b2 = 0 self.__A2 = 0 @property def W1(self): """ Getter for W1. """ return self.__W1 @property def b1(self): """ Getter for b1. """ return self.__b1 @property def A1(self): """ Getter for A1. """ return self.__A1 @property def W2(self): """ Getter for W2. """ return self.__W2 @property def b2(self): """ Getter for b2. """ return self.__b2 @property def A2(self): """ Getter for A2. """ return self.__A2
f10d585c637387ccc269aab61ce295e13ab11663
321e58ab3e6b2385bb3549aaaefd56a58c2a51e7
/python/atpic/perf_postgres.py
3c2b1312c886a38a2fa3d9e62deeb883a4697fb5
[]
no_license
alexmadon/atpic_photosharing
7829118d032344bd9a67818cd50e2c27a228d028
9fdddeb78548dadf946b1951aea0d0632e979156
refs/heads/master
2020-06-02T15:00:29.282979
2017-06-12T17:09:52
2017-06-12T17:09:52
94,095,494
0
0
null
null
null
null
UTF-8
Python
false
false
1,408
py
import atpic.database import time import pycurl import StringIO import cStringIO time1=time.time() for i in range(1,100): print i con=atpic.database.connect() listofdict=atpic.database.query("select 1",con) con.close() time2=time.time() print "==========" con=atpic.database.connect() for i in range(1,100): print i query="select id from artist_pic where id='%i'" % i listofdict=atpic.database.query(query,con) con.close() time3=time.time() # using Solr + curl new curl handle each time (new socket) #fp=open("/dev/null","w") fp=cStringIO.StringIO() for i in range(1,100): print i url="http://localhost:8983/solr/select/?q=pid:%i&fl=pid" % i c=pycurl.Curl() # c.setopt(c.WRITEDATA,fp); c.setopt(c.WRITEFUNCTION, fp.write) c.setopt(c.URL, url); c.perform() c.close() # print data fp.close() time4=time.time() # using Solr + curl same curl handle c=pycurl.Curl() fp=cStringIO.StringIO() for i in range(1,100): print i #c.setopt(c.WRITEDATA,fp); url="http://localhost:8983/solr/select/?q=pid:%i&fl=pid" % i c.setopt(c.WRITEFUNCTION, fp.write) c.setopt(c.URL, url); c.perform() c.close() fp.close() time5=time.time() print "Time1 %s" % (time2-time1) print "Time2 %s" % (time3-time2) print "Ratio=%f" % ((time2-time1)/(time3-time2)) print "Time3 %s" % (time4-time3) print "Time4 %s" % (time5-time4)
06f952c695c3533ca0dd029f3e93895af5b02c59
5c8139f1e57e06c7eaf603bd8fe74d9f22620513
/PartB/py删除链表的倒数第n个节点的位置的值2.py
ab9093a8ca2755b9b1f62111641d210996e07d4a
[]
no_license
madeibao/PythonAlgorithm
c8a11d298617d1abb12a72461665583c6a44f9d2
b4c8a75e724a674812b8a38c0202485776445d89
refs/heads/master
2023-04-03T07:18:49.842063
2021-04-11T12:02:40
2021-04-11T12:02:40
325,269,130
0
0
null
null
null
null
UTF-8
Python
false
false
915
py
# 把一个链表的倒数的第n个节点来进行删除。 class ListNode(object): def __init__(self, x): self.val = x self.next = None class Solution(object): def remove(self, head, n): dummy = ListNode(-1) dummy.next = head slow = dummy fast = dummy for i in range(n): fast = fast.next while fast and fast.next: fast = fast.next slow = slow.next slow.next = slow.next.next return dummy.next if __name__ == "__main__": s = Solution() n1 = ListNode(1) n2 = ListNode(2) n3 = ListNode(3) n4 = ListNode(4) n5 = ListNode(5) n6 = ListNode(6) n1.next = n2 n2.next = n3 n3.next = n4 n4.next = n5 n5.next = n6 n6.next = None k = 2 res = s.remove(n1, k) while res: print(res.val, end="->") res = res.next
9ac60f6dc3755d4c8f3c20fd4d1cd54718994a90
2faf152deabb0476ac43d4754f3b529fd678a36d
/ch_18.py
3d923149df97df02941390334db1bf1ff1f74392
[]
no_license
Sakartu/matasano
46cba1325a01c41f6272f80b9fa698c6338c2e50
b42e5a2ce5daa2fcc6691873e995a4b0d05e03d2
refs/heads/master
2021-01-23T09:51:50.305296
2015-08-10T15:37:59
2015-08-10T15:37:59
32,535,769
0
0
null
null
null
null
UTF-8
Python
false
false
542
py
#!/usr/bin/env python3 # -*- coding: utf8 -*- """ Usage: test_ctr.py """ import base64 import util __author__ = 'peter' def main(): test = base64.b64decode('L77na/nrFsKvynd6HzOoG7GHTLXsTVu9qvY/2syLXzhPweyyMTJULu/6/kXX0KSvoOLSFQ==') assert util.aes_ctr_decrypt(test, b"YELLOW SUBMARINE") == b"Yo, VIP Let's kick it Ice, Ice, baby Ice, Ice, baby " k = util.get_random_bytes(16) m = b'This is an interesting message' assert util.aes_ctr_decrypt(util.aes_ctr_encrypt(m, k), k) == m if __name__ == '__main__': main()