Dataset Viewer
blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
288
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 684
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 147
values | src_encoding
stringclasses 25
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 128
12.7k
| extension
stringclasses 142
values | content
stringlengths 128
8.19k
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
af5ee455cb7393efd56233ca1556032ce3b6435c | 4c68778814b938d91d184749b50940549439c0f3 | /scheme/fields/time.py | fe6e0bb58b391be8c8074c6fe7792ac82fede471 | [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jordanm/scheme | 96a747258ce68de756ffe7996b37c3e8747a740c | 5a87e24b35bb2f80b474273bf2e5c5fd563214e0 | refs/heads/master | 2021-01-17T05:48:51.479000 | 2020-01-20T16:03:28 | 2020-01-20T16:03:28 | 32,604,302 | 8 | 4 | NOASSERTION | 2020-01-20T16:03:29 | 2015-03-20T20:05:12 | Python | UTF-8 | Python | false | false | 3,174 | py | from __future__ import absolute_import
from datetime import time
from time import strptime
from scheme.exceptions import *
from scheme.field import *
__all__ = ('Time',)
class Time(Field):
    """A field for time-of-day values (``datetime.time``).

    Optional inclusive ``minimum``/``maximum`` bounds may be supplied either
    as ``datetime.time`` instances or as ``'HH:MM:SS'`` strings.
    """

    basetype = 'time'
    equivalent = time
    parameters = {'maximum': None, 'minimum': None}
    pattern = '%H:%M:%S'

    errors = [
        FieldError('invalid', 'invalid value', '%(field)s must be a time value'),
        FieldError('minimum', 'minimum value', '%(field)s must not occur before %(minimum)s'),
        FieldError('maximum', 'maximum value', '%(field)s must not occur after %(maximum)s'),
    ]

    def __init__(self, minimum=None, maximum=None, **params):
        super(Time, self).__init__(**params)
        # Normalise the maximum first, then the minimum, preserving the
        # order in which the original implementation validated them.
        maximum = self._normalize_bound('maximum', maximum)
        minimum = self._normalize_bound('minimum', minimum)
        self.maximum = maximum
        self.minimum = minimum

    def _normalize_bound(self, name, bound):
        # Accepts None, a datetime.time, or an 'HH:MM:SS' string; raises
        # TypeError for anything else.
        if bound is None:
            return None
        try:
            return self._unserialize_value(bound)
        except InvalidTypeError:
            raise TypeError("argument '%s' must be either None, a datetime.time,"
                            " or a string in the format 'HH:MM:SS'" % name)

    def __repr__(self):
        aspects = ['%s=%r' % (name, getattr(self, name))
                   for name in ('minimum', 'maximum')
                   if getattr(self, name) is not None]
        return super(Time, self).__repr__(aspects)

    def describe(self, parameters=None, verbose=False):
        # Bounds are described in their serialized HH:MM:SS form.
        params = {}
        for name in ('maximum', 'minimum'):
            bound = getattr(self, name)
            if bound is not None:
                params[name] = bound.strftime(self.pattern)
        return super(Time, self).describe(parameters=parameters, verbose=verbose, **params)

    def _serialize_value(self, value):
        return value.strftime(self.pattern)

    def _unserialize_value(self, value, ancestry=None):
        if isinstance(value, time):
            return value
        try:
            parsed = strptime(value, self.pattern)
        except Exception:
            raise InvalidTypeError(identity=ancestry, field=self,
                                   value=value).construct('invalid')
        # struct_time fields 3..5 are (hour, minute, second).
        return time(*parsed[3:6])

    def _validate_value(self, value, ancestry):
        if not isinstance(value, time):
            raise InvalidTypeError(identity=ancestry, field=self,
                                   value=value).construct('invalid')
        if self.minimum is not None and value < self.minimum:
            raise ValidationError(identity=ancestry, field=self, value=value).construct(
                'minimum', minimum=self.minimum.strftime(self.pattern))
        if self.maximum is not None and value > self.maximum:
            raise ValidationError(identity=ancestry, field=self, value=value).construct(
                'maximum', maximum=self.maximum.strftime(self.pattern))
| [
"[email protected]"
] | |
48cd42cf70cd98648276cce423fd29d9850f9d0a | f2ab8ccda7203dd37d61facb9978cf74b781c7f1 | /tests/apps.py | 863cf58e139c91b4d865bed2d8a46b94a061f588 | [
"MIT"
] | permissive | Apkawa/easy-thumbnails-admin | 1991137224dcd117520b2c114d4012daf803776e | 9d7a38f215cdac53a663b00f1d4ff3a3c2a54eb4 | refs/heads/master | 2021-01-01T15:47:34.334000 | 2017-11-23T10:38:09 | 2017-11-23T10:38:09 | 97,703,157 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 274 | py | try:
from django.apps import AppConfig
except ImportError:
# Early Django versions import everything in test, avoid the failure due to
# AppConfig only existing in 1.7+
AppConfig = object
class TestConfig(AppConfig):
    # Django app configuration registering the test suite's ``tests``
    # package. Note AppConfig may be the plain ``object`` fallback on very
    # old Django versions (see the import guard above).
    name = 'tests'
    label = 'tests'
| [
"[email protected]"
] | |
eed58a6b703faab6b504f4b3a66b4de43ae04f0a | e75521f26a9a6fdbd0b9dbe396b14a5f3c1af305 | /src/repositories/word_classifier_repository.py | 10cf90739a261923161b283cb2b1127ab1de82cd | [] | no_license | Ap3lsin4k/words-as-part-of-speech | 2636edb87d309d44d3d18add14aadd13f7810507 | e7f35d56d65a8f5033498f650265cadbd742a9de | refs/heads/master | 2023-01-31T19:01:11.007000 | 2020-12-15T10:57:20 | 2020-12-15T10:57:20 | 320,807,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,258 | py | from bookmark_entity import Bookmark
from language_entity import LanguageEntity
from repositories.dictionary_surfer_common import DictionarySurferRepository
class WordClassifierRepository(DictionarySurferRepository):
    # Classifies a word by scanning the dictionary for every property set
    # that contains it, building ``self.result`` as a presentable mapping.

    def __init__(self, dictionary_entity: LanguageEntity):
        super().__init__(dictionary_entity)

    def make_response_model(self, part_of_speech, input_word):
        # Start from an empty property map for the requested part of speech.
        self.result = {part_of_speech: {}}
        # ``properties`` is unused in the loop body; only the category name
        # feeds the bookmark.
        for category_of_property, properties in self.dictionary[part_of_speech].items():
            bookmark = Bookmark(part_of_speech, category_of_property)
            self.__classify_word_by_property(bookmark, input_word)
        if len(self.result[part_of_speech]) == 0:
            # No property matched: signal "not classified" with None.
            self.result = None

    def __save_property_of_word_to_presentable_format(self, bookmark):
        # Record category -> matched property under the part of speech.
        self.result[bookmark.get_part_of_speech()].update({bookmark.category_name: bookmark.property_name})

    def __classify_word_by_property(self, bookmark, input_word):
        # Try each property in the bookmark's category; record any whose
        # word list contains the input word. Note the loop variable is the
        # bookmark's own attribute, mutated in place.
        for bookmark.property_name in self.dictionary.get_properties(bookmark):
            words_tuple = self.dictionary.get_words_for_property(bookmark)
            if input_word in words_tuple:
                self.__save_property_of_word_to_presentable_format(bookmark)
"[email protected]"
] | |
f6fece3b5719a65008ae0fbe700a817b469a7a51 | e7eff96df8160d3c238bf38068c99c7b8bd3005b | /norman/web/frontend/crops.py | 08fa8b6415e718d05231de41cdbcfc0273dddb39 | [] | no_license | sumansai14/norman | 62c3760b47f15bb474786ac045efad5aff757b95 | 43a8c4e53830d57eb552c3ecb98bf2926c9d0457 | refs/heads/master | 2021-03-16T07:57:17.076000 | 2017-05-23T07:36:37 | 2017-05-23T07:36:37 | 92,188,183 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from norman.web.frontend.base import BaseAuthTemplateView
class OrganizationCropsListView(BaseAuthTemplateView):
    # Template view (presumably auth-required, judging by the
    # BaseAuthTemplateView base class) rendering the organization crop list.
    template_name = 'norman/organization/crops_list.html'
| [
"[email protected]"
] | |
2cf1cde00eea109a46c3e5983b4906feef72866f | f0856e60a095ce99ec3497b3f27567803056ac60 | /keras2/keras66_gradient2.py | 0e0d0cc1f27912ef32b11753f760a7606dd315f8 | [] | no_license | hjuju/TF_Study-HAN | dcbac17ce8b8885f5fb7d7f554230c2948fda9ac | c0faf98380e7f220868ddf83a9aaacaa4ebd2c2a | refs/heads/main | 2023-09-04T09:13:33.212000 | 2021-10-27T08:00:49 | 2021-10-27T08:00:49 | 384,371,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | import numpy as np
import matplotlib.pyplot as plt
# Gradient-descent demo: minimise f(x) = x^2 - 4x + 6. The analytic
# derivative 2x - 4 vanishes at x = 2, which is the minimum we expect the
# iteration to approach.

def f(x):
    """Objective function f(x) = x**2 - 4*x + 6."""
    return x**2 - 4 * x + 6

def gradient(x):
    """Analytic derivative f'(x) = 2x - 4 (zero at the minimum x = 2)."""
    return 2*x - 4

x0 = 0.0              # starting point
MaxIter = 20          # number of descent steps
learning_rate = 0.25  # step size

# One shared row template instead of repeating the literal at each print.
_ROW = "{:02d}\t{:6.5f}\t{:6.5f}"

print("step\tx\tf(x)")
print(_ROW.format(0, x0, f(x0)))

for i in range(MaxIter):
    # Step against the gradient: x_{k+1} = x_k - lr * f'(x_k).
    x1 = x0 - learning_rate * gradient(x0)
    x0 = x1
    print(_ROW.format(i+1, x0, f(x0)))
| [
"[email protected]"
] | |
fb2c64c0218df858e821204c4c485f29f4b33c74 | e0527bce5c53a196752d3a16adf50cb60754de5f | /10-How to Stop Programs Crashing Demos/3-is_square.py | 8bf01fcece7fa35279f95d25ece62fa140398965 | [] | no_license | ARWA-ALraddadi/python-tutorial-for-beginners | ddeb657f419fbc176bea273bc9fb6b88d1894191 | 21cedfc47871ca4d25c2382464c60ab0a2121205 | refs/heads/master | 2023-06-30T20:24:30.688000 | 2021-08-08T08:22:29 | 2021-08-08T08:22:29 | 193,094,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,066 | py | ################################################################
##
## As a demonstration of a function which applies defensive
## programming in different ways, consider a predicate
## which is intended to return True if a given natural
## number (i.e., a non-negative integer) is a square of
## another natural number.
##
## From this description the function could be "misused" in
## three ways:
##
## 1) It could be given a negative number.
## 2) It could be given a floating point number.
## 3) It could be given a value which is not a number at
## all.
##
## By adding some "defensive" code we can make a naive
## implementation more robust by responding appropriately
## to each of these cases:
##
## 1) A negative number can never be a square of another
## number, so we can always return False in this case.
## Here we choose to do so "silently", not drawing
## attention to the unexpected value at all, since the
## answer returned is still "correct" mathematically.
## 2) A positive floating point number could be a square of
## a natural number so, even though we're not required
## to handle floating point numbers we can still do so,
## but choose to generate a "warning" message in this
## case.
## 3) If the function is given a non-numerical value it
## is reasonable to assume that something is seriously
## wrong with the calling code, so in this case we
## generate an "error" message and return the special
## value None.
#---------------------------------------------------------
# Return True if the given natural number is the square of
# some other natural number
def is_square(natural_number):
    """Return True if natural_number is the square of a natural number."""
    from math import sqrt

    # Three "defensive" checks follow

##    # Check that the parameter is a number
##    if not (isinstance(natural_number, int) or isinstance(natural_number, float)):
##        print('ERROR - parameter must be numeric, given:', repr(natural_number))
##        return None
##
##    # Check that the parameter is positive
##    if natural_number < 0:
##        return False
##
##    # Check that the parameter is a natural number
##    if isinstance(natural_number, float):
##        print('Warning - expected natural, given float:', natural_number)

    # A perfect square's square root has no fractional part.
    return sqrt(natural_number).is_integer()
#---------------------------------------------------------
# Some tests
#
# The first of these tests is a "valid" one, but the remaining
# three all provide unexpected inputs. Uncommenting the
# "defensive" checks above will cause the function to respond
# appropriately. (It will crash until the defensive code is
# uncommented. Why?)
# NOTE: while the defensive checks inside is_square remain commented out,
# the is_square(-1) call below raises ValueError (math domain error) and
# execution stops there -- that crash is part of the demonstration.
print(is_square(36)) # expected input
print()
print(is_square(-1)) # unexpected input, but handled silently
print()
print(is_square(225.0)) # unexpected input, handled with warning
print()
print(is_square('August')) # unexpected input, handled as an error
| [
"[email protected]"
] | |
7bbfd94accf83c65ae4546356bccb460b15a900e | b8ea631aae5d132c7b0236684d5f7c12d3c222be | /Library/Graph/Dijkstra_heapq.py | 6164198b7fcd573492928ce2f82d98e051b23864 | [] | no_license | Ryushi-tech/card3 | 68c429313142e58d4722a1cd5a4acc4ab39ca41f | 883636b2f518e38343a12816c5c641b60a87c098 | refs/heads/master | 2021-07-05T22:46:33.089000 | 2020-12-12T15:31:00 | 2020-12-12T15:31:00 | 209,176,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 708 | py | import heapq
def dijkstra(s):
    """Fill the module-level `dist` list with shortest distances from `s`.

    Uses the module globals `g` (adjacency list of (vertex, weight) pairs)
    and `dist` (pre-filled with an "infinity" sentinel). Lazy-deletion
    variant: stale heap entries are skipped when popped.
    """
    heap = []
    dist[s] = 0
    heapq.heappush(heap, [0, s])
    while heap:
        d, v = heapq.heappop(heap)
        if dist[v] < d:
            continue  # stale entry: a shorter path to v was already found
        for nxt, weight in g[v]:
            candidate = dist[v] + weight
            if candidate < dist[nxt]:
                dist[nxt] = candidate
                heapq.heappush(heap, [candidate, nxt])
# Driver: read a weighted tree of n vertices, run Dijkstra from vertex k,
# then answer m queries with dist[e] + dist[f] (sum of distances from k).
n = int(input())
g = [[] for _ in range(n)]  # adjacency list: g[u] = [(v, weight), ...]
for _ in range(n - 1):
    a, b, c = map(int, input().split())
    a, b = a - 1, b - 1  # convert 1-based input to 0-based indices
    g[a].append((b, c))
    g[b].append((a, c))
inf = 10 ** 14  # "infinity" sentinel, larger than any real path length
dist = [inf] * n
m, k = map(int, input().split())
k = k - 1
dijkstra(k)  # populates dist[] with shortest distances from k
for _ in range(m):
    e, f = map(int, input().split())
    # Distance of the e -> k -> f route (both endpoints measured from k).
    res = dist[e - 1] + dist[f - 1]
    print(res)
| [
"[email protected]"
] | |
c7e2d80388cbe425136e01a06bdb2ea24fa604c6 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_coadd.py | 39e21f206956741881cd664d37e0bb5ecdba667f | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723000 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | from gPhoton.gMap import gMap
def main():
    # One-shot gPhoton gMap run for sdB star SDSS J163557.64+341427.0:
    # builds an NUV count movie in 30 s steps plus a coadded count image.
    # skypos is (RA, Dec) in degrees; skyrange spans 0.0333 deg (2 arcmin).
    gMap(band="NUV", skypos=[248.990167,34.240833], skyrange=[0.0333333333333,0.0333333333333], stepsz = 30., cntfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_movie_count.fits", cntcoaddfile="/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_sdssj9-10_163557.64+341427.0/sdB_sdssj9-10_163557.64+341427.0_count_coadd.fits", overwrite=True, verbose=3)

if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
bde86714c9e9dcc484f3f18212f3921c491fe222 | e50ba4cc303d4165bef9e2917103c084cfbe0e07 | /rating_app/migrations/0016_auto_20201129_1156.py | 25f2b5ff3130d55f5d492b5c185861041cf00086 | [
"MIT"
] | permissive | Antony-me/Ratemyapp | 09049fce54d3a3ed2b256970e7840d20942e8c84 | e547fea82439a3e4f83aa78bf16f93b1ea9ab00b | refs/heads/main | 2023-01-28T16:52:58.635000 | 2020-12-01T16:49:07 | 2020-12-01T16:49:07 | 316,425,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | # Generated by Django 3.1.3 on 2020-11-29 11:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: redeclares ProfileMerch.projects as a
    # ForeignKey to rating_app.post with cascading deletes.

    dependencies = [
        ('rating_app', '0015_profilemerch'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profilemerch',
            name='projects',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rating_app.post'),
        ),
    ]
| [
"[email protected]"
] | |
f82a7850addf3773f1ce92a89e4d51f96cf3f763 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2_neat/16_0_2_tkdkop_pancake.py | 259ec04a68548d92ceed7f438162fc6b46baa760 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405000 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 286 | py | #!/usr/bin/env python
import sys
import itertools
# NOTE: Python 2 source (print statement). For each input line the answer
# printed is the number of character runs minus one, after appending a '+'
# sentinel so a trailing '+' run merges with it.
m = sys.stdin.readline()  # first line: case count (consumed but unused)
i = 0
for line in sys.stdin.readlines():
    line = line.strip()
    i += 1
    out_str = "Case #%d: " % i
    line += '+'  # sentinel: guarantees the final run is a '+' run
    k = itertools.groupby(line)  # one group per maximal run of equal chars
    out_str += str(len(list(k))-1)
    print out_str
| [
"[[email protected]]"
] | |
cdaec89a7ecfa4ae8042bf31ac073b89b8a58072 | a3387fbcc918acb55d289ffb61b9fb603203dc11 | /Puzzles/2022-01/01_22_balanced_days.py | 924f5189761f280c72866b5565b743883fbda28e | [] | no_license | fbhs-cs/purdys-puzzles | 13e970ff909ff2e093b3b9d9777faac47c099913 | 1cf3f9c52677843fad781e46304e1485a91aae58 | refs/heads/master | 2023-08-17T06:28:06.659000 | 2023-08-09T14:45:43 | 2023-08-09T14:45:43 | 212,085,565 | 4 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | from math import ceil
def is_balanced(num):
    """True if the digit sum of the front half of `num` equals that of the
    back half; for odd digit counts the middle digit belongs to both halves.
    """
    digits = str(num)
    split = (len(digits) + 1) // 2  # equivalent to ceil(len/2)
    front_total = sum(int(d) for d in digits[:split])
    back_total = sum(int(d) for d in digits[len(digits) // 2:])
    return front_total == back_total
def count_balanced(n):
    """Return how many integers in [1, n) are balanced."""
    return sum(1 for value in range(1, n) if is_balanced(value))
def sum_balanced(n):
    """Return the total of all balanced integers in [1, n)."""
    return sum(value for value in range(1, n) if is_balanced(value))
def find_balanced_dates():
    # Print every "balanced" date of 2022, then the count and the total.
    # A date is encoded as the integer str(month) + str(day) + '2022'
    # (no zero padding).
    months = {1:31,2:28,3:31,4:30,5:31,6:30,
              7:31,8:31,9:30,10:31,11:30,12:31}  # 2022 is not a leap year
    count = 0
    sum = 0  # NOTE: shadows the builtin sum() within this function
    for month in range(1,13):
        for day in range(1,months[month]+1):
            day_num = str(month) + str(day) + '2022'
            if is_balanced(int(day_num)):
                count += 1
                sum += int(day_num)
                print(day_num)
    print(count)
    print(sum)

find_balanced_dates()
| [
"[email protected]"
] | |
9567422e1472a65046cf8160b1bdae8fbcf7dcd3 | 080c13cd91a073457bd9eddc2a3d13fc2e0e56ae | /MY_REPOS/awesome-4-new-developers/tensorflow-master/tensorflow/python/types/internal.py | c56c7aa6d7790b4c36d248603f2282e60af08a39 | [
"Apache-2.0"
] | permissive | Portfolio-Projects42/UsefulResourceRepo2.0 | 1dccc8961a09347f124d3ed7c27c6d73b9806189 | 75b1e23c757845b5f1894ebe53551a1cf759c6a3 | refs/heads/master | 2023-08-04T12:23:48.862000 | 2021-09-15T12:51:35 | 2021-09-15T12:51:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,129 | py | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Types internal to TensorFlow.
These types should not be exported. External code should not rely on these.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# TODO(mdan): Is this strictly needed? Only ops.py really uses it.
# Marker base class: declares no attributes or methods of its own.
class NativeObject(object):
  """Types natively supported by various TF operations.

  The most notable example of NativeObject is Tensor.
  """
| [
"[email protected]"
] | |
abc2e14c55f8110ca3d0bc1403c2b44d4e5fe36e | 026fee65b95206995baf1565f486ab4ed7f7cef9 | /userprofiles/admin.py | 89683d76fdacc00428bfbad69cc1e019d3f01b5e | [] | no_license | santhoshpkumar/pinclone | e8460aab355ebf3e5559d44127d7ccad22667747 | 8bf641df9a4999797731d1d2fb4ff3d78d717e10 | refs/heads/master | 2020-04-03T09:39:27.269000 | 2018-10-08T10:51:51 | 2018-10-08T10:51:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | from django.contrib import admin
from .models import Profile
# Register your models here.
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
    # Columns shown on the Profile changelist page in the Django admin.
    list_display = ('user', 'bio', 'website', 'birth_date')
| [
"[email protected]"
] | |
28e7dee0700c6fe42c004b939fcaa2b9ff69d27e | eb64b799ff1d7ef3a244bf8e6f9f4e9118d5cfcd | /homeassistant/components/trafikverket_weatherstation/const.py | 7bb53dc5356a0b8a392104982912658806275659 | [
"Apache-2.0"
] | permissive | JeffLIrion/home-assistant | 53966b81b5d5816679f12fc761f79e8777c738d6 | 8f4ec89be6c2505d8a59eee44de335abe308ac9f | refs/heads/dev | 2023-08-22T09:42:02.399000 | 2022-02-16T01:26:13 | 2022-02-16T01:26:13 | 136,679,169 | 5 | 2 | Apache-2.0 | 2023-09-13T06:59:25 | 2018-06-09T00:58:35 | Python | UTF-8 | Python | false | false | 466 | py | """Adds constants for Trafikverket Weather integration."""
from homeassistant.const import Platform

# Integration domain and the config-entry key naming the weather station.
DOMAIN = "trafikverket_weatherstation"
CONF_STATION = "station"

# Entity platforms this integration sets up.
PLATFORMS = [Platform.SENSOR]

# Attribution text for data supplied by the upstream service.
ATTRIBUTION = "Data provided by Trafikverket"

# Extra state attribute keys.
ATTR_MEASURE_TIME = "measure_time"
ATTR_ACTIVE = "active"

# Sensor keys whose upstream value of None should be treated as zero
# (presumably the API omits the field when the reading is 0 -- confirm
# against the sensor platform's usage).
NONE_IS_ZERO_SENSORS = {
    "air_temp",
    "road_temp",
    "wind_direction",
    "wind_speed",
    "wind_speed_max",
    "humidity",
    "precipitation_amount",
}
| [
"[email protected]"
] | |
7642072e77aebda4174a74cfe093db22e6377af7 | 7bd0954e956993df19d833810f9d71b60e2ebb9a | /phasor/utilities/ipynb/hdf.py | b9f7e5b1add89064ffd726859cfe27d4415619ec | [
"Apache-2.0"
] | permissive | aa158/phasor | 5ee0cec4f816b88b0a8ac298c330ed48458ec3f2 | fe86dc6dec3740d4b6be6b88d8eef8566e2aa78d | refs/heads/master | 2021-10-22T09:48:18.556000 | 2019-03-09T18:56:05 | 2019-03-09T18:56:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | # -*- coding: utf-8 -*-
"""
"""
from __future__ import division, print_function, unicode_literals
import h5py
from declarative.bunch.hdf_deep_bunch import HDFDeepBunch
| [
"[email protected]"
] | |
267f5e570bff6ec85a0e60de98259cea7422da0e | edb37da2fd2d2f048df119db96a6de58fc816ddb | /jumpserver-0.4/zrd/my_blog/article/views.py | 0634c5361e1cf968ac0e81b87ea55908e18fa6b5 | [] | no_license | cucy/2017 | 88f1aa2e8df945162d8259918cf61a138a3422cf | 33bcdd5c9e0717521544e3ea41ade10fbb325c4f | refs/heads/master | 2020-05-21T15:31:39.935000 | 2017-07-10T11:04:29 | 2017-07-10T11:04:29 | 84,629,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,766 | py | # coding:utf-8
from django.shortcuts import render
from django.shortcuts import render_to_response
# Create your views here.
from django.http import HttpResponse
from models import SSHInfo
# Create your views here.
try:
from ConfigParser import ConfigParser
except:
from configparser import ConfigParser
try:
import paramiko_client
except:
from . import paramiko_client
def home(request):
    """Render the host list, first importing any uploaded .ini config.

    Each uploaded file is parsed with ConfigParser; every section becomes
    (or updates) an SSHInfo record keyed on all five connection fields.
    """
    for upload_key in request.FILES:
        uploaded = request.FILES[upload_key]
        parser = ConfigParser()
        parser.readfp(uploaded)
        for section in parser.sections():
            print(section)
            # update_or_create matches on all five fields together.
            record, _created = SSHInfo.objects.update_or_create(
                host_name=parser.get(section, 'host_name'),
                host=parser.get(section, 'host'),
                port=parser.get(section, 'port'),
                usr=parser.get(section, 'username'),
                pwd=parser.get(section, 'password'),
            )
            record.save()  # persist the configuration record
    sshs = SSHInfo.objects.all()  # every stored host
    if len(sshs) > 0:
        return render_to_response('sshlist.html', {'sshs': sshs})
    return render_to_response('home_view.html')
def run_ssh_cmd(requset):  # NOTE: parameter name typo preserved for callers
    """Run `date` on every stored host and render the collected output."""
    results = {}
    for ssh in SSHInfo.objects.all():
        client = paramiko_client.ParamikoClient()
        client.connect(ssh)
        # Execute the command and collect its output per host name.
        results[ssh.host_name] = client.run_cmd('date')
    return render_to_response('cmd_res.html', {'cmd_res': results})
| [
"[email protected]"
] | |
d3e3b20b1ce012f78bbc61c3eb7dc31075d016ca | c9094a4ed256260bc026514a00f93f0b09a5d60c | /tests/components/accuweather/test_system_health.py | 749f516e44c748caf05503460e8a72ec34d085d3 | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508000 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 1,785 | py | """Test AccuWeather system health."""
import asyncio
from unittest.mock import Mock
from aiohttp import ClientError
from homeassistant.components.accuweather.const import COORDINATOR, DOMAIN
from homeassistant.setup import async_setup_component
from tests.common import get_system_health_info
async def test_accuweather_system_health(hass, aioclient_mock):
    """Test AccuWeather system health."""
    # Simulate a reachable AccuWeather endpoint (empty 200 response).
    aioclient_mock.get("https://dataservice.accuweather.com/", text="")
    hass.config.components.add(DOMAIN)
    assert await async_setup_component(hass, "system_health", {})
    # Install a stub coordinator whose client reports 42 remaining requests.
    hass.data[DOMAIN] = {}
    hass.data[DOMAIN]["0123xyz"] = {}
    hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock(
        accuweather=Mock(requests_remaining="42")
    )
    info = await get_system_health_info(hass, DOMAIN)
    # Some health values are returned as coroutines; resolve them first.
    for key, val in info.items():
        if asyncio.iscoroutine(val):
            info[key] = await val
    assert info == {
        "can_reach_server": "ok",
        "remaining_requests": "42",
    }
async def test_accuweather_system_health_fail(hass, aioclient_mock):
    """Test AccuWeather system health."""
    # Simulate an unreachable AccuWeather endpoint (client error raised).
    aioclient_mock.get("https://dataservice.accuweather.com/", exc=ClientError)
    hass.config.components.add(DOMAIN)
    assert await async_setup_component(hass, "system_health", {})
    # Stub coordinator reporting an exhausted request quota.
    hass.data[DOMAIN] = {}
    hass.data[DOMAIN]["0123xyz"] = {}
    hass.data[DOMAIN]["0123xyz"][COORDINATOR] = Mock(
        accuweather=Mock(requests_remaining="0")
    )
    info = await get_system_health_info(hass, DOMAIN)
    # Some health values are returned as coroutines; resolve them first.
    for key, val in info.items():
        if asyncio.iscoroutine(val):
            info[key] = await val
    assert info == {
        "can_reach_server": {"type": "failed", "error": "unreachable"},
        "remaining_requests": "0",
    }
| [
"[email protected]"
] | |
1b32ea37e4c7f6126f63d235f5bc196330d2dc7e | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /dimensions_of_motion/geometry.py | d7a317cb08a95e69785f8cd0af032ae5db8a1f29 | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502000 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 7,466 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
"""Functions for sampling and warping images.
We use texture coordinates to represent points and offsets in images. They go
from (0,0) in the top-left corner of an image to (1,1) in the bottom right. It
is convenient to work with these coordinates rather than counts of pixels,
because they are resolution-independent.
"""
import tensorflow as tf
import tensorflow_addons as tfa
import utils
def check_input_shape(name, tensor, axis, value):
  """Raise ValueError unless dimension `axis` of `tensor` equals `value`."""
  shape = tensor.shape.as_list()
  if shape[axis] == value:
    return
  raise ValueError('Input "%s": dimension %d should be %s. Shape = %s' %
                   (name, axis, value, shape))
def pixel_center_grid(height, width):
  """Produce a grid of (x,y) texture-coordinate pairs of pixel centers.

  Args:
    height: (integer) height, not a tensor
    width: (integer) width, not a tensor

  Returns:
    A tensor of shape [height, width, 2] whose entries are the (x, y)
    texture coordinates of the corresponding pixel centers. For example,
    pixel_center_grid(2, 3) yields:
      [[[1/6, 1/4], [3/6, 1/4], [5/6, 1/4]],
       [[1/6, 3/4], [3/6, 3/4], [5/6, 3/4]]]
  """
  h = tf.cast(height, dtype=tf.float32)
  w = tf.cast(width, dtype=tf.float32)
  # Pixel centers sit half a pixel in from each edge of the [0,1] range.
  ys = tf.linspace(0.5 / h, 1.0 - 0.5 / h, height)
  xs = tf.linspace(0.5 / w, 1.0 - 0.5 / w, width)
  xs, ys = tf.meshgrid(xs, ys)
  grid = tf.stack([xs, ys], axis=-1)
  assert grid.shape.as_list() == [height, width, 2]
  return grid
def sample_image(image, coords):
  """Sample points from an image, using bilinear filtering.

  Args:
    image: [B0, ..., Bn-1, height, width, channels] image data
    coords: [B0, ..., Bn-1, ..., 2] (x,y) texture coordinates

  Returns:
    [B0, ..., Bn-1, ..., channels] image data, in which each value is sampled
    with bilinear interpolation from the image at position indicated by the
    (x,y) texture coordinates. The image and coords parameters must have
    matching batch dimensions B0, ..., Bn-1.

  Raises:
    ValueError: if shapes are incompatible.
  """
  check_input_shape('coords', coords, -1, 2)
  # Dynamic spatial size of the image (height, width).
  tfshape = tf.shape(image)[-3:-1]
  height = tf.cast(tfshape[0], dtype=tf.float32)
  width = tf.cast(tfshape[1], dtype=tf.float32)
  # Resampler expects coordinates where (0,0) is the center of the top-left
  # pixel and (width-1, height-1) is the center of the bottom-right pixel.
  pixel_coords = coords * [width, height] - 0.5
  # tfa.image.resampler only works with exactly one batch dimension, i.e. it
  # expects image to be [batch, height, width, channels] and pixel_coords to be
  # [batch, ..., 2]. So we need to reshape, perform the resampling, and then
  # reshape back to what we had.
  batch_dims = len(image.shape.as_list()) - 3
  # Batch prefixes of image and coords must agree before flattening.
  assert (image.shape.as_list()[:batch_dims] == pixel_coords.shape.as_list()
          [:batch_dims])
  batched_image, _ = utils.flatten_batch(image, batch_dims)
  batched_coords, unflatten_coords = utils.flatten_batch(
      pixel_coords, batch_dims)
  resampled = tfa.image.resampler(batched_image, batched_coords)
  # Convert back to the right shape to return
  resampled = unflatten_coords(resampled)
  return resampled
def bilinear_forward_warp(image, coords, weights=None):
  """Forward warp each point in an image using bilinear filtering.

  This is a sort of reverse of sample_image, in the sense that scatter is the
  reverse of gather. A new image is generated of the same size as the input, in
  which each pixel has been splatted onto the 2x2 block containing the
  corresponding coordinates, using bilinear weights (multiplied with the input
  per-pixel weights, if supplied). Thus if two or more pixels warp to the same
  point, the result will be a blend of the their values. If no pixels warp to a
  location, the result at that location will be zero.

  Args:
    image: [B0, ..., Bn-1, height, width, channels] image data
    coords: [B0, ..., Bn-1, height, width, 2] (x,y) texture coordinates
    weights: [B0, ... ,Bn-1, height, width] weights for each point. If omitted,
      all points are weighed equally. Use this to implement, for example, soft
      z-buffering.

  Returns:
    [B0, ..., Bn-1, ..., channels] image data, in which each point in the
    input image has been moved to the position indicated by the corresponding
    (x,y) texture coordinates. The image and coords parameters must have
    matching batch dimensions B0, ..., Bn-1.
  """
  # Forward-warp computed using the gradient of reverse-warp. We use a dummy
  # image of the right size for reverse-warping. An extra channel is used to
  # accumulate the total weight for each pixel which we'll then divide by.
  image_and_ones = tf.concat([image, tf.ones_like(image[Ellipsis, -1:])], axis=-1)
  dummy = tf.zeros_like(image_and_ones)
  if weights is None:
    weighted_image = image_and_ones
  else:
    weighted_image = image_and_ones * weights[Ellipsis, tf.newaxis]
  # The gradient of a bilinear gather w.r.t. its source image is exactly the
  # bilinear scatter of the upstream gradient, which is what we want here.
  with tf.GradientTape(watch_accessed_variables=False) as g:
    g.watch(dummy)
    reverse = tf.reduce_sum(
        sample_image(dummy, coords) * weighted_image, [-3, -2])
  grads = g.gradient(reverse, dummy)
  # Split the accumulated color channels from the accumulated weight channel.
  rgb = grads[Ellipsis, :-1]
  total = grads[Ellipsis, -1:]
  # Normalize by total weight; divide_no_nan leaves untouched pixels at zero.
  result = tf.math.divide_no_nan(rgb, total)
  return result
def flow_warp(image, flow):
  """Warp images by resampling according to flow vectors.

  Args:
    image: [..., H, W, C] images
    flow: [..., H, W, 2] (x, y) texture offsets

  Returns:
    [..., H, W, C] resampled images. Each pixel in each output image has been
    bilinearly sampled from the corresponding pixel in its input image plus
    the (x, y) flow vector. The flow vectors are texture coordinate offsets,
    e.g. (1, 1) is an offset of the whole width and height of the image.
    Sampling outside the image yields zero values.
  """
  height, width = image.shape.as_list()[-3:-1]
  # Sample each output pixel at its own center shifted by the flow.
  sample_coords = pixel_center_grid(height, width) + flow
  return sample_image(image, sample_coords)
def flow_forward_warp(image, flow):
    """Forward-warp (splat) images along flow vectors.

    Args:
        image: [..., H, W, C] images.
        flow: [..., H, W, 2] (x, y) texture-coordinate offsets, where (1, 1)
            is an offset of the whole width and height of the image.

    Returns:
        [..., H, W, C] warped images. Each input pixel is moved by its flow
        offset and splatted onto a 2x2 block of output pixels (see
        bilinear_forward_warp). Locations no point warps to are zero.
    """
    shape = image.shape.as_list()
    height, width = shape[-3], shape[-2]
    # Destination coordinates = pixel centers shifted by the flow field.
    splat_coords = pixel_center_grid(height, width) + flow
    return bilinear_forward_warp(image, splat_coords)
| [
"[email protected]"
] | |
cf4869a008091dac50e4e6d07bded0da84f85bb3 | 2bcf18252fa9144ece3e824834ac0e117ad0bdf3 | /zpt/trunk/site-packages/zpt/_pytz/zoneinfo/Asia/Ulan_Bator.py | 23ee14fe6b126706fac6097086cd541788e4110c | [
"MIT",
"ZPL-2.1"
] | permissive | chadwhitacre/public | 32f65ba8e35d38c69ed4d0edd333283a239c5e1d | 0c67fd7ec8bce1d8c56c7ff3506f31a99362b502 | refs/heads/master | 2021-05-10T14:32:03.016000 | 2010-05-13T18:24:20 | 2010-05-13T18:24:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,011 | py | '''tzinfo timezone information for Asia/Ulan_Bator.'''
from zpt._pytz.tzinfo import DstTzInfo
from zpt._pytz.tzinfo import memorized_datetime as d
from zpt._pytz.tzinfo import memorized_ttinfo as i
class Ulan_Bator(DstTzInfo):
'''Asia/Ulan_Bator timezone definition. See datetime.tzinfo for details'''
zone = 'Asia/Ulan_Bator'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1905,7,31,16,52,28),
d(1977,12,31,17,0,0),
d(1983,3,31,16,0,0),
d(1983,9,30,15,0,0),
d(1984,3,31,16,0,0),
d(1984,9,29,18,0,0),
d(1985,3,30,18,0,0),
d(1985,9,28,18,0,0),
d(1986,3,29,18,0,0),
d(1986,9,27,18,0,0),
d(1987,3,28,18,0,0),
d(1987,9,26,18,0,0),
d(1988,3,26,18,0,0),
d(1988,9,24,18,0,0),
d(1989,3,25,18,0,0),
d(1989,9,23,18,0,0),
d(1990,3,24,18,0,0),
d(1990,9,29,18,0,0),
d(1991,3,30,18,0,0),
d(1991,9,28,18,0,0),
d(1992,3,28,18,0,0),
d(1992,9,26,18,0,0),
d(1993,3,27,18,0,0),
d(1993,9,25,18,0,0),
d(1994,3,26,18,0,0),
d(1994,9,24,18,0,0),
d(1995,3,25,18,0,0),
d(1995,9,23,18,0,0),
d(1996,3,30,18,0,0),
d(1996,9,28,18,0,0),
d(1997,3,29,18,0,0),
d(1997,9,27,18,0,0),
d(1998,3,28,18,0,0),
d(1998,9,26,18,0,0),
d(2001,4,27,18,0,0),
d(2001,9,28,17,0,0),
d(2002,3,29,18,0,0),
d(2002,9,27,17,0,0),
d(2003,3,28,18,0,0),
d(2003,9,26,17,0,0),
d(2004,3,26,18,0,0),
d(2004,9,24,17,0,0),
d(2005,3,25,18,0,0),
d(2005,9,23,17,0,0),
d(2006,3,24,18,0,0),
d(2006,9,29,17,0,0),
d(2007,3,30,18,0,0),
d(2007,9,28,17,0,0),
d(2008,3,28,18,0,0),
d(2008,9,26,17,0,0),
d(2009,3,27,18,0,0),
d(2009,9,25,17,0,0),
d(2010,3,26,18,0,0),
d(2010,9,24,17,0,0),
d(2011,3,25,18,0,0),
d(2011,9,23,17,0,0),
d(2012,3,30,18,0,0),
d(2012,9,28,17,0,0),
d(2013,3,29,18,0,0),
d(2013,9,27,17,0,0),
d(2014,3,28,18,0,0),
d(2014,9,26,17,0,0),
d(2015,3,27,18,0,0),
d(2015,9,25,17,0,0),
d(2016,3,25,18,0,0),
d(2016,9,23,17,0,0),
d(2017,3,24,18,0,0),
d(2017,9,29,17,0,0),
d(2018,3,30,18,0,0),
d(2018,9,28,17,0,0),
d(2019,3,29,18,0,0),
d(2019,9,27,17,0,0),
d(2020,3,27,18,0,0),
d(2020,9,25,17,0,0),
d(2021,3,26,18,0,0),
d(2021,9,24,17,0,0),
d(2022,3,25,18,0,0),
d(2022,9,23,17,0,0),
d(2023,3,24,18,0,0),
d(2023,9,29,17,0,0),
d(2024,3,29,18,0,0),
d(2024,9,27,17,0,0),
d(2025,3,28,18,0,0),
d(2025,9,26,17,0,0),
d(2026,3,27,18,0,0),
d(2026,9,25,17,0,0),
d(2027,3,26,18,0,0),
d(2027,9,24,17,0,0),
d(2028,3,24,18,0,0),
d(2028,9,29,17,0,0),
d(2029,3,30,18,0,0),
d(2029,9,28,17,0,0),
d(2030,3,29,18,0,0),
d(2030,9,27,17,0,0),
d(2031,3,28,18,0,0),
d(2031,9,26,17,0,0),
d(2032,3,26,18,0,0),
d(2032,9,24,17,0,0),
d(2033,3,25,18,0,0),
d(2033,9,23,17,0,0),
d(2034,3,24,18,0,0),
d(2034,9,29,17,0,0),
d(2035,3,30,18,0,0),
d(2035,9,28,17,0,0),
d(2036,3,28,18,0,0),
d(2036,9,26,17,0,0),
d(2037,3,27,18,0,0),
d(2037,9,25,17,0,0),
]
_transition_info = [
i(25680,0,'LMT'),
i(25200,0,'ULAT'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
i(32400,3600,'ULAST'),
i(28800,0,'ULAT'),
]
Ulan_Bator = Ulan_Bator()
| [
"[email protected]"
] | |
85ef73de5c1fceffd5aff452e2b9902d1718602f | 5ca6730fa1178582d5f5875155f340ec0f406294 | /practice_problem-16.py | 44785ae4df282d5b7cc6f83173866d825eb41375 | [] | no_license | MahadiRahman262523/Python_Code_Part-1 | 9740d5ead27209d69af4497eea410f2faef50ff3 | e2f08e3d0564a003400743ae6050fd687c280639 | refs/heads/main | 2023-07-25T09:10:53.649000 | 2021-09-05T19:39:14 | 2021-09-05T19:39:14 | 403,396,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | # Write a program to count the number of zeros in the following tuple:
# a = (7,0,8,0,0,9)
a = (7,0,8,0,0,9)
print(a.count(0)) | [
"[email protected]"
] | |
def argmax(s):
    """Return (index, char) pairs for every occurrence of the maximum character of s."""
    best = max(s)
    positions = []
    for idx, ch in enumerate(s):
        if ch == best:
            positions.append((idx, ch))
    return positions
def last(s):
    """Return the lexicographically largest string reachable by recursively
    moving one occurrence of the current maximum character to the front.
    """
    if len(s) <= 1:
        return s
    # For each occurrence of the maximum character, pull it to the front and
    # recurse on the prefix before it; keep the best candidate overall.
    top = max(s)
    candidates = []
    for idx, ch in enumerate(s):
        if ch == top:
            candidates.append(ch + last(s[:idx]) + s[idx + 1:])
    return max(candidates)
# Driver: read the Code Jam input file and write one "Case #i: answer" line
# per test case to the output file.
fw = open('a-o', 'w')
for idx, line in enumerate(open('A-small-i')):
    # The first line of the input holds the number of test cases; skip it.
    if idx == 0:
        continue
    s = line.strip()
    print(s)
    # Case numbers are 1-based, which matches idx since line 0 was skipped.
    fw.write('Case #{0}: {1}\n'.format(idx,last(s)))
| [
"[[email protected]]"
] | |
8732c9af3fea83ea57fa51e58d56b098749760f6 | 6561baa7ca68875e62fbf2d20c7887e4aadebe9f | /tests/cds_test_20_sf_ukmo.py | efa292077e335becd6970c33d7b3c44900ea5f35 | [
"Apache-2.0"
] | permissive | EXWEXs/cfgrib | 9057c9e5abbc38a32f113f832f1506988839ee82 | 8a1727af2c3bbcf2e17f250dfafcb4cc4e959354 | refs/heads/master | 2020-04-01T15:44:45.140000 | 2018-10-14T14:39:13 | 2018-10-14T14:39:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,089 | py |
import pytest
import cfgrib
import cdscommon
TEST_FILES = {
'seasonal-original-single-levels-ukmo': [
'seasonal-original-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'year': '2018',
'month': ['04', '05'],
'day': [
'01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
'13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',
'25', '26', '27', '28', '29', '30', '31'
],
'leadtime_hour': ['24', '48'],
'grid': ['3', '3'],
'format': 'grib',
},
192,
],
'seasonal-original-pressure-levels-ukmo': [
'seasonal-original-pressure-levels',
{
'originating_centre': 'ukmo',
'variable': 'temperature',
'pressure_level': ['500', '850'],
'year': '2018',
'month': ['04', '05'],
'day': [
'01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12',
'13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24',
'25', '26', '27', '28', '29', '30', '31'
],
'leadtime_hour': ['24', '48'],
'grid': ['3', '3'],
'format': 'grib',
},
192,
],
'seasonal-postprocessed-single-levels-ukmo': [
'seasonal-postprocessed-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours_anomaly',
'product_type': 'monthly_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
'seasonal-monthly-single-levels-monthly_mean-ukmo': [
'seasonal-monthly-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'product_type': 'monthly_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
'seasonal-monthly-single-levels-ensemble_mean-ukmo': [
'seasonal-monthly-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'product_type': 'ensemble_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
'seasonal-monthly-single-levels-hindcast_climate_mean-ukmo': [
'seasonal-monthly-single-levels',
{
'originating_centre': 'ukmo',
'variable': 'maximum_2m_temperature_in_the_last_24_hours',
'product_type': 'hindcast_climate_mean',
'year': '2018',
'month': ['04', '05'],
'leadtime_month': ['1', '2'],
'grid': ['3', '3'],
'format': 'grib',
},
210,
],
}
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Stream(test_file):
    """Download (or reuse) the CDS GRIB file and check stream invariants:
    the first message exposes the expected number of keys, and the number
    of messages in the stream matches the leader's 'count' key."""
    dataset, request, expected_keys = TEST_FILES[test_file]
    data_name = 'cds-' + test_file + '-{uuid}.grib'
    path = cdscommon.ensure_data(dataset, request, name=data_name)
    stream = cfgrib.FileStream(path)
    first_message = stream.first()
    assert len(first_message) == expected_keys
    message_total = sum(1 for _ in stream)
    assert message_total == first_message['count']
@pytest.mark.parametrize('test_file', TEST_FILES.keys())
def test_reanalysis_Dataset(test_file):
    """Smoke-test opening the downloaded GRIB as an xarray Dataset and
    round-tripping it to a NetCDF file next to the source file."""
    dataset, request, _ = TEST_FILES[test_file]
    data_name = 'cds-' + test_file + '-{uuid}.grib'
    path = cdscommon.ensure_data(dataset, request, name=data_name)
    ds = cfgrib.xarray_store.open_dataset(path, flavour_name='cds')
    # Strip the '.grib' suffix (5 characters) and write alongside the source.
    netcdf_path = path[:-5] + '.nc'
    ds.to_netcdf(netcdf_path)
"[email protected]"
] | |
5e0bde2a16193651c22bf50efd429a326bf6f474 | 6b564e24a99b2d2c6a384d8674974f10ef9461d5 | /iptv_proxy/providers/crystalclear/data_model.py | 53c6ad0d72865ecf54ed3413a6d9df1d667e4c12 | [
"MIT"
] | permissive | Onemars/IPTVProxy | 1c1421c6962c1f7cf4cef90d8a2c98e98f5ded25 | 06d5472f49ecaa7eafb90832a1c9ac85a09cd268 | refs/heads/master | 2020-05-24T14:34:48.486000 | 2019-05-17T14:17:21 | 2019-05-17T14:17:21 | 187,311,948 | 1 | 0 | null | 2019-05-18T03:58:48 | 2019-05-18T03:58:47 | null | UTF-8 | Python | false | false | 6,858 | py | import logging
from sqlalchemy import Column
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import String
from sqlalchemy.ext.hybrid import hybrid_property
from iptv_proxy.data_model import DateTimeUTC
from iptv_proxy.providers.crystalclear.constants import CrystalClearConstants
from iptv_proxy.providers.crystalclear.db import Base
logger = logging.getLogger(__name__)
class CrystalClearChannel(Base):
    """ORM model for one CrystalClear channel row (table 'channel').

    Stores the channel's m3u8 group/number/name, a pickled channel object,
    and pre-rendered complete/minimal XMLTV fragments. Column attributes are
    private (underscore-prefixed) and exposed through hybrid properties so
    they work both on instances and in SQL expressions.
    """
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'channel'
    _id = Column('id', String, primary_key=True, autoincrement=False)
    _m3u8_group = Column('m3u8_group', String, nullable=False)
    _number = Column('number', Integer, nullable=False)
    _name = Column('name', String, nullable=False)
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)
    # Indexes are prefixed with the provider name so several providers can
    # share one database without index-name collisions.
    __table_args__ = (Index('{0}_channel_ix_id'.format(_provider_name), _id.asc()),
                      Index('{0}_channel_ix_m3u8_group'.format(_provider_name), _m3u8_group.asc()),
                      Index('{0}_channel_ix_m3u8_group_&_number'.format(_provider_name),
                            _m3u8_group.asc(),
                            _number.asc()),
                      Index('{0}_channel_ix_number'.format(_provider_name), _number.asc()))
    def __init__(self, id_, m3u8_group, number, name, pickle, complete_xmltv, minimal_xmltv):
        """Initialize all column attributes; id_ avoids shadowing builtin id."""
        self._id = id_
        self._m3u8_group = m3u8_group
        self._number = number
        self._name = name
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv
    # Hybrid accessors: plain pass-through getters/setters over the private
    # column attributes, usable in both Python and SQL contexts.
    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv
    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv
    @hybrid_property
    def id(self):
        return self._id
    @id.setter
    def id(self, id_):
        self._id = id_
    @hybrid_property
    def m3u8_group(self):
        return self._m3u8_group
    @m3u8_group.setter
    def m3u8_group(self, m3u8_group):
        self._m3u8_group = m3u8_group
    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv
    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv
    @hybrid_property
    def name(self):
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
    @hybrid_property
    def number(self):
        return self._number
    @number.setter
    def number(self, number):
        self._number = number
    @hybrid_property
    def pickle(self):
        return self._pickle
    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle
class CrystalClearProgram(Base):
    """ORM model for one CrystalClear EPG program row (table 'program').

    Stores the program's start/stop times (timezone-aware UTC columns), the
    channel it belongs to (by XMLTV id and by number), a pickled program
    object, and pre-rendered complete/minimal XMLTV fragments. Columns are
    private and exposed through hybrid properties.
    """
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'program'
    _id = Column('id', String, primary_key=True, autoincrement=False)
    _start = Column('start', DateTimeUTC(timezone=True), nullable=False)
    _stop = Column('stop', DateTimeUTC(timezone=True), nullable=False)
    _channel_xmltv_id = Column('channel_xmltv_id', String, nullable=False)
    _channel_number = Column('channel_number', Integer, nullable=False)
    _pickle = Column('pickle', LargeBinary, nullable=False)
    _complete_xmltv = Column('complete_xmltv', String, nullable=False)
    _minimal_xmltv = Column('minimal_xmltv', String, nullable=False)
    # Indexes cover the common EPG lookups: by id, by channel + time window,
    # and by start time alone; names are provider-prefixed.
    __table_args__ = (
        Index('{0}_program_ix_id'.format(_provider_name), _id.asc()),
        Index('{0}_program_ix_channel_number_&_start'.format(_provider_name), _channel_number.asc(), _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start'.format(_provider_name), _channel_xmltv_id.asc(), _start.asc()),
        Index('{0}_program_ix_channel_xmltv_id_&_start_&_stop'.format(_provider_name),
              _channel_xmltv_id.asc(),
              _start.asc(),
              _stop.asc()),
        Index('{0}_program_ix_start'.format(_provider_name), _start.asc()))
    def __init__(self,
                 id_,
                 start,
                 stop,
                 channel_xmltv_id,
                 channel_number,
                 pickle,
                 complete_xmltv,
                 minimal_xmltv):
        """Initialize all column attributes; id_ avoids shadowing builtin id."""
        self._id = id_
        self._start = start
        self._stop = stop
        self._channel_xmltv_id = channel_xmltv_id
        self._channel_number = channel_number
        self._pickle = pickle
        self._complete_xmltv = complete_xmltv
        self._minimal_xmltv = minimal_xmltv
    # Hybrid accessors: plain pass-through getters/setters over the private
    # column attributes, usable in both Python and SQL contexts.
    @hybrid_property
    def channel_number(self):
        return self._channel_number
    @channel_number.setter
    def channel_number(self, channel_number):
        self._channel_number = channel_number
    @hybrid_property
    def channel_xmltv_id(self):
        return self._channel_xmltv_id
    @channel_xmltv_id.setter
    def channel_xmltv_id(self, channel_xmltv_id):
        self._channel_xmltv_id = channel_xmltv_id
    @hybrid_property
    def complete_xmltv(self):
        return self._complete_xmltv
    @complete_xmltv.setter
    def complete_xmltv(self, complete_xmltv):
        self._complete_xmltv = complete_xmltv
    @hybrid_property
    def id(self):
        return self._id
    @id.setter
    def id(self, id_):
        self._id = id_
    @hybrid_property
    def minimal_xmltv(self):
        return self._minimal_xmltv
    @minimal_xmltv.setter
    def minimal_xmltv(self, minimal_xmltv):
        self._minimal_xmltv = minimal_xmltv
    @hybrid_property
    def pickle(self):
        return self._pickle
    @pickle.setter
    def pickle(self, pickle):
        self._pickle = pickle
    @hybrid_property
    def start(self):
        return self._start
    @start.setter
    def start(self, start):
        self._start = start
    @hybrid_property
    def stop(self):
        return self._stop
    @stop.setter
    def stop(self, stop):
        self._stop = stop
class CrystalClearSetting(Base):
    """ORM model for one provider setting as a name/value string pair
    (table 'setting')."""
    _provider_name = CrystalClearConstants.PROVIDER_NAME.lower()
    __tablename__ = 'setting'
    _name = Column('name', String, primary_key=True)
    _value = Column('value', String, nullable=False)
    # NOTE(review): unlike the channel/program tables, this index name is not
    # provider-prefixed — confirm whether that is intentional.
    __table_args__ = (Index('setting_ix_name', _name.asc()),)
    def __init__(self, name, value):
        self._name = name
        self._value = value
    @hybrid_property
    def name(self):
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
    @hybrid_property
    def value(self):
        return self._value
    @value.setter
    def value(self, value):
        self._value = value
| [
"[email protected]"
] | |
727c6dd5a9d6d63154d4df935778852dc73c00fa | c590571d129ead00bd1916025f854a1719d75683 | /zvt/recorders/joinquant/meta/china_stock_meta_recorder.py | fa4a0c4364dd713ab0f74d8b7829a1b6f86f10ac | [
"MIT"
] | permissive | ming123jew/zvt | f2fb8e157951e9440a6decd5ae0c08ea227a39db | de66a48ad2a3ac2c3fb22b9ea17a85f28e95cc62 | refs/heads/master | 2023-05-28T15:00:52.015000 | 2021-06-13T12:56:18 | 2021-06-13T12:56:18 | 570,070,597 | 1 | 0 | MIT | 2022-11-24T09:16:48 | 2022-11-24T09:16:47 | null | UTF-8 | Python | false | false | 5,733 | py | # -*- coding: utf-8 -*-
import pandas as pd
from jqdatapy.api import get_all_securities, run_query
from zvt.api.quote import china_stock_code_to_id, portfolio_relate_stock
from zvt.contract.api import df_to_db, get_entity_exchange, get_entity_code
from zvt.contract.recorder import Recorder, TimeSeriesDataRecorder
from zvt.domain import EtfStock, Stock, Etf, StockDetail
from zvt.recorders.joinquant.common import to_entity_id, jq_to_report_period
from zvt.utils.pd_utils import pd_is_not_null
from zvt.utils.time_utils import to_time_str
class BaseJqChinaMetaRecorder(Recorder):
    """Base recorder for JoinQuant China entity metadata (stocks, ETFs).

    Provides to_zvt_entity(), which converts a raw JoinQuant security
    DataFrame into the zvt entity schema shared by the concrete recorders.
    """
    provider = 'joinquant'
    def __init__(self, batch_size=10, force_update=True, sleeping_time=10) -> None:
        super().__init__(batch_size, force_update, sleeping_time)
    def to_zvt_entity(self, df, entity_type, category=None):
        """Normalise a JoinQuant security frame into zvt entity columns.

        Assumes df has columns: code (index after set_index), start_date,
        end_date, display_name — TODO confirm against jqdatapy's output.
        """
        # Move the JoinQuant code into an 'entity_id' column.
        df = df.set_index('code')
        df.index.name = 'entity_id'
        df = df.reset_index()
        # listing date
        df.rename(columns={'start_date': 'timestamp'}, inplace=True)
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        df['list_date'] = df['timestamp']
        df['end_date'] = pd.to_datetime(df['end_date'])
        # Rewrite the JoinQuant code into a zvt entity id, then derive the
        # id/exchange/code columns from it.
        df['entity_id'] = df['entity_id'].apply(lambda x: to_entity_id(entity_type=entity_type, jq_code=x))
        df['id'] = df['entity_id']
        df['entity_type'] = entity_type
        df['exchange'] = df['entity_id'].apply(lambda x: get_entity_exchange(x))
        df['code'] = df['entity_id'].apply(lambda x: get_entity_code(x))
        df['name'] = df['display_name']
        if category:
            df['category'] = category
        return df
class JqChinaStockRecorder(BaseJqChinaMetaRecorder):
    """Fetch the full stock list from JoinQuant and persist it."""
    data_schema = Stock
    def run(self):
        # Download the raw security list and normalise it into zvt entities.
        raw = get_all_securities(code='stock')
        df_stock = self.to_zvt_entity(raw, entity_type='stock')
        # Persist the same frame under both the Stock and StockDetail schemas.
        for schema in (Stock, StockDetail):
            df_to_db(df=df_stock, data_schema=schema, provider=self.provider,
                     force_update=self.force_update)
        self.logger.info("persist stock list success")
class JqChinaEtfRecorder(BaseJqChinaMetaRecorder):
    """Fetch the ETF list from JoinQuant and persist it."""
    data_schema = Etf
    def run(self):
        # Download the raw ETF list and normalise it into zvt entities.
        raw = get_all_securities(code='etf')
        etf_df = self.to_zvt_entity(raw, entity_type='etf', category='etf')
        df_to_db(etf_df, data_schema=Etf, provider=self.provider,
                 force_update=self.force_update)
        self.logger.info("persist etf list success")
class JqChinaStockEtfPortfolioRecorder(TimeSeriesDataRecorder):
    """Record ETF stock-portfolio holdings from JoinQuant's
    finance.FUND_PORTFOLIO_STOCK table into the EtfStock schema."""
    entity_provider = 'joinquant'
    entity_schema = Etf
    # data comes from JoinQuant (jq)
    provider = 'joinquant'
    data_schema = EtfStock
    def __init__(self, entity_type='etf', exchanges=['sh', 'sz'], entity_ids=None, codes=None, day_data=True, batch_size=10,
                 force_update=False, sleeping_time=5, default_size=2000, real_time=False, fix_duplicate_way='add',
                 start_timestamp=None, end_timestamp=None, close_hour=0, close_minute=0) -> None:
        # Pass everything through to TimeSeriesDataRecorder unchanged.
        super().__init__(entity_type, exchanges, entity_ids, codes, day_data, batch_size, force_update, sleeping_time,
                         default_size, real_time, fix_duplicate_way, start_timestamp, end_timestamp, close_hour,
                         close_minute)
    def record(self, entity, start, end, size, timestamps):
        """Fetch portfolio rows published since *start* for one ETF and
        persist them; returns None (nothing left for the caller to save)."""
        # JoinQuant query-condition syntax uses '#' as the operator delimiter.
        df = run_query(table='finance.FUND_PORTFOLIO_STOCK',
                       conditions=f'pub_date#>=#{to_time_str(start)}&code#=#{entity.code}',
                       parse_dates=None)
        if pd_is_not_null(df):
            # Sample of the raw result:
            # id code period_start period_end pub_date report_type_id report_type rank symbol name shares market_cap proportion
            # 0 8640569 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 1 601318 中国平安 19869239.0 1.361043e+09 7.09
            # 1 8640570 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 2 600519 贵州茅台 921670.0 6.728191e+08 3.50
            # 2 8640571 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 3 600036 招商银行 18918815.0 5.806184e+08 3.02
            # 3 8640572 159919 2018-07-01 2018-09-30 2018-10-26 403003 第三季度 4 601166 兴业银行 22862332.0 3.646542e+08 1.90
            df['timestamp'] = pd.to_datetime(df['pub_date'])
            df.rename(columns={'symbol': 'stock_code', 'name': 'stock_name'}, inplace=True)
            # JoinQuant reports proportion as a percentage; store as a ratio.
            df['proportion'] = df['proportion'] * 0.01
            df = portfolio_relate_stock(df, entity)
            df['stock_id'] = df['stock_code'].apply(lambda x: china_stock_code_to_id(x))
            # Composite row id: entity + stock + publication date + source id.
            df['id'] = df[['entity_id', 'stock_id', 'pub_date', 'id']].apply(lambda x: '_'.join(x.astype(str)), axis=1)
            df['report_date'] = pd.to_datetime(df['period_end'])
            df['report_period'] = df['report_type'].apply(lambda x: jq_to_report_period(x))
            df_to_db(df=df, data_schema=self.data_schema, provider=self.provider, force_update=self.force_update)
            # self.logger.info(df.tail())
            self.logger.info(f"persist etf {entity.code} portfolio success {df.iloc[-1]['pub_date']}")
        return None
if __name__ == '__main__':
# JqChinaEtfRecorder().run()
JqChinaStockEtfPortfolioRecorder(codes=['510050']).run()
# the __all__ is generated
__all__ = ['BaseJqChinaMetaRecorder', 'JqChinaStockRecorder', 'JqChinaEtfRecorder', 'JqChinaStockEtfPortfolioRecorder'] | [
"[email protected]"
] | |
da4e65994020ecec1aae6923a1bd83b3951032e3 | a90ba084b85683f4c52d0e638cfb6108207ced38 | /896.py | 91ca187efe65342ba1e072994842f422f065f605 | [] | no_license | JiayuZhai/leetcode_python3 | 4a9260d00a52cde9ec37e6292e64d04161e66111 | 5755c3edd6d949af18d0247d2103379510dfab85 | refs/heads/master | 2020-04-02T21:22:42.270000 | 2019-03-29T23:28:48 | 2019-03-29T23:28:48 | 154,796,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 162 | py | class Solution:
def isMonotonic(self, A: List[int]) -> bool:
sortA = sorted(A)
return (A == sortA or list(reversed(A)) == sortA)
| [
"[email protected]"
] | |
90a5ad57cf62d7082f693f949d412f2d773b647a | 844c7f8fb8d6bfab912583c71b93695167c59764 | /fixação/Seção06/51-60/Sec06Ex51v2.py | 35580169e28f8bc9bc58b28718531dd96aa9d948 | [
"Apache-2.0"
] | permissive | gugajung/guppe | 2be10656cd9aa33be6afb8e86f20df82662bcc59 | a0ee7b85e8687e8fb8243fbb509119a94bc6460f | refs/heads/main | 2023-05-28T08:08:24.963000 | 2021-06-07T16:56:11 | 2021-06-07T16:56:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 624 | py | from datetime import date
# Starting year and salary for the simulation (exercise 51, section 06).
anoAtual = 1995
salarioAtual = 2000
# Raise percentage; doubled after every simulated year (see loop below).
percentAumento = 1.5
dataAtual = date.today()
anoReal = dataAtual.year
# Apply one raise per year from 1995 up to and including the current year,
# printing the intermediate state for each year.
while anoAtual <= anoReal:
    salarioAtual = salarioAtual + ((salarioAtual*percentAumento)/100)
    print("----------------------------------------")
    print(" --- debug")
    print(f" --- > Ano Atual : {anoAtual}")
    print(f" --- > Salario Atual : {salarioAtual:.2f}")
    print(f" --- > Percente de Aumento : {percentAumento:.4f}")
    anoAtual += 1
    percentAumento *= 2
print("=================================================")
print("Final de O programas") | [
"[email protected]"
] | |
d3e7e9dae606fe6dc77d9c43997e9c592fbcd477 | 982bc95ab762829c8b6913e44504415cdd77241a | /account_easy_reconcile/base_reconciliation.py | b50c06b9eed699d96da272f0fb9dd9613177c235 | [] | no_license | smart-solution/natuurpunt-finance | 6b9eb65be96a4e3261ce46d7f0c31de3589e1e0d | 6eeb48468792e09d46d61b89499467a44d67bc79 | refs/heads/master | 2021-01-23T14:42:05.017000 | 2020-11-03T15:56:35 | 2020-11-03T15:56:35 | 39,186,046 | 0 | 1 | null | 2020-11-03T15:56:37 | 2015-07-16T08:36:54 | Python | UTF-8 | Python | false | false | 7,776 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright 2012-2013 Camptocamp SA (Guewen Baconnier)
# Copyright (C) 2010 Sébastien Beau
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
from operator import itemgetter, attrgetter
class easy_reconcile_base(orm.AbstractModel):
    """Abstract Model for reconciliation methods.

    Concrete reconciliation methods inherit this model and implement
    _action_rec(); the helpers here build the move-line SQL fragments,
    compute write-off totals and reconciliation dates, and perform the
    actual (full or partial) reconciliation. Python 2 / OpenERP ORM code.
    """
    _name = 'easy.reconcile.base'
    _inherit = 'easy.reconcile.options'
    _columns = {
        'account_id': fields.many2one(
            'account.account', 'Account', required=True),
        'partner_ids': fields.many2many(
            'res.partner', string="Restrict on partners"),
        # other columns are inherited from easy.reconcile.options
    }
    def automatic_reconcile(self, cr, uid, ids, context=None):
        """ Reconciliation method called from the view.
        :return: list of reconciled ids, list of partially reconciled items
        """
        if isinstance(ids, (int, long)):
            ids = [ids]
        # The view button passes exactly one record; anything else is a bug.
        assert len(ids) == 1, "Has to be called on one id"
        rec = self.browse(cr, uid, ids[0], context=context)
        return self._action_rec(cr, uid, rec, context=context)
    def _action_rec(self, cr, uid, rec, context=None):
        """ Must be inherited to implement the reconciliation
        :return: list of reconciled ids
        """
        raise NotImplementedError
    def _base_columns(self, rec):
        """ Mandatory columns for move lines queries
        An extra column aliased as ``key`` should be defined
        in each query."""
        aml_cols = (
            'id',
            'debit',
            'credit',
            'date',
            'period_id',
            'ref',
            'name',
            'partner_id',
            'account_id',
            'move_id')
        return ["account_move_line.%s" % col for col in aml_cols]
    def _select(self, rec, *args, **kwargs):
        # SELECT clause over the mandatory move-line columns.
        return "SELECT %s" % ', '.join(self._base_columns(rec))
    def _from(self, rec, *args, **kwargs):
        return "FROM account_move_line"
    def _where(self, rec, *args, **kwargs):
        """Build the WHERE clause restricting to the configured account's
        unreconciled lines, optionally restricted to selected partners.
        Returns (where_sql, params_list)."""
        where = ("WHERE account_move_line.account_id = %s "
                 "AND account_move_line.reconcile_id IS NULL ")
        # it would be great to use dict for params
        # but as we use _where_calc in _get_filter
        # which returns a list, we have to
        # accomodate with that
        params = [rec.account_id.id]
        if rec.partner_ids:
            where += " AND account_move_line.partner_id IN %s"
            params.append(tuple([l.id for l in rec.partner_ids]))
        return where, params
    def _get_filter(self, cr, uid, rec, context):
        """Translate the record's stored domain filter (a Python-literal
        string) into an extra SQL fragment and its parameters."""
        ml_obj = self.pool.get('account.move.line')
        where = ''
        params = []
        if rec.filter:
            # SECURITY NOTE(review): rec.filter is eval'd as Python. This is
            # acceptable only because the filter is admin-configured; it must
            # never contain user-supplied input.
            dummy, where, params = ml_obj._where_calc(
                cr, uid, eval(rec.filter), context=context).get_sql()
            if where:
                where = " AND %s" % where
        return where, params
    def _below_writeoff_limit(self, cr, uid, rec, lines,
                              writeoff_limit, context=None):
        """Sum the lines' debits and credits and check whether the residual
        (debit - credit, rounded to the accounting precision) is within the
        write-off limit. Returns (below_limit, debit_total, credit_total)."""
        precision = self.pool.get('decimal.precision').precision_get(
            cr, uid, 'Account')
        keys = ('debit', 'credit')
        # NOTE: the lambda's argument names are swapped relative to reduce's
        # (accumulator, item) convention — 'line' is the accumulator and
        # 'memo' the current item — but since addition is commutative the
        # debit/credit sums are still correct.
        sums = reduce(
            lambda line, memo:
            dict((key, value + memo[key])
                 for key, value
                 in line.iteritems()
                 if key in keys), lines)
        debit, credit = sums['debit'], sums['credit']
        writeoff_amount = round(debit - credit, precision)
        return bool(writeoff_limit >= abs(writeoff_amount)), debit, credit
    def _get_rec_date(self, cr, uid, rec, lines,
                      based_on='end_period_last_credit', context=None):
        """Pick the reconciliation date according to the *based_on* policy:
        end of the last period (of all lines or of the credit lines only),
        or the newest line date (of all / credit-only / debit-only lines).
        Returns None for unknown policies, meaning 'reconcile as of today'."""
        period_obj = self.pool.get('account.period')
        def last_period(mlines):
            # Latest accounting period among the given lines.
            period_ids = [ml['period_id'] for ml in mlines]
            periods = period_obj.browse(
                cr, uid, period_ids, context=context)
            return max(periods, key=attrgetter('date_stop'))
        def last_date(mlines):
            # Line with the most recent 'date' value.
            return max(mlines, key=itemgetter('date'))
        def credit(mlines):
            return [l for l in mlines if l['credit'] > 0]
        def debit(mlines):
            return [l for l in mlines if l['debit'] > 0]
        if based_on == 'end_period_last_credit':
            return last_period(credit(lines)).date_stop
        if based_on == 'end_period':
            return last_period(lines).date_stop
        elif based_on == 'newest':
            return last_date(lines)['date']
        elif based_on == 'newest_credit':
            return last_date(credit(lines))['date']
        elif based_on == 'newest_debit':
            return last_date(debit(lines))['date']
        # reconcilation date will be today
        # when date is None
        return None
    def _reconcile_lines(self, cr, uid, rec, lines, allow_partial=False, context=None):
        """ Try to reconcile given lines
        :param list lines: list of dict of move lines, they must at least
                           contain values for : id, debit, credit
        :param boolean allow_partial: if True, partial reconciliation will be
               created, otherwise only Full
               reconciliation will be created
        :return: tuple of boolean values, first item is wether the items
                 have been reconciled or not,
                 the second is wether the reconciliation is full (True)
                 or partial (False)
        """
        if context is None:
            context = {}
        ml_obj = self.pool.get('account.move.line')
        writeoff = rec.write_off
        line_ids = [l['id'] for l in lines]
        below_writeoff, sum_debit, sum_credit = self._below_writeoff_limit(
            cr, uid, rec, lines, writeoff, context=context)
        date = self._get_rec_date(
            cr, uid, rec, lines, rec.date_base_on, context=context)
        # date_p in the context sets the reconciliation date used by the ORM.
        rec_ctx = dict(context, date_p=date)
        if below_writeoff:
            # Residual fits in the write-off limit: full reconciliation,
            # booking the difference on the profit or loss account.
            if sum_credit < sum_debit:
                writeoff_account_id = rec.account_profit_id.id
            else:
                writeoff_account_id = rec.account_lost_id.id
            period_id = self.pool.get('account.period').find(
                cr, uid, dt=date, context=context)[0]
            ml_obj.reconcile(
                cr, uid,
                line_ids,
                type='auto',
                writeoff_acc_id=writeoff_account_id,
                writeoff_period_id=period_id,
                writeoff_journal_id=rec.journal_id.id,
                context=rec_ctx)
            return True, True
        elif allow_partial:
            # Residual too large for a write-off: fall back to a partial
            # reconciliation if the caller allows it.
            ml_obj.reconcile_partial(
                cr, uid,
                line_ids,
                type='manual',
                context=rec_ctx)
            return True, False
        return False, False
| [
"[email protected]"
] | |
ae535fe72253b6c574f7196c75a3b64e003c3ea3 | ccb6918eff9624bc890c4318462b3d04fe01ab25 | /d02/for/for/settings.py | 763917cea83d3de15fae9c387027213bdac3fd6e | [] | no_license | shchliu/19django | 431202f3b4a71fb2614f3f113174df327a338413 | 63af6aeff279a83fb170c1b5385d0804d96fafad | refs/heads/master | 2020-08-15T08:53:36.707000 | 2019-10-16T08:26:41 | 2019-10-16T08:28:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,121 | py | """
Django settings for for project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'n$s!ww49p_&vb4(^$4-n#s(98qsu+(61j_2w2)&7pbx+3(k_x+'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'for.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'for.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
b6e187de710d37037dd7c0d830a50e7eaee1aa28 | 786027545626c24486753351d6e19093b261cd7d | /ghidra9.2.1_pyi/ghidra/app/util/bin/format/xcoff/XCoffSectionHeaderFlags.pyi | 43a745532a3157885655ec9c25a175e6ac3df2ec | [
"MIT"
] | permissive | kohnakagawa/ghidra_scripts | 51cede1874ef2b1fed901b802316449b4bf25661 | 5afed1234a7266c0624ec445133280993077c376 | refs/heads/main | 2023-03-25T08:25:16.842000 | 2021-03-18T13:31:40 | 2021-03-18T13:31:40 | 338,577,905 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 772 | pyi | import java.lang
class XCoffSectionHeaderFlags(object):
STYP_BSS: int = 128
STYP_DATA: int = 64
STYP_DEBUG: int = 8192
STYP_EXCEPT: int = 128
STYP_INFO: int = 512
STYP_LOADER: int = 4096
STYP_OVRFLO: int = 32768
STYP_PAD: int = 8
STYP_TEXT: int = 32
STYP_TYPCHK: int = 16384
def __init__(self): ...
def equals(self, __a0: object) -> bool: ...
def getClass(self) -> java.lang.Class: ...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
| [
"[email protected]"
] | |
ac2cbb0b731b97e581da7a9f035b4ce7209d5dbf | f08336ac8b6f8040f6b2d85d0619d1a9923c9bdf | /223-rectangleArea.py | b77b9c32e8858d4b5b81adab6076c7a69ecfadeb | [] | no_license | MarshalLeeeeee/myLeetCodes | fafadcc35eef44f431a008c1be42b1188e7dd852 | 80e78b153ad2bdfb52070ba75b166a4237847d75 | refs/heads/master | 2020-04-08T16:07:47.943000 | 2019-02-21T01:43:16 | 2019-02-21T01:43:16 | 159,505,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 975 | py | '''
223.Rectangle Area
Find the total area covered by two rectilinear rectangles in a 2D plane.
Each rectangle is defined by its bottom left corner and top right corner as shown in the figure.
Example:
Input: A = -3, B = 0, C = 3, D = 4, E = 0, F = -1, G = 9, H = 2
Output: 45
Note:
Assume that the total area is never beyond the maximum possible value of int.
'''
class Solution:
def computeArea(self, A, B, C, D, E, F, G, H):
"""
:type A: int
:type B: int
:type C: int
:type D: int
:type E: int
:type F: int
:type G: int
:type H: int
:rtype: int
"""
X = [[A,0],[C,0],[E,1],[G,1]]
Y = [[B,0],[D,0],[F,1],[H,1]]
X.sort(key=lambda k: k[0])
Y.sort(key=lambda k: k[0])
#print(X,Y)
common = (X[2][0]-X[1][0])*(Y[2][0]-Y[1][0]) if X[0][1] ^ X[1][1] and Y[0][1] ^ Y[1][1] else 0
return (C-A)*(D-B) + (G-E)*(H-F) - common
| [
"[email protected]"
] | |
5575a34bb47b7f44bc2177357c0b7f8fb5fef18c | 6260fd806b3bf82a601c86c8a903b49c983d9dda | /w3resource/7.py | 03955a8d513c09e32bafc6d84f5fc6e5dfef3e0a | [] | no_license | skybohannon/python | 6162077e4f18d0ed273d47c342620942e531031b | b78ac8ff1758826d9dd9c969096fb1f10783a4be | refs/heads/master | 2021-09-05T07:09:23.844000 | 2018-01-25T02:58:59 | 2018-01-25T02:58:59 | 106,215,285 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 300 | py | # 7. Write a Python program to accept a filename from the user and print the extension of that. Go to the editor
# Sample filename : abc.java
# Output : java
user_file = input("Please enter a filename: ")
user_ext = user_file.split(".")
print("The file extension is .{}".format(repr(user_ext[-1]))) | [
"[email protected]"
] | |
84f43b493da4922aa43b8e092c662bce4e358e7d | 1ba59e2cf087fc270dd32b24ac1d76e4b309afcc | /config.py | 1b8fab6b06225fad9e290177b7e86c43413ce3c7 | [
"MIT"
] | permissive | yangtong1989/Deep-Residual-Matting | 2d96ce737b2b89859695e6f4f052c8984eba96bb | 24bd5342b862e447fb7f4dec7edebdd73221db18 | refs/heads/master | 2020-08-31T23:48:39.028000 | 2019-10-18T10:12:45 | 2019-10-18T10:12:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,143 | py | import torch
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # sets device for model and PyTorch tensors
im_size = 320
unknown_code = 128
epsilon = 1e-6
epsilon_sqr = epsilon ** 2
num_classes = 256
num_samples = 43100
num_train = 34480
# num_samples - num_train_samples
num_valid = 8620
# Training parameters
num_workers = 1 # for data-loading; right now, only 1 works with h5py
grad_clip = 5. # clip gradients at an absolute value of
print_freq = 100 # print training/validation stats every __ batches
checkpoint = None # path to checkpoint, None if none
##############################################################
# Set your paths here
# path to provided foreground images
fg_path = 'data/fg/'
# path to provided alpha mattes
a_path = 'data/mask/'
# Path to background images (MSCOCO)
bg_path = 'data/bg/'
# Path to folder where you want the composited images to go
out_path = 'data/merged/'
max_size = 1600
fg_path_test = 'data/fg_test/'
a_path_test = 'data/mask_test/'
bg_path_test = 'data/bg_test/'
out_path_test = 'data/merged_test/'
##############################################################
| [
"[email protected]"
] | |
2b05aafb513ea6ad66865aaa00981d7ff30884e1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2733/40186/320060.py | 85feba17c1b35b4a3536d8fcea4725c382ec5d13 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122000 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | inp=input()
a=input()
if inp=='8 3' and a=='10 7 9 3 4 5 8 17':
print(10)
print(17)
print(9)
elif a=='5 27 1 3 4 2 8 17':
print(5)
print(27)
print(5)
elif a=='105 2 9 3 8 5 7 7':
print(2)
print(8)
print(9)
print(105)
print(7)
elif inp=='101011':
print(18552)
elif inp=='10101101010111110100110100101010110001010010101001':
print(322173207)
else:
print(inp)
print(a)
print(b) | [
"[email protected]"
] | |
0d4ab487c9de86cce3e199c7f5a4c2c87e57c607 | 2612f336d667a087823234daf946f09b40d8ca3d | /python/lib/Lib/site-packages/django/contrib/gis/tests/geoapp/models.py | 89027eedfbc919466ac7c1335c42dfb57aea547a | [
"Apache-2.0"
] | permissive | tnorbye/intellij-community | df7f181861fc5c551c02c73df3b00b70ab2dd589 | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | refs/heads/master | 2021-04-06T06:57:57.974000 | 2018-03-13T17:37:00 | 2018-03-13T17:37:00 | 125,079,130 | 2 | 0 | Apache-2.0 | 2018-03-13T16:09:41 | 2018-03-13T16:09:41 | null | UTF-8 | Python | false | false | 1,546 | py | from django.contrib.gis.db import models
from django.contrib.gis.tests.utils import mysql, spatialite
# MySQL spatial indices can't handle NULL geometries.
null_flag = not mysql
class Country(models.Model):
name = models.CharField(max_length=30)
mpoly = models.MultiPolygonField() # SRID, by default, is 4326
objects = models.GeoManager()
def __unicode__(self): return self.name
class City(models.Model):
name = models.CharField(max_length=30)
point = models.PointField()
objects = models.GeoManager()
def __unicode__(self): return self.name
# This is an inherited model from City
class PennsylvaniaCity(City):
county = models.CharField(max_length=30)
objects = models.GeoManager() # TODO: This should be implicitly inherited.
class State(models.Model):
name = models.CharField(max_length=30)
poly = models.PolygonField(null=null_flag) # Allowing NULL geometries here.
objects = models.GeoManager()
def __unicode__(self): return self.name
class Track(models.Model):
name = models.CharField(max_length=30)
line = models.LineStringField()
objects = models.GeoManager()
def __unicode__(self): return self.name
if not spatialite:
class Feature(models.Model):
name = models.CharField(max_length=20)
geom = models.GeometryField()
objects = models.GeoManager()
def __unicode__(self): return self.name
class MinusOneSRID(models.Model):
geom = models.PointField(srid=-1) # Minus one SRID.
objects = models.GeoManager()
| [
"[email protected]"
] | |
33877bf7341e29b7edab2e7b7919f5bd03bfdc76 | 9507ff9e9bca2ca8104369c9e25acd74d308e9b3 | /sta8100_upload/upload.py | 6d962eeda6a0d7bd66233d1d52e6df9d0cd024bf | [] | no_license | yangkang411/python_tool | 03e483c7ec7e1e76284f93cf5b9086fdf98af826 | 713071a9fbabfabcbc3c16ce58d1382c410a7ea3 | refs/heads/master | 2023-03-17T16:14:03.332000 | 2020-09-10T02:37:05 | 2020-09-10T02:37:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | #!/usr/bin/python
import os
if __name__ == '__main__':
cmd = "TeseoProgrammer_v2.9.0.exe program -f t5 -i sta.bin -o log.txt -c com53 -b 230400 -m SQI -d 0x10000400 -e TRUE -r TRUE";
print ("cmd = %s" % cmd);
os.system(cmd)
| [
"[email protected]"
] | |
3bacf127b039262cc40bb14e97fd4da50cac4c40 | 1c19db866110afddb04d2e9715b49909c7fbb3d4 | /tests/test_user_locale.py | 4635899202d226e926f9194aa81e0dcb4a0fc936 | [
"BSD-2-Clause"
] | permissive | shane-kerr/peeringdb | 505dd5087abe29c9d6013e81b5322d7259a97106 | 5f189631a4d60d3fde662743508784affc6fa22a | refs/heads/master | 2020-09-14T16:25:33.442000 | 2019-11-21T13:54:32 | 2019-11-21T13:54:32 | 223,183,848 | 0 | 0 | NOASSERTION | 2019-11-21T13:54:34 | 2019-11-21T13:44:59 | null | UTF-8 | Python | false | false | 2,541 | py | import pytest
import json
from django.test import Client, TestCase, RequestFactory
from django.contrib.auth.models import Group
import peeringdb_server.models as models
#from django.template import Context, Template
#from django.utils import translation
class UserLocaleTests(TestCase):
"""
Test peeringdb_server.models.User functions
"""
@classmethod
def setUpTestData(cls):
user_group = Group.objects.create(name="user")
for name in ["user_undef", "user_en", "user_pt"]:
setattr(cls, name,
models.User.objects.create_user(
name, "%s@localhost" % name, first_name=name,
last_name=name, password=name))
cls.user_en.set_locale('en')
cls.user_pt.set_locale('pt')
user_group.user_set.add(cls.user_en)
user_group.user_set.add(cls.user_pt)
user_group.user_set.add(cls.user_undef)
cls.user_undef.save()
cls.user_en.save()
cls.user_pt.save()
def setUp(self):
self.factory = RequestFactory()
def test_user_locale(self):
"""
Tests if user profile page has the right language
Note: Don't use Client.login(...) since it will miss language setting in the session
"""
#t = Template("{% load i18n %}{% get_current_language as LANGUAGE_CODE %}{{ LANGUAGE_CODE }}")
#print(t.render(Context({})))
#translation.activate('pt')
#print(t.render(Context({})))
#u_pt = models.User.objects.get(username="user_pt")
#print(u_pt.get_locale())
c = Client()
resp = c.get("/profile", follow=True)
data = {
"next": "/profile",
"username": "user_en",
"password": "user_en"
}
resp = c.post("/auth", data, follow=True)
self.assertGreater(
resp.content.find('<!-- Current language: en -->'), -1)
c.logout()
data = {
"next": "/profile",
"username": "user_pt",
"password": "user_pt"
}
resp = c.post("/auth", data, follow=True)
self.assertGreater(
resp.content.find('<!-- Current language: pt -->'), -1)
c.logout()
data = {
"next": "/profile",
"username": "user_undef",
"password": "user_undef"
}
resp = c.post("/auth", data, follow=True)
self.assertGreater(
resp.content.find('<!-- Current language: en -->'), -1)
| [
"[email protected]"
] | |
a7f8d8f49b6809525e29121763627e7f50f9f9f7 | ab8a34e5b821dde7b09abe37c838de046846484e | /twilio/sample-code-master/notify/v1/user/read-default/read-default.6.x.py | 21a1ceb49f9637120f11fe5bf78cba619a151b3e | [] | no_license | sekharfly/twilio | 492b599fff62618437c87e05a6c201d6de94527a | a2847e4c79f9fbf5c53f25c8224deb11048fe94b | refs/heads/master | 2020-03-29T08:39:00.079000 | 2018-09-21T07:20:24 | 2018-09-21T07:20:24 | 149,721,431 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 467 | py | # Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
users = client.notify.services('ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.users \
.list()
for record in users:
print(record.sid)
| [
"[email protected]"
] | |
cb28e85295b024bb0498aa6b4989914be951cfa0 | 7963f09b4002249e73496c6cbf271fd6921b3d22 | /tests/test_cpy.py | 7b453154c26e92a9cf985753289721778c504e43 | [] | no_license | thales-angelino/py6502emulator | 6df908fc02f29b41fad550c8b773723a7b63c414 | 1cea28489d51d77d2dec731ab98a6fe8a515a2a8 | refs/heads/master | 2023-03-19T14:46:17.393000 | 2021-03-08T04:10:45 | 2021-03-08T04:10:45 | 345,754,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,773 | py | import unittest
from emulator_6502 import emulator_6502 as emulator
from emulator_6502.instructions import cpy
class TestCPX(unittest.TestCase):
def setUp(self):
self.memory = emulator.Memory()
self.cpu = emulator.CPU(self.memory)
self.cpu.reset()
def test_cpy_scenario_1(self):
operand = 0x10
expected_zero = 0
expected_negative = 0
expected_carry = 1
self.cpu.y = 0x50
self.cpu.cpy(operand)
self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)
def test_cpy_scenario_2(self):
operand = 0x50
expected_zero = 1
expected_negative = 0
expected_carry = 1
self.cpu.y = 0x50
self.cpu.cpy(operand)
self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)
def test_cpy_scenario_3(self):
operand = 0x60
expected_zero = 0
expected_negative = 1
expected_carry = 0
self.cpu.y = 0x50
self.cpu.cpy(operand)
self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)
def test_cpy_immediate(self):
expected_cycles = 2
value = 0x10
self.cpu.y = 0x50
expected_zero = 0
expected_negative = 0
expected_carry = 1
self.memory.memory[emulator.START_ADDRESS] = cpy.CPY_IMMEDIATE_OPCODE
self.memory.memory[emulator.START_ADDRESS + 1] = value
self.cpu.execute(1)
self.assertEqual(self.cpu.cycles, expected_cycles, "CPU cycles should be %d" % expected_cycles)
self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)
def test_cpy_absolute(self):
expected_cycles = 4
value = 0x10
self.cpu.y = 0x50
expected_zero = 0
expected_negative = 0
expected_carry = 1
self.memory.memory[emulator.START_ADDRESS] = cpy.CPY_ABSOLUTE_OPCODE
self.memory.memory[emulator.START_ADDRESS + 1] = 0xff # LSB FIRST!!!
self.memory.memory[emulator.START_ADDRESS + 2] = 0x02
self.memory.memory[0x02ff] = value
self.cpu.execute(1)
self.assertEqual(self.cpu.cycles, expected_cycles, "CPU cycles should be %d" % expected_cycles)
self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)
def test_cpy_zeropage(self):
expected_cycles = 3
value = 0x10
self.cpu.y = 0x50
expected_zero = 0
expected_negative = 0
expected_carry = 1
self.memory.memory[emulator.START_ADDRESS] = cpy.CPY_ZEROPAGE_OPCODE
self.memory.memory[emulator.START_ADDRESS + 1] = 0xff
self.memory.memory[0x00ff] = value
self.cpu.execute(1)
self.assertEqual(self.cpu.cycles, expected_cycles, "CPU cycles should be %d" % expected_cycles)
self.assertEqual(self.cpu.processor_status['carry'], expected_carry, "CPU Carry flag should be %d" % expected_carry)
self.assertEqual(self.cpu.processor_status['zero'], expected_zero, "CPU zero flag should be %d" % expected_zero)
self.assertEqual(self.cpu.processor_status['negative'], expected_negative, "CPU negative flag should be %d" % expected_negative)
if __name__ == '__main__':
unittest.main() | [
"[email protected]"
] | |
738b4c2e8ea71aa1374de72bcbdaff282bbe4f37 | 8ace8be98c5fb7baac267ca7f83c8085e5cad35c | /26_two_sum_unique_pairs.py | def053f435def022e8e58082e3376b6e647929d4 | [] | no_license | cyberbono3/amazon-oa-python | c063eb275a4d311e58f148c0300c7e19b0f03bea | 7ce502bbe3a30b1d6052a46e7a28b724a327b5ae | refs/heads/master | 2023-01-20T16:23:00.241000 | 2020-11-22T03:49:25 | 2020-11-22T03:49:25 | 293,693,115 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | """
Input: nums = [1, 1, 2, 45, 46, 46], target = 47
1, 1
"""
class Solution:
def unique_pairs(self, nums, target):
s = set()
dic = {}
for i,x in enumerate(nums):
if target - x in s:
dic[target-x] = x
else:
s.add(x)
print(dic)
return len(dic)
sol = Solution()
print(sol.unique_pairs([1, 1, 2, 45, 46, 46], 47)) | [
"[email protected]"
] | |
cb07a323abf8740806bebc941c841ab0e659081b | e6ad1014aacaa92643f42952c278469177defc15 | /napalm_ansible/napalm_diff_yang.py | d134e9bb1a69665bbfabcb13f326bcf956c8cb1d | [
"Apache-2.0"
] | permissive | cspeidel/napalm-ansible | d290ee7cc1abd9dd7d11044d5ddc542bd6658906 | 8ad4badb38d79ec5efd96faa666c71f7438dfa28 | refs/heads/develop | 2022-02-09T05:40:10.302000 | 2017-11-06T20:51:58 | 2017-11-06T20:51:58 | 110,727,639 | 0 | 0 | Apache-2.0 | 2022-01-31T16:25:25 | 2017-11-14T18:18:35 | Python | UTF-8 | Python | false | false | 3,409 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
(c) 2017 David Barroso <[email protected]>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
from ansible.module_utils.basic import AnsibleModule
try:
import napalm_yang
except ImportError:
napalm_yang = None
DOCUMENTATION = '''
---
module: napalm_diff_yang
author: "David Barroso (@dbarrosop)"
version_added: "0.0"
short_description: "Return diff of two YANG objects"
description:
- "Create two YANG objects from dictionaries and runs mehtod"
- "napalm_yang.utils.diff on them."
requirements:
- napalm-yang
options:
models:
description:
- List of models to parse
required: True
first:
description:
- Dictionary with the data to load into the first YANG object
required: True
second:
description:
- Dictionary with the data to load into the second YANG object
required: True
'''
EXAMPLES = '''
napalm_diff_yang:
first: "{{ candidate.yang_model }}"
second: "{{ running_config.yang_model }}"
models:
- models.openconfig_interfaces
register: diff
'''
RETURN = '''
diff:
description: "Same output as the method napalm_yang.utils.diff"
returned: always
type: dict
sample: {
"interfaces": {
"interface": {
"both": {
"Port-Channel1": {
"config": {
"description": {
"first": "blah",
"second": "Asadasd"
}
}
}
}
}
}
'''
def get_root_object(models):
"""
Read list of models and returns a Root object with the proper models added.
"""
root = napalm_yang.base.Root()
for model in models:
current = napalm_yang
for p in model.split("."):
current = getattr(current, p)
root.add_model(current)
return root
def main():
module = AnsibleModule(
argument_spec=dict(
models=dict(type="list", required=True),
first=dict(type='dict', required=True),
second=dict(type='dict', required=True),
),
supports_check_mode=True
)
if not napalm_yang:
module.fail_json(msg="the python module napalm-yang is required")
first = get_root_object(module.params["models"])
first.load_dict(module.params["first"])
second = get_root_object(module.params["models"])
second.load_dict(module.params["second"])
diff = napalm_yang.utils.diff(first, second)
module.exit_json(yang_diff=diff)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
fe4155275d3a9240634ebe2b2de50705201231ac | a140a7ca1bc5f0af773cb3d22081b4bb75138cfa | /234_palindromLinkedList.py | b1b3a195574aefe83cc26bf49500c32c48a8a3b2 | [] | no_license | YeahHuang/Leetcode | d02bc99d2e890ed0e829515b6f85c4ca6394a1a1 | 78d36486ad4ec2bfb88fd35a5fd7fd4f0003ee97 | refs/heads/master | 2021-07-14T01:53:06.701000 | 2020-06-22T03:01:46 | 2020-06-22T03:01:46 | 166,235,118 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 585 | py | class Solution:
def isPalindrome(self, head: ListNode) -> bool:
rev = None
slow = fast = head
while fast and fast.next:
fast = fast.next.next
rev, rev.next, slow = slow, rev, slow.next
if fast:
# fast is at the end, move slow one step further for comparison(cross middle one)
slow = slow.next
while rev and rev.val == slow.val:
slow = slow.next
rev = rev.next
# if equivalent then rev become None, return True; otherwise return False
return not rev | [
"[email protected]"
] | |
f50a62262f8a5fd229e3a174e46c8c9fedf3c950 | cef09d1e6d5e7cd335387d0829211ffb0da18f48 | /tests2/tests/wedge100/test_psumuxmon.py | 73784296b42bf03dd786c25cca01bc61c37967ce | [] | no_license | theopolis/openbmc | a1ef2e3335efd19bf750117d79c1477d47948ff3 | 1784748ba29ee89bccacb2019a0bb86bd181c651 | refs/heads/master | 2020-12-14T07:20:40.273000 | 2019-04-20T05:25:17 | 2019-04-20T05:25:17 | 43,323,632 | 0 | 1 | null | 2015-09-28T19:56:24 | 2015-09-28T19:56:24 | null | UTF-8 | Python | false | false | 2,143 | py | #!/usr/bin/env python
#
# Copyright 2018-present Facebook. All Rights Reserved.
#
# This program file is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program in a file named COPYING; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301 USA
#
import unittest
import os
import re
from utils.shell_util import run_shell_cmd
from utils.cit_logger import Logger
class PsumuxmonTest(unittest.TestCase):
def setUp(self):
Logger.start(name=__name__)
def tearDown(self):
Logger.info("Finished logging for {}".format(self._testMethodName))
pass
def test_psumuxmon_runit_sv_status(self):
cmd = ["/usr/bin/sv status psumuxmon"]
data = run_shell_cmd(cmd)
self.assertIn("run", data, "psumuxmon process not running")
def get_ltc_hwmon_path(self, path):
pcard_vin = None
result = re.split("hwmon", path)
if os.path.isdir(result[0]):
construct_hwmon_path = result[0] + "hwmon"
x = None
for x in os.listdir(construct_hwmon_path):
if x.startswith('hwmon'):
construct_hwmon_path = construct_hwmon_path + "/" + x + "/" + result[2].split("/")[1]
return construct_hwmon_path
return None
def test_psumuxmon_ltc_sensor_path_exists(self):
# Based on lab device deployment, sensor data might not be accessible.
# Verify that path exists
cmd = "/sys/bus/i2c/devices/7-006f/hwmon/hwmon*/in1_input"
self.assertTrue(os.path.exists(self.get_ltc_hwmon_path(cmd)),
"psumuxmon LTC sensor path accessible")
| [
"[email protected]"
] | |
34bb012d42ec90f93b307b447f5c5cd8c6a26646 | c7a1c1ae40e9d95dfb92251dcfbf3c5010e6ba81 | /sensehat/pi_surveillance_py.py | 260dc24e20057985e9e1a46675745b948e2da882 | [] | no_license | pranavlathigara/Raspberry-Pi-DIY-Projects | efd18e2e5b9b8369bb1a5f5418782480cf9bc729 | 0c14c316898d4d06015912ac4a8cb7b71a3980c0 | refs/heads/master | 2021-04-06T09:14:28.088000 | 2018-02-19T00:15:22 | 2018-02-19T00:15:22 | 124,649,553 | 1 | 2 | null | 2018-03-10T11:30:59 | 2018-03-10T11:30:59 | null | UTF-8 | Python | false | false | 3,605 | py | from pyimagesearch.tempimage import TempImage
import dropbox as dbx
from picamera.array import PiRGBArray
from picamera import PiCamera
import warnings
import datetime
import imutils
import json
import time
import cv2
# filter warnings, load the configuration and initialize the Dropbox
# client
warnings.filterwarnings("ignore")
client = None
# Put your token here:
db = dbx.Dropbox("YOUR_TOKEN_HERE")
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
camera.resolution = (640,480)
camera.framerate = 16
rawCapture = PiRGBArray(camera, size=(640,480))
# allow the camera to warmup, then initialize the average frame, last
# uploaded timestamp, and frame motion counter
print "[INFO] warming up..."
time.sleep(2.5)
avg = None
lastUploaded = datetime.datetime.now()
motionCounter = 0
# capture frames from the camera
for f in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
# grab the raw NumPy array representing the image and initialize
# the timestamp and occupied/unoccupied text
frame = f.array
timestamp = datetime.datetime.now()
# resize the frame, convert it to grayscale, and blur it
frame = imutils.resize(frame, width=500)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (21, 21), 0)
# if the average frame is None, initialize it
if avg is None:
print "[INFO] starting background model..."
avg = gray.copy().astype("float")
rawCapture.truncate(0)
continue
# accumulate the weighted average between the current frame and
# previous frames, then compute the difference between the current
# frame and running average
cv2.accumulateWeighted(gray, avg, 0.5)
frameDelta = cv2.absdiff(gray, cv2.convertScaleAbs(avg))
# threshold the delta image, dilate the thresholded image to fill
# in holes, then find contours on thresholded image
thresh = cv2.threshold(frameDelta, 5, 255,
cv2.THRESH_BINARY)[1]
thresh = cv2.dilate(thresh, None, iterations=2)
(cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
# loop over the contours
for c in cnts:
# if the contour is too small, ignore it
if cv2.contourArea(c) < 5000:
continue
# compute the bounding box for the contour, draw it on the frame,
# and update the text
(x, y, w, h) = cv2.boundingRect(c)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
text = "!"
# draw the text and timestamp on the frame
ts = timestamp.strftime("%A %d %B %Y %I:%M:%S%p")
cv2.putText(frame, "{}".format(ts), (10, 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
# check to see if the room is occupied
if text == "!":
# check to see if enough time has passed between uploads
if (timestamp - lastUploaded).seconds >= 3.0:
# increment the motion counter
motionCounter += 1
# check to see if the number of frames with consistent motion is
# high enough
if motionCounter >= 8:
# write the image to temporary file
t = TempImage()
cv2.imwrite(t.path, frame)
print "[UPLOAD] {}".format(ts)
path = "{base_path}/{timestamp}.jpg".format(base_path="/", timestamp=ts)
client.put_file(open(t.path, "rb").read(), path)
t.cleanup()
# update the last uploaded timestamp and reset the motion
# counter
lastUploaded = timestamp
motionCounter = 0
# otherwise, the room is not occupied
else:
motionCounter = 0
# clear the stream in preparation for the next frame
rawCapture.truncate(0) | [
"[email protected]"
] | |
a9edbeaf88bade93d05aedb3c436f9b864421475 | 5139e63dfbc2b01a10b20bdd283005bfb64bc3e1 | /api/api.py | 998d101f9f00203f1225a882ce89d54334c0ff78 | [] | no_license | Merevoli-DatLuu/SGUInfo | 121098a67128d3ede72ce9f9f51955637c22fb9c | 501d676ad1e02f00573cc879fbba6c44ab1b0ffb | refs/heads/master | 2023-05-26T08:50:41.899000 | 2021-01-11T16:11:45 | 2021-01-11T16:11:45 | 281,350,587 | 4 | 1 | null | 2023-05-22T23:38:11 | 2020-07-21T09:13:00 | Python | UTF-8 | Python | false | false | 1,848 | py | from flask import Flask, render_template, request, jsonify
import sys
sys.path.append("..")
from sguinfo import sguinfo
app = Flask(__name__)
@app.route("/api/v1/students", methods=['GET'])
def get_student_list():
sgu = sguinfo()
if "from_id" in request.args and "to_id" in request.args and "id_list" not in request.args:
from_id = request.args['from_id']
to_id = request.args['to_id']
if sgu.validate_range_mssv(from_id, to_id):
data = []
for d in sgu.find_range_info(from_id, to_id):
data.append(sgu.change_to_eng_info(d))
return jsonify(data)
else:
return jsonify({})
elif "from_id" not in request.args and "to_id" not in request.args and "id_list" in request.args:
list_id = request.args['id_list'].split(",")
data = []
for id in list_id:
if sgu.validate_mssv(id):
data.append(sgu.change_to_eng_info(sgu.find_info(id)))
return jsonify(data)
else:
return jsonify({})
@app.route("/api/v1/students/<id>", methods = ['GET'])
def get_a_student(id):
sgu = sguinfo()
if sgu.validate_mssv(id):
return jsonify(sgu.change_to_eng_info(sgu.find_info(id)))
else:
return jsonify({})
@app.route("/api/v1/students/<id>/<param>", methods = ['GET'])
def get_a_student_with_param(id, param):
sgu = sguinfo()
if sgu.validate_mssv(id):
data = sgu.change_to_eng_info(sgu.find_info(id))
if param in data.keys():
return jsonify(data[param])
else:
return jsonify({})
else:
return jsonify({})
@app.route("/test")
def tessst():
return request.args
if __name__ == '__main__':
app.config['JSON_AS_ASCII'] = False
app.config['JSON_SORT_KEYS'] = False
app.run(debug = True) | [
"[email protected]"
] | |
33f504c5e1c391f90e11226e1c15be67091ee79f | 0124528676ee3bbaec60df5d6950b408e6da37c8 | /Projects/QTPy/circuitpython-community-bundle-7.x-mpy-20220601/examples/animation/main.py | ee50a4f811bdd29fdf5d3d51de532f353ba0b5a1 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | land-boards/lb-boards | 8127658dc537dcfde0bb59a5018ab75c3f0087f6 | eeb98cc2003dac1924845d949f6f5bd387376568 | refs/heads/master | 2023-06-07T15:44:46.110000 | 2023-06-02T22:53:24 | 2023-06-02T22:53:24 | 4,847,305 | 10 | 12 | null | null | null | null | UTF-8 | Python | false | false | 1,421 | py | import board
import dotstar_featherwing
wing = dotstar_featherwing.DotstarFeatherwing(board.D13, board.D11)
xmas_colors = {'w': ( 32, 32, 32),
'W': (255, 255, 255),
'G': ( 0, 32, 0),
'y': ( 32, 32, 0),
'Y': (255, 255, 0)}
xmas_animation = [["..y.w......w",
"..G.....w...",
"..G..w....w.",
".GGG...w....",
"GGGGG.......",
"wwwwwwwwwwww"],
["..y.........",
"..G.W......w",
"..G.....w...",
".GGG.w....W.",
"GGGGG..w....",
"wwwwwwwwwwww"],
["..Y....W....",
"..G.........",
"..G.w......w",
".GGG....w...",
"GGGGGw....W.",
"wwwwwwwwwwww"],
["..y..w....w.",
"..G....W....",
"..G.........",
".GGGW......w",
"GGGGG...w...",
"wwwwwwwwwwww"],
["..Y.....w...",
"..G..w....W.",
"..G....w....",
".GGG........",
"GGGGG......W",
"wwwwwwwwwwww"]]
wing.display_animation(xmas_animation, xmas_colors, 10000, 0.05)
| [
"[email protected]"
] | |
4ffcafc58e0e171a78a295d77ad213c80a5bb0e5 | 5d2404f62e58d5fd1f6112744ff32c3166183ac7 | /Exercicios/ex036.py | 6fc9f4561d2c0ecd7c5e81514824facf4042177e | [] | no_license | Leownhart/My_Course_of_python | 236cfc84d841c5883e5aa1cc0c0730e7a9a83c40 | 5abb21f8cdad91ab54247a007d40bf9ecd2cff8c | refs/heads/master | 2020-08-28T15:04:33.628000 | 2020-08-24T19:25:39 | 2020-08-24T19:25:39 | 217,733,877 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | valorcasa = float(input('Informe o valor da imovel: R$ '))
salcom = float(input('Informe o sálario do comprador: R$ '))
anos = int(input('Informe o tempo de financiamento em anos: '))
valpresta = (valorcasa / anos) / 12 # casa / (anos / * 12)
porcent = salcom * 30.0 / 100
print('Para pagar uma casa de R${:.2f} em {} anos a '
'prestação será de R${:.2f} mensais'.format(valorcasa, anos, valpresta))
if valpresta > porcent:
print('\033[31mEmpréstimo NEGADO!\033[m')
else:
print('\033[32mEmpréstimo APROVADO!\033[m')
| [
"[email protected]"
] | |
9f6ac6ecefb20871f98905fe6225b28a48eaf51d | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /9szPm9Mg5D2vJyTvf_14.py | c4b1eb7103a2e128742d7e447be9653582eade63 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210000 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | """
Write a function that takes three arguments `(x, y, z)` and returns a list
containing `x` sublists (e.g. `[[], [], []]`), each containing `y` number of
item `z`.
* `x` Number of sublists contained within the main list.
* `y` Number of items contained within each sublist.
* `z` Item contained within each sublist.
### Examples
matrix(3, 2, 3) ➞ [[3, 3], [3, 3], [3, 3]]
matrix(2, 1, "edabit") ➞ [["edabit"], ["edabit"]]
matrix(3, 2, 0) ➞ [[0, 0], [0, 0], [0, 0]]
### Notes
* The first two arguments will always be integers.
* The third argument is either a string or an integer.
"""
def matrix(x, y, z):
return [[z] * y] * x
| [
"[email protected]"
] | |
d5cd7cfe45515f1a0899cf0344254ae70d9a69c6 | 8ef8e6818c977c26d937d09b46be0d748022ea09 | /cv/3d_detection/pointnet2/pytorch/mmdetection3d/mmdet/version.py | 0e03a9d35749aef5d396e532d5ab8c5a0bae223f | [
"Apache-2.0"
] | permissive | Deep-Spark/DeepSparkHub | eb5996607e63ccd2c706789f64b3cc0070e7f8ef | 9d643e88946fc4a24f2d4d073c08b05ea693f4c5 | refs/heads/master | 2023-09-01T11:26:49.648000 | 2023-08-25T01:50:18 | 2023-08-25T01:50:18 | 534,133,249 | 7 | 6 | Apache-2.0 | 2023-03-28T02:54:59 | 2022-09-08T09:07:01 | Python | UTF-8 | Python | false | false | 529 | py | # Copyright (c) OpenMMLab. All rights reserved.
__version__ = '2.24.0'
short_version = __version__
def parse_version_info(version_str):
version_info = []
for x in version_str.split('.'):
if x.isdigit():
version_info.append(int(x))
elif x.find('rc') != -1:
patch_version = x.split('rc')
version_info.append(int(patch_version[0]))
version_info.append(f'rc{patch_version[1]}')
return tuple(version_info)
version_info = parse_version_info(__version__)
| [
"[email protected]"
] | |
a8694b72dc9f4ac269b718d8c743574a18cfc288 | 1fc45a47f0e540941c87b04616f3b4019da9f9a0 | /tests/sentry/api/endpoints/test_commit_filechange.py | 49eefdcd009d8d4020c56be8b1609185bc95f982 | [
"BSD-2-Clause"
] | permissive | seukjung/sentry-8.15.0 | febc11864a74a68ddb97b146cc1d2438ef019241 | fd3cab65c64fcbc32817885fa44df65534844793 | refs/heads/master | 2022-10-28T06:39:17.063000 | 2018-01-17T12:31:55 | 2018-01-17T12:31:55 | 117,833,103 | 0 | 0 | BSD-3-Clause | 2022-10-05T18:09:54 | 2018-01-17T12:28:13 | Python | UTF-8 | Python | false | false | 2,225 | py | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.models import Commit, CommitFileChange, Release, ReleaseCommit, Repository
from sentry.testutils import APITestCase
class CommitFileChangeTest(APITestCase):
def test_simple(self):
project = self.create_project(
name='foo',
)
release = Release.objects.create(
organization_id=project.organization_id,
version='1',
)
release.add_project(project)
repo = Repository.objects.create(
organization_id=project.organization_id,
name=project.name,
)
commit = Commit.objects.create(
organization_id=project.organization_id,
repository_id=repo.id,
key='a' * 40,
)
commit2 = Commit.objects.create(
organization_id=project.organization_id,
repository_id=repo.id,
key='b' * 40,
)
ReleaseCommit.objects.create(
organization_id=project.organization_id,
release=release,
commit=commit,
order=1,
)
ReleaseCommit.objects.create(
organization_id=project.organization_id,
release=release,
commit=commit2,
order=0,
)
CommitFileChange.objects.create(
organization_id=project.organization_id,
commit=commit,
filename='.gitignore',
type='M'
)
CommitFileChange.objects.create(
organization_id=project.organization_id,
commit=commit2,
filename='/static/js/widget.js',
type='A'
)
url = reverse('sentry-api-0-release-commitfilechange', kwargs={
'organization_slug': project.organization.slug,
'version': release.version,
})
self.login_as(user=self.user)
response = self.client.get(url)
assert response.status_code == 200, response.content
assert len(response.data) == 2
assert response.data[0]['filename'] == '.gitignore'
assert response.data[1]['filename'] == '/static/js/widget.js'
| [
"[email protected]"
] | |
63e3fa0e7d86c5133e69ba329a533e4edfdc34c1 | 0d4ec25fb2819de88a801452f176500ccc269724 | /sub_two_binaries.py | d4f6682fa6bf8cad577240ddabce0a9eaa7818a1 | [] | no_license | zopepy/leetcode | 7f4213764a6a079f58402892bd0ede0514e06fcf | 3bfee704adb1d94efc8e531b732cf06c4f8aef0f | refs/heads/master | 2022-01-09T16:13:09.399000 | 2019-05-29T20:00:11 | 2019-05-29T20:00:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | class Solution:
def addBinary(self, a, b):
"""
:type a: str
:type b: str
:rtype: str
"""
s = ""
a,b = a[::-1], b[::-1]
la,lb = len(a), len(b)
l = max(la, lb)
i = 0
carry = 0
while i<l or carry==1:
b1 = int(a[i] if i<la else 0)
b2 = int(b[i] if i<lb else 0)
curbit = b1^b2^carry
carry = (b1&b2)|(carry&(b1|b2))
s += str(curbit)
# print(curbit, carry)
i+=1
return s[::-1]
a,b="000", "000000"
print(Solution().addBinary(a,b))
| [
"[email protected]"
] | |
cc5695f1470140f25b2cb77800818102059fa4d6 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /kdhgEC2ECXAfoXWQP_1.py | 18cfc39baa91a8ce324e7628429be8a4c0702226 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210000 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | """
In this challenge, you have to obtain a sentence from the elements of a given
matrix. In the matrix, each word of the sentence follows a columnar order from
the top to the bottom, instead of the usual left-to-right order: it's time for
**transposition**!
Given a matrix `mtx`, implement a function that returns the complete sentence
as a string, with the words separated by a space between them.
### Examples
transpose_matrix([
["Enter"],
["the"],
["Matrix!"]
]) ➞ "Enter the Matrix!"
transpose_matrix([
["The", "are"],
["columns", "rows."]
]) ➞ "The columns are rows."
transpose_matrix([
["You", "the"],
["must", "table"],
["transpose", "order."]
]) ➞ "You must transpose the table order."
### Notes
* All given matrices are regular, as to say that each column has the same length.
* Punctuation is already given, you just have to add the spaces in the returned string.
"""
def transpose_matrix(mtx):
result = ""
for i in range(len(mtx[0])):
for j in mtx:
result += j[i]+" "
return result[:-1]
| [
"[email protected]"
] | |
3da334d08f98f8cf06aa4794ea35ab1bdecc8c8a | 8c8159691382ab8759ec637a97ef107ba898ad4c | /Recursive/removeInvalidParentheses.py | 44953cd000adfcd6e1707c07b5da6c12c0038303 | [] | no_license | windhaunting/Coding_practices | 3c89cddaeb13bfe36eab7ff664d6e16d0e86d46f | 8375988ac391376159438877b6729bb94340106b | refs/heads/master | 2021-02-05T21:40:07.858000 | 2020-02-28T19:25:29 | 2020-02-28T19:25:29 | 243,836,816 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 690 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 16 16:43:09 2018
@author: fubao
"""
#301. Remove Invalid Parentheses
'''
Remove the minimum number of invalid parentheses in order to make the input string valid. Return all possible results.
Note: The input string may contain letters other than the parentheses ( and ).
Examples:
"()())()" -> ["()()()", "(())()"]
"(a)())()" -> ["(a)()()", "(a())()"]
")(" -> [""]
'''
#reference: http://zxi.mytechroad.com/blog/searching/leetcode-301-remove-invalid-parentheses/
class Solution(object):
def removeInvalidParentheses(self, s):
"""
:type s: str
:rtype: List[str]
"""
| [
"[email protected]"
] | |
6017ff5d62258b8bdc613d2deb7b6f19177ac641 | d01fa1b6668c66236405b799e39e529d1492af7c | /{{cookiecutter.project_slug}}/pages/migrations/0016_sitebranding.py | 9068f89e8055a2b76d16b1f85251befee436df7b | [
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
] | permissive | chrisdev/wagtail-cookiecutter-foundation | 426ffd974aa08ab10e4b0e44d5003476c597f2e4 | e7d56ee01eb5976588129d7bd4d5fc6dab2d794a | refs/heads/master | 2023-08-31T06:05:43.999000 | 2022-03-31T18:44:37 | 2022-03-31T18:44:37 | 33,870,540 | 189 | 72 | MIT | 2023-09-14T03:30:34 | 2015-04-13T13:36:50 | Python | UTF-8 | Python | false | false | 1,105 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-10 14:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0019_delete_filter'),
('wagtailcore', '0040_page_draft_title'),
('pages', '0015_advert_button_text'),
]
operations = [
migrations.CreateModel(
name='SiteBranding',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('site_name', models.CharField(blank=True, max_length=250, null=True)),
('logo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image')),
('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
],
options={
'abstract': False,
},
),
]
| [
"[email protected]"
] | |
0b4285bff2df5cd19b3e3e2f31c78b854999b8f5 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/65/usersdata/185/34920/submittedfiles/investimento.py | c02de528254c4d919d01652089a4c2aa1ade2813 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773000 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 515 | py | # -*- coding: utf-8 -*-
from __future__ import division
i0=float(input('digite o valor do investimesnto:'))
taxa=float(input('digite o valor da taxa:'))
i1=(i0+(i0*taxa))
i2=(i1+(i1*taxa))
i3=(i2+(i2*taxa))
i4=(i3+(i3*taxa))
i5=(i4+(i4*taxa))
i6=(i5+(i5*taxa))
i7=(i6+(i6*taxa))
i8=(i7+(i7*taxa))
i9=(i8+(i8*taxa))
i10=(i9+(i9*taxa))
print('%.2f' %i1)
print('%.2f' %i2)
print('%.2f' %i3)
print('%.2f' %i4)
print('%.2f' %i5)
print('%.2f' %i6)
print('%.2f' %i7)
print('%.2f' %i8)
print('%.2f' %i9)
print('%.2f' %i10) | [
"[email protected]"
] | |
7e29e532d2f1285cd50e39b2cb2212b658e5b9a8 | 149db911cd5b9f404e5d74fd6c8ed047482d2c22 | /backend/menu/migrations/0001_initial.py | 2c07fd16d8ed613c8286821c487d80336fef03b4 | [] | no_license | crowdbotics-apps/bigbitesgrill-22907 | 45814458930ad7aed64a1f4941aabd930f1f2587 | 6cd1b7b663de21c7587cdbce1612c4807e2cc5f6 | refs/heads/master | 2023-01-14T05:10:18.129000 | 2020-11-23T03:27:17 | 2020-11-23T03:27:17 | 315,189,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,144 | py | # Generated by Django 2.2.17 on 2020-11-23 03:26
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('delivery_user_profile', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
('image', models.URLField()),
('icon', models.URLField()),
],
),
migrations.CreateModel(
name='Country',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
('prefix', models.CharField(max_length=8)),
('flag', models.URLField()),
],
),
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
('image', models.URLField()),
('category', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='item_category', to='menu.Category')),
],
),
migrations.CreateModel(
name='Review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.FloatField()),
('review_text', models.TextField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='review_item', to='menu.Item')),
('profile', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='review_profile', to='delivery_user_profile.Profile')),
],
),
migrations.CreateModel(
name='ItemVariant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
('price', models.FloatField()),
('image', models.URLField()),
('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='itemvariant_country', to='menu.Country')),
('item', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='itemvariant_item', to='menu.Item')),
],
),
]
| [
"[email protected]"
] | |
512cccdff042b753e66c88811c3fe1daaa5ce10b | d488f052805a87b5c4b124ca93494bc9b78620f7 | /google-cloud-sdk/lib/googlecloudsdk/command_lib/accesscontextmanager/zones.py | 7769f86c280257e290b19cd283c994d3d59183d5 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | PacktPublishing/DevOps-Fundamentals | 5ce1fc938db66b420691aa8106ecfb3f9ceb1ace | 60597e831e08325c7e51e8557591917f7c417275 | refs/heads/master | 2023-02-02T04:48:15.346000 | 2023-01-30T08:33:35 | 2023-01-30T08:33:35 | 131,293,311 | 13 | 19 | null | null | null | null | UTF-8 | Python | false | false | 7,142 | py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command line processing utilities for access zones."""
from googlecloudsdk.api_lib.accesscontextmanager import util
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.accesscontextmanager import common
from googlecloudsdk.command_lib.accesscontextmanager import levels
from googlecloudsdk.command_lib.accesscontextmanager import policies
from googlecloudsdk.command_lib.util.apis import arg_utils
from googlecloudsdk.command_lib.util.args import repeated
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import resources
REGISTRY = resources.REGISTRY
def AddAccessLevels(ref, args, req):
if args.IsSpecified('access_levels'):
access_levels = []
for access_level in args.access_levels:
level_ref = resources.REGISTRY.Create(
'accesscontextmanager.accessPolicies.accessLevels',
accessLevelsId=access_level, **ref.Parent().AsDict())
access_levels.append(level_ref.RelativeName())
req.accessZone.accessLevels = access_levels
return req
def AddImplicitServiceWildcard(ref, args, req):
"""Add an implicit wildcard for services if they are modified.
If either restricted services or unrestricted services is given, the other
must also be provided as a wildcard (`*`).
If neither is given, this is a no-op.
Args:
ref: resources.Resource, the (unused) resource
args: argparse namespace, the parse arguments
req: AccesscontextmanagerAccessPoliciesAccessZonesCreateRequest
Returns:
The modified request.
"""
del ref # Unused in AddImplicitServiceWildcard
if args.IsSpecified('restricted_services'):
req.accessZone.unrestrictedServices = ['*']
elif args.IsSpecified('unrestricted_services'):
req.accessZone.restrictedServices = ['*']
return req
def _GetAttributeConfig():
return concepts.ResourceParameterAttributeConfig(
name='zone',
help_text='The ID of the access zone.'
)
def _GetResourceSpec():
return concepts.ResourceSpec(
'accesscontextmanager.accessPolicies.accessZones',
resource_name='zone',
accessPoliciesId=policies.GetAttributeConfig(),
accessZonesId=_GetAttributeConfig())
def AddResourceArg(parser, verb):
"""Add a resource argument for an access zone.
NOTE: Must be used only if it's the only resource arg in the command.
Args:
parser: the parser for the command.
verb: str, the verb to describe the resource, such as 'to update'.
"""
concept_parsers.ConceptParser.ForResource(
'zone',
_GetResourceSpec(),
'The access zone {}.'.format(verb),
required=True).AddToParser(parser)
def GetTypeEnumMapper():
return arg_utils.ChoiceEnumMapper(
'--type',
util.GetMessages().AccessZone.ZoneTypeValueValuesEnum,
custom_mappings={
'ZONE_TYPE_REGULAR': 'regular',
'ZONE_TYPE_BRIDGE': 'bridge'
},
required=False,
help_str="""\
Type of the zone.
A *regular* zone allows resources within this access zone to import
and export data amongst themselves. A project may belong to at most
one regular access zone.
A *bridge* access zone allows resources in different regular access
zones to import and export data between each other. A project may
belong to multiple bridge access zones (only if it also belongs to a
regular access zone). Both restricted and unrestricted service lists,
as well as access level lists, must be empty.
""",
)
def AddZoneUpdateArgs(parser):
"""Add args for zones update command."""
args = [
common.GetDescriptionArg('access zone'),
common.GetTitleArg('access zone'),
GetTypeEnumMapper().choice_arg
]
for arg in args:
arg.AddToParser(parser)
_AddResources(parser)
_AddUnrestrictedServices(parser)
_AddRestrictedServices(parser)
_AddLevelsUpdate(parser)
def _AddResources(parser):
repeated.AddPrimitiveArgs(
parser, 'zone', 'resources', 'resources',
additional_help=('Resources must be projects, in the form '
'`project/<projectnumber>`.'))
def ParseResources(args, zone_result):
return repeated.ParsePrimitiveArgs(
args, 'resources', zone_result.GetAttrThunk('resources'))
def _AddUnrestrictedServices(parser):
repeated.AddPrimitiveArgs(
parser, 'zone', 'unrestricted-services', 'unrestricted services',
metavar='SERVICE',
additional_help=(
'The zone boundary DOES NOT apply to these services (for example, '
'`storage.googleapis.com`). A wildcard (```*```) may be given to '
'denote all services.\n\n'
'If restricted services are set, unrestricted services must be a '
'wildcard.'))
def ParseUnrestrictedServices(args, zone_result):
return repeated.ParsePrimitiveArgs(
args, 'unrestricted_services',
zone_result.GetAttrThunk('unrestrictedServices'))
def _AddRestrictedServices(parser):
repeated.AddPrimitiveArgs(
parser, 'zone', 'restricted-services', 'restricted services',
metavar='SERVICE',
additional_help=(
'The zone boundary DOES apply to these services (for example, '
'`storage.googleapis.com`). A wildcard (```*```) may be given to '
'denote all services.\n\n'
'If unrestricted services are set, restricted services must be a '
'wildcard.'))
def ParseRestrictedServices(args, zone_result):
return repeated.ParsePrimitiveArgs(
args, 'restricted_services',
zone_result.GetAttrThunk('restrictedServices'))
def _AddLevelsUpdate(parser):
repeated.AddPrimitiveArgs(
parser, 'zone', 'access-levels', 'access levels',
metavar='LEVEL',
additional_help=(
'An intra-zone request must satisfy these access levels (for '
'example, `MY_LEVEL`; must be in the same access policy as this '
'zone) to be allowed.'))
def _GetLevelIdFromLevelName(level_name):
return REGISTRY.Parse(level_name, collection=levels.COLLECTION).accessLevelsId
def ParseLevels(args, zone_result, policy_id):
level_ids = repeated.ParsePrimitiveArgs(
args, 'access_levels',
zone_result.GetAttrThunk('accessLevels',
transform=_GetLevelIdFromLevelName))
if level_ids is None:
return None
return [REGISTRY.Create(levels.COLLECTION,
accessPoliciesId=policy_id,
accessLevelsId=l) for l in level_ids]
| [
"[email protected]"
] | |
46420d6d79533b4847126b91595955ff96211153 | 0a46b027e8e610b8784cb35dbad8dd07914573a8 | /scripts/venv/lib/python2.7/site-packages/cogent/maths/stats/information_criteria.py | ead8f6417df1dd2fa2049a479c7f9aa4b4de1829 | [
"MIT"
] | permissive | sauloal/cnidaria | bb492fb90a0948751789938d9ec64677052073c3 | fe6f8c8dfed86d39c80f2804a753c05bb2e485b4 | refs/heads/master | 2021-01-17T13:43:17.307000 | 2016-10-05T14:14:46 | 2016-10-05T14:14:46 | 33,726,643 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,145 | py | from __future__ import division
import numpy
__author__ = "Gavin Huttley"
__copyright__ = "Copyright 2007-2012, The Cogent Project"
__credits__ = ["Gavin Huttley"]
__license__ = "GPL"
__version__ = "1.5.3"
__maintainer__ = "Gavin Huttley"
__email__ = "[email protected]"
__status__ = "Production"
def aic(lnL, nfp, sample_size=None):
"""returns Aikake Information Criterion
Arguments:
- lnL: the maximum log-likelihood of a model
- nfp: the number of free parameters in the model
- sample_size: if provided, the second order AIC is returned
"""
if sample_size is None:
correction = 1
else:
assert sample_size > 0, "Invalid sample_size %s" % sample_size
correction = sample_size / (sample_size - nfp - 1)
return -2* lnL + 2 * nfp * correction
def bic(lnL, nfp, sample_size):
"""returns Bayesian Information Criterion
Arguments:
- lnL: the maximum log-likelihood of a model
- nfp: the number of free parameters in the model
- sample_size: size of the sample
"""
return -2* lnL + nfp * numpy.log(sample_size)
| [
"[email protected]"
] | |
b254df743e617dfd1390743f0e04bbe4d12cb542 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03227/s922156594.py | 3367f99a10180d83d75fbea989fb7e0b5a810cdd | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763000 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | import sys
def input(): return sys.stdin.readline().rstrip()
def main():
s=input()
if len(s)==2:print(s)
else:print(s[::-1])
if __name__=='__main__':
main() | [
"[email protected]"
] | |
ae180e8cf37b46499f5232dd71f2789e8e56b342 | a16691abb472e2d57cf417cc671e7574f97aaf23 | /src/13_millas.py | 785d28eea60a685a38043e0f43f552b1e14265d4 | [
"MIT"
] | permissive | agomusa/oop-algorithms-python-platzi | fbb16208b68e822c6232ffb944c414c176004ac1 | 56e5f636c9243fbd81148a6e6e8405034f362c70 | refs/heads/main | 2023-06-19T21:38:19.925000 | 2021-07-07T20:49:09 | 2021-07-07T20:49:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | class Millas:
def __init__(self):
self._distancia = 0
# Función para obtener el valor de _distancia
def obtener_distancia(self):
print("Llamada al método getter...")
return self._distancia
# Función para definir el valor de _distancia
def definir_distancia(self, recorrido):
print("Llamada al método setter...")
self._distancia = recorrido
# Función para eliminar el atributo _distancia
def eliminar_distancia(self):
del self._distancia
distancia = property(obtener_distancia, definir_distancia, eliminar_distancia)
if __name__ == '__main__':
avion = Millas()
avion.distancia = int(input('¿Cuantas millas vas a viajar? '))
print('Vas a viajar '+str(avion.distancia*1.609344)+' Kilometros')
| [
"[email protected]"
] | |
5c713e71b6d36c733a3c7071ffaec82c80094caa | f8826a479f2b9d2f28993ceea7a7d0e3847aaf3d | /apps/requestlogger/models.py | 9fa6f370798fdbd62b4484b8acf1d332f55c10a0 | [] | no_license | icomms/wqmanager | bec6792ada11af0ff55dc54fd9b9ba49242313b7 | f683b363443e1c0be150656fd165e07a75693f55 | refs/heads/master | 2021-01-20T11:59:42.299000 | 2012-02-20T15:28:40 | 2012-02-20T15:28:40 | 2,154,449 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,869 | py | from django.db import models
from django.contrib.auth.models import User
from domain.models import Domain
import os
import logging
import settings
# this is a really bad place for this class to live, but reference it here for now
from scheduler.fields import PickledObjectField
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
REQUEST_TYPES = (
('GET', 'Get'),
('POST', 'Post'),
('PUT', 'Put'),
)
class RequestLog(models.Model):
'''Keeps track of incoming requests'''
# Lots of stuff here is replicated in Submission.
# They should ultimately point here, but that's a data migration
# problem.
method = models.CharField(max_length=4, choices=REQUEST_TYPES)
url = models.CharField(max_length=200)
time = models.DateTimeField(_('Request Time'), default = datetime.now)
ip = models.IPAddressField(_('Submitting IP Address'), null=True, blank=True)
is_secure = models.BooleanField(default=False)
# The logged in user
user = models.ForeignKey(User, null=True, blank=True)
# Some pickled fields for having access to the raw info
headers = PickledObjectField(_('Request Headers'))
parameters = PickledObjectField(_('Request Parameters'))
def __unicode__(self):
return "%s to %s at %s from %s" % (self.method, self.url,
self.time, self.ip)
@classmethod
def from_request(cls, request):
'''Creates an instance of a RequestLog from a standard
django HttpRequest object.
'''
log = RequestLog()
log.method = request.method
log.url = request.build_absolute_uri(request.path)
log.time = datetime.now()
log.is_secure = request.is_secure()
if request.META.has_key('REMOTE_ADDR') and request.META['REMOTE_ADDR']:
log.ip = request.META['REMOTE_ADDR']
elif request.META.has_key('REMOTE_HOST') and request.META['REMOTE_HOST']:
log.ip = request.META['REMOTE_HOST']
# if request.user != User, then user is anonymous
if isinstance(request.user, User):
log.user = request.user
def _convert_to_dict(obj):
# converts a django-querydict to a true python dict
# and converts any values to strings. This could result
# in a loss of information
to_return = {}
for key, value in obj.items():
to_return[key] = str(value)
return to_return
log.headers = _convert_to_dict(request.META)
if request.method == "GET":
log.parameters = _convert_to_dict(request.GET)
else:
log.parameters = _convert_to_dict(request.POST)
return log | [
"[email protected]"
] | |
ba11fe85c801d07e0e7c25b58d3aee09665d8952 | 77a7508c3a647711191b924959db80fb6d2bd146 | /src/gamesbyexample/worms.py | 2bea231d0dbdaeacc62cad083fcc56fafc920fb4 | [
"MIT"
] | permissive | surlydev/PythonStdioGames | ff7edb4c8c57a5eb6e2036e2b6ebc7e23ec994e0 | d54c2509c12a5b1858eda275fd07d0edd456f23f | refs/heads/master | 2021-05-22T21:01:15.529000 | 2020-03-26T07:34:10 | 2020-03-26T07:34:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,750 | py | """Worm animation, by Al Sweigart [email protected]
A screensaver of multicolor worms moving around.
NOTE: Do not resize the terminal window while this program is running.
Tags: large, artistic, simulation, bext"""
__version__ = 0
import random, shutil, sys, time
try:
import bext
except ImportError:
print('''This program requires the bext module, which you can
install by opening a Terminal window (on macOS & Linux) and running:
python3 -m pip install --user bext
or a Command Prompt window (on Windows) and running:
python -m pip install --user bext''')
sys.exit()
# Set up the constants:
PAUSE_LENGTH = 0.1
# Get the size of the terminal window:
WIDTH, HEIGHT = shutil.get_terminal_size()
# We can't print to the last column on Windows without it adding a
# newline automatically, so reduce the width by one:
WIDTH -= 1
WIDTH //= 2
NUMBER_OF_WORMS = 12 # (!) Try changing this value.
MIN_WORM_LENGTH = 6 # (!) Try changing this value.
MAX_WORM_LENGTH = 16 # (!) Try changing this value.
ALL_COLORS = bext.ALL_COLORS
NORTH = 'north'
SOUTH = 'south'
EAST = 'east'
WEST = 'west'
BLOCK = chr(9608) # Character 9608 is '█'
def main():
# Generate worm data structures:
worms = []
for i in range(NUMBER_OF_WORMS):
worms.append(Worm())
bext.clear()
while True: # Main simulation loop.
# Draw quit message.
bext.fg('white')
bext.goto(0, 0)
print('Ctrl-C to quit.', end='')
for worm in worms:
worm.display()
for worm in worms:
worm.moveRandom()
sys.stdout.flush()
time.sleep(PAUSE_LENGTH)
class Worm:
def __init__(self):
self.length = random.randint(MIN_WORM_LENGTH, MAX_WORM_LENGTH)
coloration = random.choice(['solid', 'stripe', 'random'])
if coloration == 'solid':
self.colors = [random.choice(ALL_COLORS)] * self.length
elif coloration == 'stripe':
color1 = random.choice(ALL_COLORS)
color2 = random.choice(ALL_COLORS)
self.colors = []
for i in range(self.length):
self.colors.append((color1, color2)[i % 2])
elif coloration == 'random':
self.colors = []
for i in range(self.length):
self.colors.append(random.choice(ALL_COLORS))
x = random.randint(0, WIDTH - 1)
y = random.randint(0, HEIGHT - 1)
self.body = []
for i in range(self.length):
self.body.append((x, y))
x, y = getRandomNeighbor(x, y)
def moveNorth(self):
headx, heady = self.body[0]
if self.isBlocked(NORTH):
return False
self.body.insert(0, (headx, heady - 1))
self._eraseLastBodySegment()
return True
def moveSouth(self):
headx, heady = self.body[0]
if self.isBlocked(SOUTH):
return False
self.body.insert(0, (headx, heady + 1))
self._eraseLastBodySegment()
return True
def moveEast(self):
headx, heady = self.body[0]
if self.isBlocked(EAST):
return False
self.body.insert(0, (headx + 1, heady))
self._eraseLastBodySegment()
return True
def moveWest(self):
headx, heady = self.body[0]
if self.isBlocked(WEST):
return False
self.body.insert(0, (headx - 1, heady))
self._eraseLastBodySegment()
return True
def isBlocked(self, direction):
headx, heady = self.body[0]
if direction == NORTH:
return heady == 0 or (headx, heady - 1) in self.body
elif direction == SOUTH:
return heady == HEIGHT - 1 or (headx, heady + 1) in self.body
elif direction == EAST:
return headx == WIDTH - 1 or (headx + 1, heady) in self.body
elif direction == WEST:
return headx == 0 or (headx - 1, heady) in self.body
def moveRandom(self):
if self.isBlocked(NORTH) and self.isBlocked(SOUTH) and self.isBlocked(EAST) and self.isBlocked(WEST):
self.body.reverse()
if self.isBlocked(NORTH) and self.isBlocked(SOUTH) and self.isBlocked(EAST) and self.isBlocked(WEST):
return False
hasMoved = False
while not hasMoved:
direction = random.choice([NORTH, SOUTH, EAST, WEST])
if direction == NORTH:
hasMoved = self.moveNorth()
elif direction == SOUTH:
hasMoved = self.moveSouth()
elif direction == EAST:
hasMoved = self.moveEast()
elif direction == WEST:
hasMoved = self.moveWest()
def _eraseLastBodySegment(self):
# Erase the last body segment:
bext.goto(self.body[-1][0] * 2, self.body[-1][1])
print(' ', end='')
self.body.pop() # Delete the last (x, y) tuple in self.body.
def display(self):
for i, (x, y) in enumerate(self.body):
bext.goto(x * 2, y)
bext.fg(self.colors[i])
print(BLOCK + BLOCK, end='')
def getRandomNeighbor(x, y):
while True:
direction = random.choice((NORTH, SOUTH, EAST, WEST))
if direction == NORTH and y != 0:
return (x, y - 1)
elif direction == SOUTH and y != HEIGHT - 1:
return (x, y + 1)
elif direction == EAST and x != WIDTH - 1:
return (x + 1, y)
elif direction == WEST and x != 0:
return (x - 1, y)
# If this program was run (instead of imported), run the game:
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
sys.exit() # When Ctrl-C is pressed, end the program.
| [
"[email protected]"
] | |
11cd4d65d01665c0d10e4866ca5ef1b2c881800c | be9d18c3ac86921e8899a830ec42d35edd440919 | /moztrap/view/runtests/finders.py | 233321205408e7918ea9601274a03b83139b0057 | [
"BSD-2-Clause"
] | permissive | AlinT/moztrap | abcbf74893d10f7bcf77b4ed44fa77bd017353d6 | 13927ae3f156b27e4dd064ea37f2feae14728398 | refs/heads/master | 2021-01-18T08:21:52.894000 | 2012-09-26T19:54:57 | 2012-09-26T19:54:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 906 | py | """
Finder for running tests.
"""
from django.core.urlresolvers import reverse
from ... import model
from ..lists import finder
class RunTestsFinder(finder.Finder):
template_base = "runtests/finder"
columns = [
finder.Column(
"products",
"_products.html",
model.Product.objects.order_by("name"),
),
finder.Column(
"productversions",
"_productversions.html",
model.ProductVersion.objects.all(),
),
finder.Column(
"runs",
"_runs.html",
model.Run.objects.filter(status=model.Run.STATUS.active),
),
]
def child_query_url(self, obj):
if isinstance(obj, model.Run):
return reverse("runtests_environment", kwargs={"run_id": obj.id})
return super(RunTestsFinder, self).child_query_url(obj)
| [
"[email protected]"
] | |
b2c759567b93cac768c610e6337ebe2ca19626e0 | 735a315ea82893f2acd5ac141f1a9b8be89f5cb9 | /pylib/v6.1.84/mdsscalar.py | 7cf7fe6e0ba174ecd9dc55b37dbdca77b5786088 | [] | no_license | drsmith48/pppl-mdsplus-python | 5ce6f7ccef4a23ea4b8296aa06f51f3a646dd36f | 0fb5100e6718c8c10f04c3aac120558f521f9a59 | refs/heads/master | 2021-07-08T02:29:59.069000 | 2017-10-04T20:17:32 | 2017-10-04T20:17:32 | 105,808,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,108 | py | if '__package__' not in globals() or __package__ is None or len(__package__)==0:
def _mimport(name,level):
return __import__(name,globals())
else:
def _mimport(name,level):
return __import__(name,globals(),{},[],level)
import numpy,copy
_dtypes=_mimport('_mdsdtypes',1)
_data=_mimport('mdsdata',1)
def makeScalar(value):
if isinstance(value,str):
return String(value)
if isinstance(value,Scalar):
return copy.deepcopy(value)
if isinstance(value,numpy.generic):
if isinstance(value,numpy.string_):
return String(value)
try:
if isinstance(value,numpy.bytes_):
return String(str(value,encoding='utf8'))
except:
pass
if isinstance(value,numpy.bool_):
return makeScalar(int(value))
return globals()[value.__class__.__name__.capitalize()](value)
try:
if isinstance(value,long):
return Int64(value)
if isinstance(value,int):
return Int32(value)
except:
if isinstance(value,int):
return Int64(value)
if isinstance(value,float):
return Float32(value)
if isinstance(value,str):
return String(value)
if isinstance(value,bytes):
return String(value.decode())
if isinstance(value,bool):
return Int8(int(value))
if isinstance(value,complex):
return Complex128(numpy.complex128(value))
if isinstance(value,numpy.complex64):
return Complex64(value)
if isinstance(value,numpy.complex128):
return Complex128(value)
raise TypeError('Cannot make Scalar out of '+str(type(value)))
class Scalar(_data.Data):
def __new__(cls,value=0):
try:
import numpy
_array=_mimport('mdsarray',1)
if (isinstance(value,_array.Array)) or isinstance(value,list) or isinstance(value,numpy.ndarray):
return _array.__dict__[cls.__name__+'Array'](value)
except:
pass
return super(Scalar,cls).__new__(cls)
def __init__(self,value=0):
if self.__class__.__name__ == 'Scalar':
raise TypeError("cannot create 'Scalar' instances")
if self.__class__.__name__ == 'String':
self._value=numpy.string_(value)
return
self._value=numpy.__dict__[self.__class__.__name__.lower()](value)
def __getattr__(self,name):
return self._value.__getattribute__(name)
def _getValue(self):
"""Return the numpy scalar representation of the scalar"""
return self._value
value=property(_getValue)
def __str__(self):
formats={'Int8':'%dB','Int16':'%dW','Int32':'%d','Int64':'0X%0xQ',
'Uint8':'%uBU','Uint16':'%uWU','Uint32':'%uLU','Uint64':'0X%0xQU',
'Float32':'%g'}
ans=formats[self.__class__.__name__] % (self._value,)
if ans=='nan':
ans="$ROPRAND"
elif isinstance(self,Float32) and ans.find('.')==-1:
ans=ans+"."
return ans
def decompile(self):
return str(self)
def __int__(self):
"""Integer: x.__int__() <==> int(x)
@rtype: int"""
return self._value.__int__()
def __long__(self):
"""Long: x.__long__() <==> long(x)
@rtype: int"""
return self.__value.__long__()
def _unop(self,op):
return _data.makeData(getattr(self.value,op)())
def _binop(self,op,y):
try:
y=y.value
except AttributeError:
pass
ans=getattr(self.value,op)(y)
return _data.makeData(ans)
def _triop(self,op,y,z):
try:
y=y.value
except AttributeError:
pass
try:
z=z.value
except AttributeError:
pass
return _data.makeData(getattr(self.value,op)(y,z))
def _getMdsDtypeNum(self):
return {'Uint8':DTYPE_BU,'Uint16':DTYPE_WU,'Uint32':DTYPE_LU,'Uint64':DTYPE_QU,
'Int8':DTYPE_B,'Int16':DTYPE_W,'Int32':DTYPE_L,'Int64':DTYPE_Q,
'String':DTYPE_T,
'Float32':DTYPE_FS,
'Float64':DTYPE_FT,'Complex64':DTYPE_FSC,'Complex128':DTYPE_FTC}[self.__class__.__name__]
mdsdtype=property(_getMdsDtypeNum)
def all(self):
return self._unop('all')
def any(self):
return self._unop('any')
def argmax(self,*axis):
if axis:
return self._binop('argmax',axis[0])
else:
return self._unop('argmax')
def argmin(self,*axis):
if axis:
return self._binop('argmin',axis[0])
else:
return self._unop('argmin')
def argsort(self,axis=-1,kind='quicksort',order=None):
return _data.makeData(self.value.argsort(axis,kind,order))
def astype(self,type):
return _data.makeData(self.value.astype(type))
def byteswap(self):
return self._unop('byteswap')
def clip(self,y,z):
return self._triop('clip',y,z)
class Int8(Scalar):
"""8-bit signed number"""
class Int16(Scalar):
"""16-bit signed number"""
class Int32(Scalar):
"""32-bit signed number"""
class Int64(Scalar):
"""64-bit signed number"""
class Uint8(Scalar):
"""8-bit unsigned number"""
class Uint16(Scalar):
"""16-bit unsigned number"""
class Uint32(Scalar):
"""32-bit unsigned number"""
class Uint64(Scalar):
"""64-bit unsigned number"""
def _getDate(self):
return _data.Data.execute('date_time($)',self)
date=property(_getDate)
class Float32(Scalar):
"""32-bit floating point number"""
class Complex64(Scalar):
"""32-bit complex number"""
def __str__(self):
return "Cmplx(%g,%g)" % (self._value.real,self._value.imag)
class Float64(Scalar):
"""64-bit floating point number"""
def __str__(self):
return ("%E" % self._value).replace("E","D")
class Complex128(Scalar):
"""64-bit complex number"""
def __str__(self):
return "Cmplx(%s,%s)" % (str(Float64(self._value.real)),str(Float64(self._value.imag)))
class String(Scalar):
"""String"""
def __radd__(self,y):
"""Reverse add: x.__radd__(y) <==> y+x
@rtype: Data"""
return self.execute('$//$',y,self)
def __add__(self,y):
"""Add: x.__add__(y) <==> x+y
@rtype: Data"""
return self.execute('$//$',self,y)
def __str__(self):
"""String: x.__str__() <==> str(x)
@rtype: String"""
if len(self._value) > 0:
return str(self.value.tostring().decode())
else:
return ''
def __len__(self):
return len(str(self))
def decompile(self):
if len(self._value) > 0:
return repr(self._value.tostring())
else:
return "''"
class Int128(Scalar):
"""128-bit number"""
def __init__(self):
raise TypeError("Int128 is not yet supported")
class Uint128(Scalar):
"""128-bit unsigned number"""
def __init__(self):
raise TypeError("Uint128 is not yet supported")
| [
"[email protected]"
] | |
f0714282ca1bed1a0bc706dfd5e96c9a2e87dc47 | a94770c70704c22590c72d7a90f38e3a7d2e3e5c | /Algo/Leetcode/123BestTimeToBuyAndSellStockIII.py | 2a292d28fef14431391bc62620bd69b4e46bf158 | [] | no_license | lawy623/Algorithm_Interview_Prep | 00d8a1c0ac1f47e149e95f8655d52be1efa67743 | ca8b2662330776d14962532ed8994dfeedadef70 | refs/heads/master | 2023-03-22T16:19:12.382000 | 2023-03-21T02:42:05 | 2023-03-21T02:42:05 | 180,056,076 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | class Solution(object):
def maxProfit(self, prices):
"""
:type prices: List[int]
:rtype: int
"""
buy1 = -2**31
buy2 = -2**31
sell1 = 0
sell2 = 0
for p in prices:
buy1 = max(buy1, -p)
sell1 = max(sell1, buy1+p)
buy2 = max(buy2, sell1-p)
sell2 = max(sell2, buy2+p)
return sell2 | [
"[email protected]"
] | |
8cb57215a38cae611c55923ca5e461bd7f9fed84 | 44b87d9faad99d542914c35410ba7d354d5ba9cd | /1/EXAM 2/start a following num end in b.py | 6fdd809b6c2423f47c4a8dc46bc3723b23095a91 | [] | no_license | append-knowledge/pythondjango | 586292d1c7d0ddace3630f0d77ca53f442667e54 | 0e5dab580e8cc48e9940fb93a71bcd36e8e6a84e | refs/heads/master | 2023-06-24T07:24:53.374000 | 2021-07-13T05:55:25 | 2021-07-13T05:55:25 | 385,247,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py | import re
x='[a]\d+[b]'
input=input('enter ')
match=re.fullmatch(x,input)
if match is not None:
print('VALID')
else:
print('INVALID') | [
"[email protected]"
] | |
b7e335ec5f9b7c481858b08725dd834ca4d73b3b | 917d4f67f6033a0cc01ba2b3b7b07dab94dcffdf | /property/pages/views.py | 6104059f52839acc79ba33851e228e4120171433 | [] | no_license | hghimanshu/Django | 011156c484e6710a379be3fb7faf6ab814bde02c | 75bef769e615df2719b213884f7269a56b7ccb7b | refs/heads/master | 2023-02-19T08:49:35.691000 | 2022-03-21T09:03:58 | 2022-03-21T09:03:58 | 242,301,089 | 0 | 0 | null | 2023-02-15T18:19:31 | 2020-02-22T07:43:13 | CSS | UTF-8 | Python | false | false | 856 | py | from django.shortcuts import render
from django.http import HttpResponse
from listings.models import Listing
from realtors.models import Realtor
from listings.choices import price_choices, bedroom_choices, state_choices
# Create your views here.
def index(request):
listings = Listing.objects.order_by('-list_date').filter(is_published=True)[:3]
context = {
'listings': listings,
'state_choices': state_choices,
'bedroom_choices': bedroom_choices,
'price_choices': price_choices
}
return render(request, 'pages/index.html', context)
def about(request):
realtors = Realtor.objects.order_by('-hire_date')
mvp_realtors = Realtor.objects.all().filter(is_mvp=True)
context = {
'realtors': realtors,
'mvp': mvp_realtors
}
return render(request, 'pages/about.html', context)
| [
"[email protected]"
] |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 5