| Column | Dtype | Length / values |
| --- | --- | --- |
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 288 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 684 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M, contains nulls |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50, contains nulls |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19, contains nulls |
| gha_language | string | 147 classes |
| src_encoding | string | 25 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 128 to 12.7k |
| extension | string | 142 classes |
| content | string | length 128 to 8.19k |
| authors | list | length 1 |
| author_id | string | length 1 to 132 |
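The rows that follow carry these fields in the order above, with the raw Python source in `content`. As a rough sketch (not part of the original page), rows with this schema could be streamed with the Hugging Face `datasets` library. The dataset id `org/python-subset` below is a placeholder, since the real dataset name is not given on this page, and the `permissive` filter simply picks one of the two `license_type` values that appear in the rows.

```python
# Sketch only: stream rows matching the schema above and inspect a few fields.
# Assumption: "org/python-subset" is a hypothetical dataset id; replace it with
# the real dataset path before running.
from datasets import load_dataset

rows = load_dataset("org/python-subset", split="train", streaming=True)

for row in rows:
    if row["license_type"] != "permissive":  # the other observed class is "no_license"
        continue
    # 'content' holds the raw Python source; 'length_bytes' is its size in bytes.
    print(row["repo_name"], row["path"], row["length_bytes"])
    print(row["content"][:200])
    break
```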
f9c962a39baa75c624eed77ea4bb3ed83b1d85ba | 4851d160a423b4a65e81a75d5b4de5218de958ee | /Number Format.py | 63d6f6c7b330bb5a08ff5f80773c51da98bf8514 | [] | no_license | LarisaOvchinnikova/python_codewars | 519508e5626303dcead5ecb839c6d9b53cb3c764 | 5399f4be17e4972e61be74831703a82ce9badffd | refs/heads/master | 2023-05-05T14:52:02.100435 | 2021-05-25T18:36:51 | 2021-05-25T18:36:51 | 319,399,343 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | # https://www.codewars.com/kata/565c4e1303a0a006d7000127
def number_format(n):
sign = "" if n >=0 else "-"
n = str(abs(n))
if len(n) <= 3: return sign+n
s = []
while len(n)>0:
s.append(n[-3:])
n = n[:-3]
return sign+",".join(s[::-1]) | [
"[email protected]"
] | |
eaeecb735041bbbe5891d953465fba1e4783f1c7 | 43b9eb11e90dbf984f950e4885085c83daa719b2 | /migrations/versions/339a6b145e56_user_status.py | f81e899bdc79017e6803c52bc8c09c0dbee04e15 | [
"Apache-2.0"
] | permissive | dpdi-unifor/thorn | 8ec7982812fe07906567514ad6628154ea99f620 | 37695c66607f60b29afd25ac512c0242079e1342 | refs/heads/master | 2023-01-02T19:48:27.409446 | 2020-09-09T14:31:51 | 2020-09-09T14:31:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | """User status
Revision ID: 339a6b145e56
Revises: 9f52309f0d44
Create Date: 2020-03-26 11:53:32.044767
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '339a6b145e56'
down_revision = '9f52309f0d44'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('status', sa.Enum('ENABLED', 'DELETED', 'PENDING_APPROVAL', name='UserStatusEnumType'), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'status')
# ### end Alembic commands ###
| [
"[email protected]"
] | |
57260f6b5d40f289986b5d8fb601c421eafeae75 | 35c1a591ce5ea045b72a1d9f89fc0d8f46cdd78b | /rice/deps/prompt_toolkit/application/dummy.py | ebe2f334fe11c7f8340bb99e392d2af6fc46a457 | [
"MIT"
] | permissive | jimhester/rice | 0a0aef48ccab3d6b2d7f700cc311977e8c4a3740 | 61cafc717d9398a57ecd2afb2a086afe1c676e30 | refs/heads/master | 2021-07-07T21:37:00.826756 | 2017-09-27T14:02:49 | 2017-09-27T14:02:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 998 | py | from __future__ import unicode_literals
from .application import Application
from prompt_toolkit.input import DummyInput
from prompt_toolkit.output import DummyOutput
__all__ = (
'DummyApplication',
)
class DummyApplication(Application):
"""
When no `Application` is running,
`prompt_toolkit.application.current.get_app` will run an instance of this
`Application`.
"""
def __init__(self):
super(DummyApplication, self).__init__(output=DummyOutput(), input=DummyInput())
def run(self):
raise NotImplementedError('A DummyApplication is not supposed to run.')
def run_async(self):
raise NotImplementedError('A DummyApplication is not supposed to run.')
def run_in_terminal(self):
raise NotImplementedError
def run_coroutine_in_terminal(self):
raise NotImplementedError
def run_system_command(self):
raise NotImplementedError
def suspend_to_background(self):
raise NotImplementedError
| [
"[email protected]"
] | |
2bd40a80b828137202059058e88f7504df2e6470 | 8613ec7f381a6683ae24b54fb2fb2ac24556ad0b | /boot/hard/2017.py | 36601afabce20178c45edae2db36c8014b9864eb | [] | no_license | Forest-Y/AtCoder | 787aa3c7dc4d999a71661465349428ba60eb2f16 | f97209da3743026920fb4a89fc0e4d42b3d5e277 | refs/heads/master | 2023-08-25T13:31:46.062197 | 2021-10-29T12:54:24 | 2021-10-29T12:54:24 | 301,642,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 715 | py | q = int(input())
l, r = [0] * q, [0] * q
for i in range(q):
l[i], r[i] = map(int, input().split())
mini = min(min(l), min(r))
maxi = max(max(l), max(r))
ans = [0] * (maxi + 1)
prime = [0] * (maxi + 1)
def judge_prime(n):
for i in range(2, int(n ** 0.5) + 1):
if n % i == 0:
return False
return True if n != 1 else False
for i in range((mini + 1) // 2, maxi + 1):
prime[i] = judge_prime(i)
for i in range(mini, maxi + 1, 2):
ans[i] = ans[i - 2] + 1 if prime[i] and prime[(i + 1) // 2] else ans[i - 2]
#print(i, ans[i], ans[i - 2])
#print(ans[1:])
for i in range(q):
#print(ans[r[i]], ans[l[i] - 2], ans[l[i] - 1])
print(ans[r[i]] - ans[max(0, l[i] - 2)])
| [
"[email protected]"
] | |
6018b78f698286b8dcc5c68df4f3473b415eb318 | bf8870d923adca9877d4b4dacef67f0a454727a8 | /_other/decorator.py | 23fb94a26518ba636f1e500656ad6d7d4a7a468e | [] | no_license | artkpv/code-dojo | 6f35a785ee5ef826e0c2188b752134fb197b3082 | 0c9d37841e7fc206a2481e4640e1a024977c04c4 | refs/heads/master | 2023-02-08T22:55:07.393522 | 2023-01-26T16:43:33 | 2023-01-26T16:43:33 | 158,388,327 | 1 | 0 | null | 2023-01-26T08:39:46 | 2018-11-20T12:45:44 | C# | UTF-8 | Python | false | false | 922 | py | import threading
def persistant_caller(max_calls=None, timeout_ms=None):
def actual(function):
def persistant_function(*args, **kwargs):
count = 0
while True:
try:
count += 1
return function(*args, **kwargs)
except Exception as e:
if count > max_calls:
# report exception
raise e
# report exception
if timeout_ms:
threading.sleep(timeout_ms)
return persistant_function
return actual
count = 0
@persistant_caller(max_calls=2, timeout_ms=100)
def printer(arg1, key1=None, key2=None):
global count
if count < 0:
count += 1
raise Exception('first exception')
print('printer', arg1, key1, key2)
printer(1, key1='key1val', key2='key2val')
| [
"[email protected]"
] | |
eacb9522092aa5e0ceb98aa10b3504cb2ba0ef10 | aceaf99df06081676f33145ff104009fcf30e922 | /core/permissions.py | 709d90b2de6a29e2b5eb15e460ad2a721747fd68 | [] | no_license | jonatasoli/process_monitoring | 2222d692b5255cbdf1e982940bf4f8a749257295 | 6ba2b3cf68b8bf8be6e1a4547b98c09e08d91794 | refs/heads/master | 2021-01-24T01:59:50.231667 | 2018-02-27T20:00:52 | 2018-02-27T20:00:52 | 122,831,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | py | from rest_framework import permissions
class UpdateOwnProfile(permissions.BasePermission):
"""Allow users to edit their own profile"""
def has_object_permission(self, request, view, obj):
"""Check user is trying to edit their own profile"""
if request.method in permissions.SAFE_METHODS:
return True
return obj.id == request.user.id
class PostOwnStatus(permissions.BasePermission):
"""Allow users to update their own profile."""
def has_object_permission(self, request, view, obj):
"""Check the user is trying to update their own status."""
if request.method in permissions.SAFE_METHODS:
return True
return obj.user_profile.id == request.user.id
| [
"[email protected]"
] | |
c1bcd65d34b7a3e59e2d47a48b25316f3ee6c058 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/detection/SOLOv1/mmdet/models/mask_heads/fcn_mask_head.py | 26cb3c0ff0c362870863dc2fddb5f9a2379cb87e | [
"GPL-1.0-or-later",
"LicenseRef-scancode-proprietary-license",
"BSD-2-Clause",
"Apache-2.0",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 8,012 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mmcv
import numpy as np
import pycocotools.mask as mask_util
import torch
import torch.nn as nn
from torch.nn.modules.utils import _pair
from mmdet.core import auto_fp16, force_fp32, mask_target
from ..builder import build_loss
from ..registry import HEADS
from ..utils import ConvModule
@HEADS.register_module
class FCNMaskHead(nn.Module):
def __init__(self,
num_convs=4,
roi_feat_size=14,
in_channels=256,
conv_kernel_size=3,
conv_out_channels=256,
upsample_method='deconv',
upsample_ratio=2,
num_classes=81,
class_agnostic=False,
conv_cfg=None,
norm_cfg=None,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)):
super(FCNMaskHead, self).__init__()
if upsample_method not in [None, 'deconv', 'nearest', 'bilinear']:
raise ValueError(
'Invalid upsample method {}, accepted methods '
'are "deconv", "nearest", "bilinear"'.format(upsample_method))
self.num_convs = num_convs
# WARN: roi_feat_size is reserved and not used
self.roi_feat_size = _pair(roi_feat_size)
self.in_channels = in_channels
self.conv_kernel_size = conv_kernel_size
self.conv_out_channels = conv_out_channels
self.upsample_method = upsample_method
self.upsample_ratio = upsample_ratio
self.num_classes = num_classes
self.class_agnostic = class_agnostic
self.conv_cfg = conv_cfg
self.norm_cfg = norm_cfg
self.fp16_enabled = False
self.loss_mask = build_loss(loss_mask)
self.convs = nn.ModuleList()
for i in range(self.num_convs):
in_channels = (
self.in_channels if i == 0 else self.conv_out_channels)
padding = (self.conv_kernel_size - 1) // 2
self.convs.append(
ConvModule(
in_channels,
self.conv_out_channels,
self.conv_kernel_size,
padding=padding,
conv_cfg=conv_cfg,
norm_cfg=norm_cfg))
upsample_in_channels = (
self.conv_out_channels if self.num_convs > 0 else in_channels)
if self.upsample_method is None:
self.upsample = None
elif self.upsample_method == 'deconv':
self.upsample = nn.ConvTranspose2d(
upsample_in_channels,
self.conv_out_channels,
self.upsample_ratio,
stride=self.upsample_ratio)
else:
self.upsample = nn.Upsample(
scale_factor=self.upsample_ratio, mode=self.upsample_method)
out_channels = 1 if self.class_agnostic else self.num_classes
logits_in_channel = (
self.conv_out_channels
if self.upsample_method == 'deconv' else upsample_in_channels)
self.conv_logits = nn.Conv2d(logits_in_channel, out_channels, 1)
self.relu = nn.ReLU(inplace=True)
self.debug_imgs = None
def init_weights(self):
for m in [self.upsample, self.conv_logits]:
if m is None:
continue
nn.init.kaiming_normal_(
m.weight, mode='fan_out', nonlinearity='relu')
nn.init.constant_(m.bias, 0)
@auto_fp16()
def forward(self, x):
for conv in self.convs:
x = conv(x)
if self.upsample is not None:
x = self.upsample(x)
if self.upsample_method == 'deconv':
x = self.relu(x)
mask_pred = self.conv_logits(x)
return mask_pred
def get_target(self, sampling_results, gt_masks, rcnn_train_cfg):
pos_proposals = [res.pos_bboxes for res in sampling_results]
pos_assigned_gt_inds = [
res.pos_assigned_gt_inds for res in sampling_results
]
mask_targets = mask_target(pos_proposals, pos_assigned_gt_inds,
gt_masks, rcnn_train_cfg)
return mask_targets
@force_fp32(apply_to=('mask_pred', ))
def loss(self, mask_pred, mask_targets, labels):
loss = dict()
if self.class_agnostic:
loss_mask = self.loss_mask(mask_pred, mask_targets,
torch.zeros_like(labels))
else:
loss_mask = self.loss_mask(mask_pred, mask_targets, labels)
loss['loss_mask'] = loss_mask
return loss
def get_seg_masks(self, mask_pred, det_bboxes, det_labels, rcnn_test_cfg,
ori_shape, scale_factor, rescale):
"""Get segmentation masks from mask_pred and bboxes.
Args:
mask_pred (Tensor or ndarray): shape (n, #class+1, h, w).
For single-scale testing, mask_pred is the direct output of
model, whose type is Tensor, while for multi-scale testing,
it will be converted to numpy array outside of this method.
det_bboxes (Tensor): shape (n, 4/5)
det_labels (Tensor): shape (n, )
img_shape (Tensor): shape (3, )
rcnn_test_cfg (dict): rcnn testing config
ori_shape: original image size
Returns:
list[list]: encoded masks
"""
if isinstance(mask_pred, torch.Tensor):
mask_pred = mask_pred.sigmoid().cpu().numpy()
assert isinstance(mask_pred, np.ndarray)
# when enabling mixed precision training, mask_pred may be float16
# numpy array
mask_pred = mask_pred.astype(np.float32)
cls_segms = [[] for _ in range(self.num_classes - 1)]
bboxes = det_bboxes.cpu().numpy()[:, :4]
labels = det_labels.cpu().numpy() + 1
if rescale:
img_h, img_w = ori_shape[:2]
else:
img_h = np.round(ori_shape[0] * scale_factor).astype(np.int32)
img_w = np.round(ori_shape[1] * scale_factor).astype(np.int32)
scale_factor = 1.0
for i in range(bboxes.shape[0]):
if not isinstance(scale_factor, (float, np.ndarray)):
scale_factor = scale_factor.cpu().numpy()
bbox = (bboxes[i, :] / scale_factor).astype(np.int32)
label = labels[i]
w = max(bbox[2] - bbox[0] + 1, 1)
h = max(bbox[3] - bbox[1] + 1, 1)
if not self.class_agnostic:
mask_pred_ = mask_pred[i, label, :, :]
else:
mask_pred_ = mask_pred[i, 0, :, :]
bbox_mask = mmcv.imresize(mask_pred_, (w, h))
bbox_mask = (bbox_mask > rcnn_test_cfg.mask_thr_binary).astype(
np.uint8)
if rcnn_test_cfg.get('crop_mask', False):
im_mask = bbox_mask
else:
im_mask = np.zeros((img_h, img_w), dtype=np.uint8)
im_mask[bbox[1]:bbox[1] + h, bbox[0]:bbox[0] + w] = bbox_mask
if rcnn_test_cfg.get('rle_mask_encode', True):
rle = mask_util.encode(
np.array(im_mask[:, :, np.newaxis], order='F'))[0]
cls_segms[label - 1].append(rle)
else:
cls_segms[label - 1].append(im_mask)
return cls_segms
| [
"[email protected]"
] | |
f488fb1b598893609ff4510a7ee334fda84ad105 | 212724dd876c15ef801fb781e907b1c7dd08f4ae | /skyline/webapp/gunicorn.py | 50d0f6817e3af53960712284da80d1324840b628 | [
"MIT"
] | permissive | wfloutier/skyline | b9e769cddccdefeeb7c7cc258524bbf489f9d5eb | b12758dc11564de93c7ad76c1f8ed3327db78aa4 | refs/heads/master | 2020-08-08T03:19:40.283298 | 2019-10-09T11:05:13 | 2019-10-09T11:05:13 | 213,693,601 | 0 | 0 | NOASSERTION | 2019-10-08T16:20:15 | 2019-10-08T16:20:15 | null | UTF-8 | Python | false | false | 1,378 | py | import sys
import os.path
import logging
# import multiprocessing
# import traceback
from logging.handlers import TimedRotatingFileHandler, MemoryHandler
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
sys.path.insert(0, os.path.dirname(__file__))
import settings
bind = '%s:%s' % (settings.WEBAPP_IP, str(settings.WEBAPP_PORT))
# workers = multiprocessing.cpu_count() * 2 + 1
workers = 2
backlog = 10
skyline_app = 'webapp'
skyline_app_logger = '%sLog' % skyline_app
logfile = '%s/%s.log' % (settings.LOG_PATH, skyline_app)
logger = logging.getLogger(skyline_app_logger)
pidfile = '%s/%s.pid' % (settings.PID_PATH, skyline_app)
accesslog = '%s/webapp.access.log' % (settings.LOG_PATH)
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
errorlog = '%s/webapp.log' % (settings.LOG_PATH)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s :: %(process)s :: %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
handler = logging.handlers.TimedRotatingFileHandler(
logfile,
when="midnight",
interval=1,
backupCount=5)
memory_handler = logging.handlers.MemoryHandler(100,
flushLevel=logging.DEBUG,
target=handler)
handler.setFormatter(formatter)
logger.addHandler(memory_handler)
| [
"[email protected]"
] | |
f96b6739d30de98f438bfc15e544eb95f5523574 | 4a7a6f629e4dd16b5ba3db23a6b6369dbb19c10d | /a038- 數字翻轉.py | f4df8c96273b40ed14b01a7ac42e057c5349a0a8 | [] | no_license | jlhung/ZeroJudge-Python | 1170fc70ffc6a1a577d035cd70289529d2bbc07e | 356381363891ba05302736746c698ea85668af50 | refs/heads/master | 2022-12-08T06:42:12.160731 | 2020-09-03T12:04:49 | 2020-09-03T12:04:49 | 282,219,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | py | '''
20200723 v1.0 jlhung  "0" must also be output; trailing zeros must be stripped
'''
while True:
try:
n = int(input())
if n == 0:
print(0)
break
while n % 10 == 0:
n //= 10
print(str(n)[::-1])
except(EOFError):
break | [
"[email protected]"
] | |
3bdbd3cfdd89c89ececba6bd06fdd7af1e184e39 | 6e68584f2819351abe628b659c01184f51fec976 | /Centre_College/CSC_339_SP2015/vindiniumAI/pybrain/optimization/memetic/inversememetic.py | f38eec205b10fd0408828968956d69f6cf8c206b | [
"WTFPL"
] | permissive | DanSGraham/code | 0a16a2bfe51cebb62819cd510c7717ae24b12d1b | fc54b6d50360ae12f207385b5d25adf72bfa8121 | refs/heads/master | 2020-03-29T21:09:18.974467 | 2017-06-14T04:04:48 | 2017-06-14T04:04:48 | 36,774,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | __author__ = 'Tom Schaul, [email protected]'
from memetic import MemeticSearch
class InverseMemeticSearch(MemeticSearch):
""" Interleaving local search with topology search (inverse of memetic search) """
def _learnStep(self):
self.switchMutations()
MemeticSearch._learnStep(self)
self.switchMutations()
| [
"[email protected]"
] | |
9866fcf46bab6408ee2d067adcfed3f1ed0287ad | fcdfb4231b64e38a5f6611057097def815a6a987 | /baidumap/tests/autopilot/baidumap/tests/__init__.py | 0bf71b2ca06c109fd48386845c2031198789f74a | [] | no_license | liu-xiao-guo/baidumap | f2967efc845347bb40769ea7202bb8d4b4c6e66d | e6ba8ba6bb3df4e2956af55414e5e8a1a34ac06a | refs/heads/master | 2021-01-10T08:45:01.423685 | 2016-03-23T04:47:49 | 2016-03-23T04:47:49 | 54,531,442 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,687 | py | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
"""Ubuntu Touch App Autopilot tests."""
import os
import logging
import baidumap
from autopilot.testcase import AutopilotTestCase
from autopilot import logging as autopilot_logging
import ubuntuuitoolkit
from ubuntuuitoolkit import base
logger = logging.getLogger(__name__)
class BaseTestCase(AutopilotTestCase):
"""A common test case class
"""
local_location = os.path.dirname(os.path.dirname(os.getcwd()))
local_location_qml = os.path.join(local_location, 'Main.qml')
click_package = '{0}.{1}'.format('baidumap', 'liu-xiao-guo')
def setUp(self):
super(BaseTestCase, self).setUp()
self.launcher, self.test_type = self.get_launcher_and_type()
self.app = baidumap.TouchApp(self.launcher(), self.test_type)
def get_launcher_and_type(self):
if os.path.exists(self.local_location_qml):
launcher = self.launch_test_local
test_type = 'local'
else:
launcher = self.launch_test_click
test_type = 'click'
return launcher, test_type
@autopilot_logging.log_action(logger.info)
def launch_test_local(self):
return self.launch_test_application(
base.get_qmlscene_launch_command(),
self.local_location_qml,
app_type='qt',
emulator_base=ubuntuuitoolkit.UbuntuUIToolkitCustomProxyObjectBase)
@autopilot_logging.log_action(logger.info)
def launch_test_click(self):
return self.launch_click_package(
self.click_package,
emulator_base=ubuntuuitoolkit.UbuntuUIToolkitCustomProxyObjectBase)
| [
"[email protected]"
] | |
e29850d4bc107cdd9a707c816fea75d159dd1ae1 | 4cae2a0808d0f200a5f91a9724419a081b8c3eb0 | /create_biometric_users/models/ecube_bio_machine.py | 57453c1b50ef85bdab1500893f326346dbd611f0 | [] | no_license | khyasir/Latitude_Custom_Modules | 7392ba47da8c172f46902d32454e13614b5d5e8b | 6758fc2a97073609dc305e71571f9ea42916f71b | refs/heads/master | 2021-05-02T12:04:37.953490 | 2018-02-08T08:52:24 | 2018-02-08T08:52:24 | 120,735,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from openerp import models, fields, api
class EcubeMachine(models.Model):
_name = 'ecube.machine'
_description = 'EcubeMachine'
name = fields.Char('Machine Name')
machine_ip = fields.Char('Machine IP')
machine_status = fields.Boolean('Machine Status') | [
"[email protected]"
] | |
8258490a8523ca5ddcc472087885ef1dc25aa68b | f2cc1dc87486833613fb83543c68784849fd7319 | /subtests/test_search_item.py | e063b884b30225a9c67e4b1ebc4d511584d3914c | [] | no_license | EduardoUrzuaBo/platziChallenge | cc953e2615653d575cf079bceea4fdcad75a4da0 | a8f06c98f14ee58db47848ec287dcd105b685dcb | refs/heads/master | 2023-07-29T10:44:16.469765 | 2021-09-05T20:14:42 | 2021-09-05T20:14:42 | 403,379,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from src.testproject.decorator import report_assertion_errors
"""
This pytest test was automatically generated by TestProject
Project: Test Projects
Package: TestProject.Generated.Tests.TestProjects
Test: Search Item
Generated by: Eduardo Andres Urzuas ([email protected])
Generated on 09/02/2021, 03:56:35
"""
@report_assertion_errors
def test_main(driver):
"""This test was auto generated from steps of the 'CreateAccount' test."""
# Test Parameters
SearchItem = "Books"
# 1. Click 'q'
q = driver.find_element(By.CSS_SELECTOR,
"#search")
q.click()
# 2. Type '{SearchItem}' in 'q'
q = driver.find_element(By.CSS_SELECTOR,
"#search")
q.send_keys(f'{SearchItem}')
# 3. Send 'ENTER' key(s)
ActionChains(driver).send_keys(Keys.ENTER).perform()
| [
"[email protected]"
] | |
92aa7a25070d981b4443680ae1a1621f0f40d582 | ce4d1c3a1522f382d9b3f73b7f126e7a3616bfb5 | /projects/DensePose/densepose/data/datasets/coco.py | ddd03c25b6956e8afa7d78ac0a259d255fb51541 | [
"Apache-2.0"
] | permissive | davidnvq/detectron2 | 6c01512326687e86ab50c0f89af4e926c0007ae6 | eaca19840e5db014c3dd37dee9920d780b3b6165 | refs/heads/master | 2022-04-26T03:29:08.080258 | 2020-04-24T09:05:07 | 2020-04-24T09:05:07 | 258,421,912 | 1 | 0 | Apache-2.0 | 2020-04-24T06:08:26 | 2020-04-24T06:08:25 | null | UTF-8 | Python | false | false | 4,143 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import os
from dataclasses import dataclass
from typing import Any, Dict, Iterable, Optional
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets import load_coco_json
DENSEPOSE_KEYS = ["dp_x", "dp_y", "dp_I", "dp_U", "dp_V", "dp_masks"]
DENSEPOSE_METADATA_URL_PREFIX = "https://dl.fbaipublicfiles.com/densepose/data/"
@dataclass
class CocoDatasetInfo:
name: str
images_root: str
annotations_fpath: str
DATASETS = [
CocoDatasetInfo(
name="densepose_coco_2014_train",
images_root="coco/train2014",
annotations_fpath="coco/annotations/densepose_train2014.json",
),
CocoDatasetInfo(
name="densepose_coco_2014_minival",
images_root="coco/val2014",
annotations_fpath="coco/annotations/densepose_minival2014.json",
),
CocoDatasetInfo(
name="densepose_coco_2014_minival_100",
images_root="coco/val2014",
annotations_fpath="coco/annotations/densepose_minival2014_100.json",
),
CocoDatasetInfo(
name="densepose_coco_2014_valminusminival",
images_root="coco/val2014",
annotations_fpath="coco/annotations/densepose_valminusminival2014.json",
),
CocoDatasetInfo(
name="densepose_chimps",
images_root="densepose_evolution/densepose_chimps",
annotations_fpath="densepose_evolution/annotations/densepose_chimps_densepose.json",
),
]
def _is_relative_local_path(path: os.PathLike):
path_str = os.fsdecode(path)
return ("://" not in path_str) and not os.path.isabs(path)
def _maybe_prepend_base_path(base_path: Optional[os.PathLike], path: os.PathLike):
"""
Prepends the provided path with a base path prefix if:
1) base path is not None;
2) path is a local path
"""
if base_path is None:
return path
if _is_relative_local_path(path):
return os.path.join(base_path, path)
return path
def get_metadata(base_path: Optional[os.PathLike]) -> Dict[str, Any]:
"""
Returns metadata associated with COCO DensePose datasets
Args:
base_path: Optional[os.PathLike]
Base path used to load metadata from
Returns:
Dict[str, Any]
Metadata in the form of a dictionary
"""
meta = {
"densepose_transform_src": _maybe_prepend_base_path(
base_path, "UV_symmetry_transforms.mat"
),
"densepose_smpl_subdiv": _maybe_prepend_base_path(base_path, "SMPL_subdiv.mat"),
"densepose_smpl_subdiv_transform": _maybe_prepend_base_path(
base_path, "SMPL_SUBDIV_TRANSFORM.mat"
),
}
return meta
def register_dataset(dataset_data: CocoDatasetInfo, datasets_root: Optional[os.PathLike] = None):
"""
Registers provided COCO DensePose dataset
Args:
dataset_data: CocoDatasetInfo
Dataset data
datasets_root: Optional[os.PathLike]
Datasets root folder (default: None)
"""
annotations_fpath = _maybe_prepend_base_path(datasets_root, dataset_data.annotations_fpath)
images_root = _maybe_prepend_base_path(datasets_root, dataset_data.images_root)
def load_annotations():
return load_coco_json(
json_file=annotations_fpath,
image_root=images_root,
dataset_name=dataset_data.name,
extra_annotation_keys=DENSEPOSE_KEYS,
)
DatasetCatalog.register(dataset_data.name, load_annotations)
MetadataCatalog.get(dataset_data.name).set(
json_file=annotations_fpath,
image_root=images_root,
**get_metadata(DENSEPOSE_METADATA_URL_PREFIX)
)
def register_datasets(
datasets_data: Iterable[CocoDatasetInfo], datasets_root: Optional[os.PathLike] = None
):
"""
Registers provided COCO DensePose datasets
Args:
datasets_data: Iterable[CocoDatasetInfo]
An iterable of dataset datas
datasets_root: Optional[os.PathLike]
Datasets root folder (default: None)
"""
for dataset_data in datasets_data:
register_dataset(dataset_data, datasets_root)
| [
"[email protected]"
] | |
a071d653f678661efe5f76d8153b380eb2aa9da1 | 5456320f03ed956ff7b1ad6a9539d65a602c71d4 | /mozCingi/fuzzers/mutagen/mutagenExecutor.py | 03e99823055ea5ebf8b00cf285f353720184fc6d | [] | no_license | ShakoHo/mozCingi | 9020cbb4aa65308ca5fd5bf9c074230f1fddb751 | 39239411abc840cd58a05f1fa41a24ae7cf9695f | refs/heads/master | 2016-08-12T12:51:16.331671 | 2016-03-25T09:35:54 | 2016-03-25T09:35:54 | 49,626,247 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | __author__ = 'shako'
import os
from mozCingi.util.mozITPWrapper import MozITPWrapper
from mozCingi.steps.executor import AbsExecutor
class MutagenExecutor(AbsExecutor):
DEFAULT_EXEC_LOG_NAME = "exec.log"
def launch_execute_file(self):
mozitp_obj = MozITPWrapper()
pack_file_name = self.fuzzer_name + "_" + str(self.obj_index) + ".zip"
pack_file_path = os.path.join(self.working_dir, self.DEFAULT_ROOT_TMP_DIR, pack_file_name)
execution_log_dir = os.path.join(self.working_dir, self.DEFAULT_ROOT_LOG_DIR, self.fuzzer_name)
if os.path.exists(execution_log_dir) is False:
os.makedirs(execution_log_dir)
execution_log_path = os.path.join(execution_log_dir, self.DEFAULT_EXEC_LOG_NAME)
mozitp_obj.launch_itp_for_fuzz(pack_file_path, execution_log_path)
mozitp_obj.stop_itp()
| [
"[email protected]"
] | |
0bea0ecced4c778b22f949d0bfa1c3a5954fc139 | e0519908caa23bef1873ff69ebd17c5d81f741e1 | /calabiyau/views/sessions.py | 23c24167a6ff049d9af607d405a9047b9d2be499 | [
"BSD-3-Clause"
] | permissive | TachyonicProject/calabiyau | 2fb7af37bd656a686a5f741cadd082b2500718ff | 415a8ada4a93ee84c4776e89c9442af328dcfdd6 | refs/heads/latest | 2020-05-02T04:14:43.953841 | 2019-12-06T04:12:39 | 2019-12-06T04:12:39 | 177,745,608 | 0 | 3 | NOASSERTION | 2019-12-06T04:12:40 | 2019-03-26T08:31:25 | Python | UTF-8 | Python | false | false | 4,448 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Christiaan Frans Rademan <[email protected]>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
from luxon import register
from luxon import router
from luxon.utils import sql
from luxon.helpers.api import sql_list
from luxon import MBClient
from calabiyau.lib.ctx import ctx
@register.resources()
class Sessions(object):
def __init__(self):
# Services Users
router.add('GET', '/v1/sessions', self.sessions,
tag='services:view')
router.add('PUT', '/v1/disconnect/{session_id}', self.disconnect,
tag='services:admin')
router.add('PUT', '/v1/clear/{nas_id}', self.clear,
tag='services:admin')
def sessions(self, req, resp):
def ctx_val(ctx_id):
try:
return {'ctx': ctx[ctx_id]}
except IndexError:
return {'ctx': ctx_id}
f_session_id = sql.Field('calabiyau_session.id')
f_session_ctx = sql.Field('calabiyau_session.ctx')
f_session_accttype = sql.Field('calabiyau_session.accttype')
f_session_start = sql.Field('calabiyau_session.acctstarttime')
f_session_updated = sql.Field('calabiyau_session.acctupdated')
f_session_unique_id = sql.Field('calabiyau_session.acctuniqueid')
f_session_ip = sql.Field(
'INET6_NTOA(calabiyau_session.framedipaddress)')
f_nas_ip = sql.Field(
'INET6_NTOA(calabiyau_session.nasipaddress)')
f_session_username = sql.Field('calabiyau_session.username')
f_session_user_id = sql.Field('calabiyau_session.id')
select = sql.Select('calabiyau_session')
select.fields = (f_session_id,
f_session_unique_id,
f_session_start,
f_session_updated,
f_session_user_id,
f_session_username,
f_session_ip,
f_nas_ip,
f_session_ctx,
)
select.where = f_session_accttype != sql.Value('stop')
return sql_list(
req,
select,
search={
'calabiyau_session.acctstarttime': 'datetime',
'calabiyau_session.acctupdated': 'datetime',
'calabiyau_session.user_id': str,
'calabiyau_session.username': str,
'calabiyau_session.acctuniqueid': str,
'calabiyau_session.framedipaddress': 'ip',
'calabiyau_session.nasipaddress': 'ip'},
callbacks={'ctx': ctx_val})
def disconnect(self, req, resp, session_id):
with MBClient('subscriber') as mb:
mb.send('disconnect_session', {'session_id': session_id})
def clear(self, req, resp, nas_id):
with MBClient('subscriber') as mb:
mb.send('clear_nas_sessions', {'nas_id': nas_id})
| [
"[email protected]"
] | |
8bcd9aa863af02fbda6ca89f80c595b263e35e8a | 49a167d942f19fc084da2da68fc3881d44cacdd7 | /kubernetes_asyncio/client/api/authorization_api.py | 420284fe2740cbf841d0d542f50e1b597a20ae81 | [
"Apache-2.0"
] | permissive | olitheolix/kubernetes_asyncio | fdb61323dc7fc1bade5e26e907de0fe6e0e42396 | 344426793e4e4b653bcd8e4a29c6fa4766e1fff7 | refs/heads/master | 2020-03-19T12:52:27.025399 | 2018-06-24T23:34:03 | 2018-06-24T23:34:03 | 136,546,270 | 1 | 0 | Apache-2.0 | 2018-06-24T23:52:47 | 2018-06-08T00:39:52 | Python | UTF-8 | Python | false | false | 4,144 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from kubernetes_asyncio.client.api_client import ApiClient
class AuthorizationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_api_group(self, **kwargs): # noqa: E501
"""get_api_group # noqa: E501
get information of a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_group(async=True)
>>> result = thread.get()
:param async bool
:return: V1APIGroup
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return self.get_api_group_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_api_group_with_http_info(**kwargs) # noqa: E501
return data
def get_api_group_with_http_info(self, **kwargs): # noqa: E501
"""get_api_group # noqa: E501
get information of a group # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.get_api_group_with_http_info(async=True)
>>> result = thread.get()
:param async bool
:return: V1APIGroup
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_group" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf']) # noqa: E501
# Authentication setting
auth_settings = ['BearerToken'] # noqa: E501
return self.api_client.call_api(
'/apis/authorization.k8s.io/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIGroup', # noqa: E501
auth_settings=auth_settings,
async=params.get('async'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"[email protected]"
] | |
dc765d76018ce6cd8317283edeebe0aa3727ded8 | 86b293ef3df8a276c97db49f25e5a9c36822776e | /0x08-python-more_classes/1-rectangle.py | a89c2a3ea1af8ed99a9cdb92d6e584860d74097b | [] | no_license | tayloradam1999/holbertonschool-higher_level_programming | 3c6ceab832ad85448df320a437ddf6c39130f0dd | 70068c87f3058324dca58fc5ef988af124a9a965 | refs/heads/main | 2023-08-19T16:13:04.240756 | 2021-09-28T00:37:03 | 2021-09-28T00:37:03 | 361,856,354 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,179 | py | #!/usr/bin/python3
"""
This module defines a class 'Rectangle' that
defines a rectangle based on '0-rectangle.py'
"""
class Rectangle:
"""Defines a rectangle with a private instance attribute 'width'
and 'height'"""
def __init__(self, width=0, height=0):
self.width = width
self.height = height
@property
def width(self):
"""Property getter for width"""
return self.__width
@width.setter
def width(self, value):
"""Property setter for width that raises Type and Value errors"""
if not isinstance(value, int):
raise TypeError("width must be an integer")
if value < 0:
raise ValueError("width must be >= 0")
self.__width = value
@property
def height(self):
"""Property getter for height"""
return self.__height
@height.setter
def height(self, value):
"""Property setter for height that raises Type and Value errors"""
if not isinstance(value, int):
raise TypeError("height must be an integer")
if value < 0:
raise ValueError("height must be >= 0")
self.__height = value
| [
"[email protected]"
] | |
0d155686d2b7d638897fc2d02dc556dd3da8babb | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /other_time/last_part_and_thing/problem_or_world.py | e42537b5fd907d85a61cab4911bd521a6bc81f4a | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 236 | py |
#! /usr/bin/env python
def feel_eye(str_arg):
early_life(str_arg)
print('find_next_part_about_small_person')
def early_life(str_arg):
print(str_arg)
if __name__ == '__main__':
feel_eye('take_company_at_little_case')
| [
"[email protected]"
] | |
0dac5829221d058f43409e95e5d6afb11cbbcefd | 2e2c9cf0bf1f6218f82e7ecddbec17da49756114 | /day14ddt_opnpyxl/day14_封装Hand_excel/demo5列表推导式.py | 450b10ac6ae56ab647b20b0e36b9b6e0c8cf6283 | [] | no_license | guoyunfei0603/py31 | c3cc946cd9efddb58dad0b51b72402a77e9d7592 | 734a049ecd84bfddc607ef852366eb5b7d16c6cb | refs/heads/master | 2023-03-02T20:50:02.052878 | 2021-02-05T06:17:24 | 2021-02-05T06:17:24 | 279,454,793 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | """
============================
Author:小白31
Time:2020/8/2 22:03
E-mail:[email protected]
============================
"""
# generate a list of the numbers 1-100
# li = []
# for i in range(1,101):
# li.append(i)
#
# print(li)
#
# print(list(range(1,101)))
# generate the list ["学号101","学号102","学号103"..."学号150"] ("学号" means "student ID")
li = []
for i in range(101,151):
li.append("学号{}".format(i))
print(li)
print("-----------------列表推导式------------------")
li2 = ["学号{}".format(i) for i in range(101,151)]
print(li2) | [
"[email protected]"
] | |
013ad4f8eb3ba02e9770aed25cb228d75475289b | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/test/python/030d0c5ebc377ba768e6bdbbc82d64a6cfcbb7d4__main__.py | 030d0c5ebc377ba768e6bdbbc82d64a6cfcbb7d4 | [
"MIT"
] | permissive | harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | null | UTF-8 | Python | false | false | 580 | py | import gi
from ghue.controller import Controller
from ghue.device.hue import HueDeviceManager
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib
import phue
from .application import GHueApplication
if __name__ == '__main__':
GLib.set_application_name("Philips Hue")
controller = Controller()
hue_device_manager = HueDeviceManager(bridge=phue.Bridge('philips-hue.local'),
controller=controller)
controller.add_device_manager(hue_device_manager)
app = GHueApplication(controller)
app.run(None)
| [
"[email protected]"
] | |
a59ca1f99c5f53cd1737d4fcb2670dc70f7ec927 | ca22c441ec0eabf61b3b415fc9be8453855481cf | /rapid/__init__.py | b47d1e365bbc7cdc678e8f7640ddc873208950a7 | [
"MIT"
] | permissive | linhao1998/rapid | cc1d45a119a4c7c3c384ad708c3220226c5c7edd | 1611e47fffac0f61e6c07ad5388eb2368a426f06 | refs/heads/main | 2023-06-16T16:24:57.498657 | 2021-07-08T14:48:47 | 2021-07-08T14:48:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | from gym.envs.registration import register
register(
id='EpisodeInvertedPendulum-v2',
entry_point='rapid.mujoco_envs:EpisodeInvertedPendulumEnv',
max_episode_steps=1000,
reward_threshold=950.0,
)
register(
id='EpisodeSwimmer-v2',
entry_point='rapid.mujoco_envs:EpisodeSwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='DensityEpisodeSwimmer-v2',
entry_point='rapid.mujoco_envs:DensityEpisodeSwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='ViscosityEpisodeSwimmer-v2',
entry_point='rapid.mujoco_envs:ViscosityEpisodeSwimmerEnv',
max_episode_steps=1000,
reward_threshold=360.0,
)
register(
id='EpisodeWalker2d-v2',
max_episode_steps=1000,
entry_point='rapid.mujoco_envs:EpisodeWalker2dEnv',
)
register(
id='EpisodeHopper-v2',
entry_point='rapid.mujoco_envs:EpisodeHopperEnv',
max_episode_steps=1000,
reward_threshold=3800.0,
)
| [
"[email protected]"
] | |
5e8e9e4cc04b87577c04e4b09ce745dd68a85d04 | 706fcc0630a2a1befa32e8d0e9e0a61978dcc947 | /config.py | 7fcc7def7a3c78d71daf7c805bc812e5aabcc542 | [] | no_license | paulgowdy/hal_split | a8f731a5a6e77f605d45de345d1c48bbc774738d | f618a6b1a132e192f4778c237a92c86f24540ca0 | refs/heads/master | 2022-11-17T00:51:37.343265 | 2020-07-07T22:25:49 | 2020-07-07T22:25:49 | 277,934,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | BOARD_SIZE = 9
MAX_NB_SHIPS = 2
NB_SHIP_ACTIONS = 5
#TRAIN_EPISODES = 10
STEPS_PER_EP = 200
GAMMA = 0.99
PPO_BATCHES = 10000000
PPO_STEPS = 32
LOSS_CLIPPING = 0.2
ENTROPY_LOSS = 5e-2
| [
"[email protected]"
] | |
21b9c07f5745ad1954c3ca3af77d74dac67620d0 | bfb113c3076f5b0570953583e7a2321c774d73ea | /venv/Scripts/easy_install-3.8-script.py | 88b18d55d3cfc7b1e4ca627aaafb9710ef93d7c3 | [] | no_license | gsudarshan1990/Training_Projects | 82c48d5492cb4be94db09ee5c66142c370794e1c | 2b7edfafc4e448bd558c034044570496ca68bf2d | refs/heads/master | 2022-12-10T15:56:17.535096 | 2020-09-04T06:02:31 | 2020-09-04T06:02:31 | 279,103,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | #!E:\Training_Projects\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
)
| [
"[email protected]"
] | |
543db5403219ea73e88c510b470c95cc4e6a7ff0 | 18c8a7cb838702cdf1c4d4e9f66b2cffd63130aa | /{{cookiecutter.project_slug}}/config/settings/test.py | 69b8d3be1f0ba85b3bda931e3cd01983f435d82f | [
"MIT"
] | permissive | DiscordApps/launchr | c304008a0d05bdf2d3ed77ada365f80d861f307d | 61049879591ba851ce50d1651abc7193aae4aca0 | refs/heads/master | 2022-02-26T21:22:36.656108 | 2019-10-11T13:05:35 | 2019-10-11T13:05:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,695 | py | """
With these settings, tests run faster.
"""
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
"DJANGO_SECRET_KEY",
default="!!!SET DJANGO_SECRET_KEY!!!",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
"LOCATION": "",
}
}
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
# TEMPLATES
# ------------------------------------------------------------------------------
TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405
(
"django.template.loaders.cached.Loader",
[
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
)
]
TEMPLATES[0]['OPTIONS']['debug'] = True # noqa F405
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# Your stuff...
# ------------------------------------------------------------------------------
| [
"[email protected]"
] | |
63309f5b16e32ac3d1a5c83f1cabc9d2e02f0132 | d05a59feee839a4af352b7ed2fd6cf10a288a3cb | /xlsxwriter/test/workbook/test_write_workbook_view.py | 683d301b318446951f7cca09b7fc061d5ee04506 | [
"BSD-2-Clause-Views"
] | permissive | elessarelfstone/XlsxWriter | 0d958afd593643f990373bd4d8a32bafc0966534 | bb7b7881c7a93c89d6eaac25f12dda08d58d3046 | refs/heads/master | 2020-09-24T06:17:20.840848 | 2019-11-24T23:43:01 | 2019-11-24T23:43:01 | 225,685,272 | 1 | 0 | NOASSERTION | 2019-12-03T18:09:06 | 2019-12-03T18:09:05 | null | UTF-8 | Python | false | false | 4,953 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2019, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...workbook import Workbook
class TestWriteWorkbookView(unittest.TestCase):
"""
Test the Workbook _write_workbook_view() method.
"""
def setUp(self):
self.fh = StringIO()
self.workbook = Workbook()
self.workbook._set_filehandle(self.fh)
def test_write_workbook_view1(self):
"""Test the _write_workbook_view() method"""
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view2(self):
"""Test the _write_workbook_view() method"""
self.workbook.worksheet_meta.activesheet = 1
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" activeTab="1"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view3(self):
"""Test the _write_workbook_view() method"""
self.workbook.worksheet_meta.firstsheet = 1
self.workbook.worksheet_meta.activesheet = 1
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" firstSheet="2" activeTab="1"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view4(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(0, 0)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view5(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(None, None)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view6(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(1073, 644)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view7(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(123, 70)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="1845" windowHeight="1050"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view8(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_size(719, 490)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="10785" windowHeight="7350"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view9(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio()
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view10(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio(34.6)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" tabRatio="346"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view11(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio(0)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" tabRatio="0"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_workbook_view12(self):
"""Test the _write_workbook_view() method"""
self.workbook.set_tab_ratio(100)
self.workbook._write_workbook_view()
exp = """<workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660" tabRatio="1000"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def tearDown(self):
self.workbook.fileclosed = 1
| [
"[email protected]"
] | |
f36bac8cb3c65b13ba04323591cf99f819b50868 | 431c8beacf2b1a54982bf2d06b3dc5cebba87c69 | /buttontest.py | 1b228e5bfeb4437a78e6f55ab31ba9c5574807e5 | [
"MIT"
] | permissive | watrt/micropython-tft-gui | 290c27ba810943033d26214b7f9ec38129fa774e | 1ae9eafccb7084093eb80354e9e30d1f02367221 | refs/heads/master | 2020-12-10T06:49:51.299653 | 2019-05-25T07:30:57 | 2019-05-25T07:30:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,508 | py | # buttontest.py Test/demo of pushbutton classes for Pybboard TFT GUI
# The MIT License (MIT)
#
# Copyright (c) 2016 Peter Hinch
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from constants import *
from ugui import Button, ButtonList, RadioButtons, Checkbox, Label, Screen
import font14
import font10
from tft_local import setup
class ButtonScreen(Screen):
def __init__(self):
super().__init__()
# These tables contain args that differ between members of a set of related buttons
table = [
{'fgcolor' : GREEN, 'text' : 'Yes', 'args' : ('Oui', 2), 'fontcolor' : (0, 0, 0)},
{'fgcolor' : RED, 'text' : 'No', 'args' : ('Non', 2)},
{'fgcolor' : BLUE, 'text' : '???', 'args' : ('Que?', 2), 'fill': False},
{'fgcolor' : GREY, 'text' : 'Rats', 'args' : ('Rats', 2), 'shape' : CLIPPED_RECT,},
]
# Highlight buttons: only tabulate data that varies
table_highlight = [
{'text' : 'P', 'args' : ('p', 2)},
{'text' : 'Q', 'args' : ('q', 2)},
{'text' : 'R', 'args' : ('r', 2)},
{'text' : 'S', 'args' : ('s', 2)},
]
# A Buttonset with two entries
table_buttonset = [
{'fgcolor' : GREEN, 'shape' : CLIPPED_RECT, 'text' : 'Start', 'args' : ('Live', 2)},
{'fgcolor' : RED, 'shape' : CLIPPED_RECT, 'text' : 'Stop', 'args' : ('Die', 2)},
]
table_radiobuttons = [
{'text' : '1', 'args' : ('1', 3)},
{'text' : '2', 'args' : ('2', 3)},
{'text' : '3', 'args' : ('3', 3)},
{'text' : '4', 'args' : ('4', 3)},
]
labels = { 'width' : 70,
'fontcolor' : WHITE,
'border' : 2,
'fgcolor' : RED,
'bgcolor' : (0, 40, 0),
'font' : font14,
}
# Uncomment this line to see 'skeleton' style greying-out:
# Screen.tft.grey_color()
# Labels
self.lstlbl = []
for n in range(5):
self.lstlbl.append(Label((390, 40 * n), **labels))
# Button assortment
x = 0
for t in table:
Button((x, 0), font = font14, callback = self.callback, **t)
x += 70
# Highlighting buttons
x = 0
for t in table_highlight:
Button((x, 60), fgcolor = GREY, fontcolor = BLACK, litcolor = WHITE,
font = font14, callback = self.callback, **t)
x += 70
# Start/Stop toggle
self.bs = ButtonList(self.callback)
self.bs0 = None
for t in table_buttonset: # Buttons overlay each other at same location
button = self.bs.add_button((0, 240), font = font14, fontcolor = BLACK, height = 30, **t)
if self.bs0 is None: # Save for reset button callback
self.bs0 = button
# Radio buttons
x = 0
self.rb = RadioButtons(BLUE, self.callback) # color of selected button
self.rb0 = None
for t in table_radiobuttons:
button = self.rb.add_button((x, 140), font = font14, fontcolor = WHITE,
fgcolor = (0, 0, 90), height = 40, width = 40, **t)
if self.rb0 is None: # Save for reset button callback
self.rb0 = button
x += 60
# Checkbox
self.cb1 = Checkbox((340, 0), callback = self.cbcb, args = (0,))
self.cb2 = Checkbox((340, 40), fillcolor = RED, callback = self.cbcb, args = (1,))
# Reset button
self.lbl_reset = Label((200, 220), font = font10, value = 'Reset also responds to long press')
self.btn_reset = Button((300, 240), font = font14, height = 30, width = 80,
fgcolor = BLUE, shape = RECTANGLE, text = 'Reset', fill = True,
callback = self.cbreset, args = (4,), onrelease = False,
lp_callback = self.callback, lp_args = ('long', 4))
# Quit
self.btn_quit = Button((390, 240), font = font14, height = 30, width = 80,
fgcolor = RED, shape = RECTANGLE, text = 'Quit',
callback = self.quit)
# Enable/Disable toggle
self.bs_en = ButtonList(self.cb_en_dis)
self.tup_en_dis = (self.cb1, self.cb2, self.rb, self.bs) # Items affected by enable/disable button
self.bs_en.add_button((200, 240), font = font14, fontcolor = BLACK, height = 30, width = 90,
fgcolor = GREEN, shape = RECTANGLE, text = 'Disable', args = (True,))
self.bs_en.add_button((200, 240), font = font14, fontcolor = BLACK, height = 30, width = 90,
fgcolor = RED, shape = RECTANGLE, text = 'Enable', args = (False,))
def callback(self, button, arg, idx_label):
self.lstlbl[idx_label].value(arg)
def quit(self, button):
Screen.shutdown()
def cbcb(self, checkbox, idx_label):
if checkbox.value():
self.lstlbl[idx_label].value('True')
else:
self.lstlbl[idx_label].value('False')
def cbreset(self, button, idx_label):
self.cb1.value(False)
self.cb2.value(False)
self.bs.value(self.bs0)
self.rb.value(self.rb0)
self.lstlbl[idx_label].value('Short')
def cb_en_dis(self, button, disable):
for item in self.tup_en_dis:
item.greyed_out(disable)
def test():
print('Testing TFT...')
setup()
Screen.change(ButtonScreen)
test()
| [
"[email protected]"
] | |
f17ed08bf47fc77482e427e5e7c87e52a0ab5d46 | 756d50be34245115ad28e79f4dfceb5516d17225 | /relsearch.py | af268beec2663fa43b51c0f5de63ab395fea2d2b | [] | no_license | abyssonym/gg3 | f1ce189a2a70786da8b2ab78281b39615fc59af2 | 1e6adadc6765d339ebbd7ca650d9b435d56fb366 | refs/heads/master | 2021-01-18T13:51:25.702975 | 2017-11-16T22:26:30 | 2017-11-16T22:26:30 | 34,976,112 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,264 | py | from sys import argv
from string import ascii_lowercase
from shutil import copyfile
filename = argv[1]
outfile = "test.smc"
searchstr = argv[2].lower()
if '.' in searchstr:
searchstr = map(int, searchstr.split('.'))
else:
numdict = dict([(b, a) for (a, b) in enumerate(ascii_lowercase)])
searchstr = [numdict[c] if c in numdict else c for c in searchstr]
print searchstr
f = open(filename, 'r+b')
addr = 0
checkstr = None
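# Relative ("shifted") search: slide a window across the file and, at each
# address, offset the window so its first byte lines up with the pattern's
# first value, then compare the remaining bytes.  Confirmed matches are
# overwritten (with the span's first byte) in a copy of the file, test.smc.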
while True:
f.seek(addr)
bytestr = f.read(len(searchstr))
if len(bytestr) != len(searchstr):
break
bytestr = map(ord, bytestr)
offset = bytestr[0] - searchstr[0]
newbytestr = [i - offset for i in bytestr]
if all([a == b for (a, b) in zip(newbytestr, searchstr)]):
print "%x" % addr
print bytestr
check = None
if not checkstr:
check = raw_input("> ")
if check and check.lower()[0] == 'y':
checkstr = bytestr
if checkstr and all([a == b for (a, b) in zip(checkstr, bytestr)]):
copyfile(filename, outfile)
f2 = open(outfile, 'r+b')
f2.seek(addr)
f2.write("".join([chr(bytestr[0]) for _ in bytestr]))
f2.close()
check = raw_input("> ")
addr += 1
| [
"none"
] | none |
761cee9bc33bc3cdd7d2e32c4faecdbf2ed7481f | bab76d8cf312ee3eae66472b6abd119903e17e8e | /CountAndSay.py | 13420321dce191e20923da4c08ead73e60c68669 | [] | no_license | lixuanhong/LeetCode | 91131825d5eca144a46abe82a2ef04ea1f3ff025 | 48d436701840f8c162829cb101ecde444def2307 | refs/heads/master | 2020-04-05T02:54:52.473259 | 2018-11-07T05:31:30 | 2018-11-07T05:31:30 | 156,494,213 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,485 | py | """
The count-and-say sequence is the sequence of integers with the first five terms as following:
1. 1
2. 11
3. 21
4. 1211
5. 111221
1 is read off as "one 1" or 11.
11 is read off as "two 1s" or 21.
21 is read off as "one 2, then one 1" or 1211.
Given an integer n, generate the nth term of the count-and-say sequence.
Note: Each term of the sequence of integers will be represented as a string.
Example 1:
Input: 1
Output: "1"
Example 2:
Input: 4
Output: "1211"
"""
#Problem summary:
#n=1 returns "1"
#n=2: the result for n=1 is "1" (one 1), so return "11"
#n=3: the result for n=2 is "11" (two 1s), so return "21"
#n=4: the result for n=3 is "21" (one 2 and one 1), so return "1211"
#Given n, continue in the same way.
class Solution(object):
def countAndSay(self, n):
def count(s):
res = ""
count = 1
for idx, value in enumerate(s):
                if idx < len(s) - 1 and s[idx] != s[idx+1]: #start from the first element and compare s[idx] with s[idx+1]; idx < len(s) - 1 keeps idx+1 in range
res += str(count) + value
count = 1
elif idx < len(s) - 1:
count += 1
            res += str(count) + value #append the count for the final run of characters
return res
s = "1"
for i in range(1, n):
            s = count(s) #s starts as "1", so n-1 iterations produce the nth term
return s
obj = Solution()
print(obj.countAndSay(6)) #312211
| [
"[email protected]"
] | |
efe854c65e8348927573faaf27d384468a2f32dc | a90d490bf8a9df04334746acbafa5f8dad20c677 | /recipes/migrations/0009_auto_20160410_2021.py | 6dc28757abd45521c92ee402fe3f6ff6cb9d9162 | [
"MIT"
] | permissive | vanatteveldt/luctor | 8e8ffc20c05cc20a241c677bbe5400a5d71f2882 | 9871fa7afa85f36353b3f4740f73ae3e36d68643 | refs/heads/master | 2023-03-15T20:05:29.220407 | 2023-03-08T22:06:53 | 2023-03-08T22:06:53 | 14,639,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-10 20:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('recipes', '0008_auto_20160405_0044'),
]
operations = [
migrations.CreateModel(
name='Recipe',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('ingredients', models.TextField()),
('instructions', models.TextField()),
],
),
migrations.AlterField(
model_name='lesson',
name='parsed',
field=models.TextField(help_text="Pas hier de opdeling van de kookles in recepten aan. De titel van elk recept wordt aangegeven met ## titel, en ingredienten met | ingredient |. Als je klaar bent klik dan op 'save and continue editing' en op 'view on site'", null=True),
),
migrations.AddField(
model_name='recipe',
name='lesson',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='recipes.Lesson'),
),
]
| [
"[email protected]"
] | |
171f8ff483c3386ba48cab36f6dbfbfd0b5a1471 | 72ab330a358e3d85fb7d3ce29f9da3b9fb1aa6b8 | /quickbooks/objects/timeactivity.py | 459175558f4a1afa9c1e9e0294a3ead28c38c5c9 | [
"MIT"
] | permissive | fuhrysteve/python-quickbooks | d21415c2eb0e758dece4dbdcd3890361781f9ca5 | c017355fa0e9db27001040bf45bc8c48bbd1de45 | refs/heads/master | 2021-01-21T16:04:24.393172 | 2016-01-03T17:50:50 | 2016-01-03T17:50:50 | 48,954,178 | 0 | 0 | null | 2016-01-03T17:18:51 | 2016-01-03T17:18:49 | null | UTF-8 | Python | false | false | 1,302 | py | from six import python_2_unicode_compatible
from .base import Ref, QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin, AttachableRef
@python_2_unicode_compatible
class TimeActivity(QuickbooksManagedObject, QuickbooksTransactionEntity, LinkedTxnMixin):
"""
QBO definition: The TimeActivity entity represents a record of time worked by a vendor or employee.
"""
class_dict = {
"VendorRef": Ref,
"CustomerRef": Ref,
"DepartmentRef": Ref,
"EmployeeRef": Ref,
"ItemRef": Ref,
"ClassRef": Ref,
"AttachableRef": AttachableRef
}
qbo_object_name = "TimeActivity"
def __init__(self):
super(TimeActivity, self).__init__()
self.NameOf = ""
self.TimeZone = ""
self.TxnDate = ""
self.BillableStatus = ""
self.Taxable = False
self.HourlyRate = 0
self.Hours = 0
self.Minutes = 0
self.BreakHours = 0
self.BreakMinutes = 0
self.StartTime = ""
self.EndTime = ""
self.Description = ""
self.VendorRef = None
self.CustomerRef = None
self.DepartmentRef = None
self.EmployeeRef = None
self.ItemRef = None
self.ClassRef = None
self.AttachableRef = None | [
"[email protected]"
] | |
82cbd2304696415df1c92ba0cedca7acc29983b8 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_6/mdlyud002/question2.py | ebdc1dede1c8a1ab523e6c9a607a685c9867f7a7 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | # Yudhi Moodley
# Assignment 6 - Vector Calculator
# 23/04/2014
import math
vectorA = []
vectorB = []
addition = []
dotProduct = []
normalization = []
def vector_calculator():
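    # Expects each vector as three space-separated numbers (e.g. "1 2 3");
    # prints the component-wise sum, the dot product and both magnitudes.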
vector1 = input("Enter vector A:\n")
vectorA = vector1.split(' ') # splits the input
vector2 = input("Enter vector B:\n")
vectorB = vector2.split(' ') # splits the input
    # vector addition: component-wise sum A + B
for i in range (3):
addNum = eval(vectorA[i]) + eval(vectorB[i])
addition.append(addNum)
print("A+B = [" + str(addition[0]) + ", " + str(addition[1]) + ", " + str(addition[2]) + "]")
    # dot product: A.B = sum of the component-wise products
for i in range (3):
multNum = eval(vectorA[i]) * eval(vectorB[i])
dotProduct.append(multNum)
product = 0
for i in range (3):
product += dotProduct[i]
print("A.B = " + str(product))
    # vector magnitudes: |A| = sqrt(x^2 + y^2 + z^2), printed to 2 decimal places
aSum = eval(vectorA[0])**2 + eval(vectorA[1])**2 + eval(vectorA[2])**2
aRoot = ("{0:.2f}".format(math.sqrt(aSum)))
print("|A| =",aRoot)
bSum = eval(vectorB[0])**2 + eval(vectorB[1])**2 + eval(vectorB[2])**2
bRoot = ("{0:.2f}".format(math.sqrt(bSum)))
print("|B| =",bRoot)
vector_calculator() | [
"[email protected]"
] | |
6f747b5fb9f472c6c4e89b6ca3610f1726436bee | a5fc521abe901fe9db46a605ec0ba71635bc308b | /managment/migrations/0001_initial.py | 059c4d114c5c4f2ebd8c9b576271218cb6f43401 | [] | no_license | revankarrajat/rms | 020b3736fb0855e547ffe7b3f91eae609cee80c7 | ed68bf427ab5612ae7f3a5308cd8075e19fc1daf | refs/heads/master | 2020-04-12T18:21:32.834786 | 2018-12-24T11:01:57 | 2018-12-24T11:01:57 | 162,676,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,218 | py | # Generated by Django 2.1.4 on 2018-12-14 06:29
from django.conf import settings
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0009_alter_user_last_name_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')),
('first_name', models.CharField(blank=True, max_length=30, verbose_name='first name')),
('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('is_owner', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'user',
'verbose_name_plural': 'users',
'abstract': False,
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.CreateModel(
name='owner',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('num_properties', models.IntegerField(default=0)),
('owner_name', models.CharField(max_length=30)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='property',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('description', models.TextField(default='')),
('price', models.IntegerField()),
('location', models.CharField(max_length=50)),
('num_views', models.IntegerField(default=0)),
('avg_rating', models.IntegerField(default=0)),
('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to='managment.owner')),
],
),
migrations.CreateModel(
name='review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('rating', models.IntegerField(default=0)),
('comment', models.CharField(max_length=100)),
('prop_id', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='id+', to='managment.property')),
],
),
migrations.CreateModel(
name='visitor',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('profile', models.TextField()),
('pref_location', models.CharField(max_length=30)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='review',
name='visitor_id',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='id+', to='managment.visitor'),
),
]
| [
"[email protected]"
] | |
d4a8da26e252085f122e551fb397f2999bd76eec | 17c9bdd9f740f5549c2ae95c22d0f42907af6bf4 | /beautiful.py | 74906ad989a3ec033998b8f2093f95878b9d36ae | [] | no_license | vim-scripts/beautiful-pastebin | e8a2510aaeff1d782f7fd7552c5475edc1f9a380 | 854f3373b0b8e52a697e9856486906311efd138c | refs/heads/master | 2021-01-13T02:14:33.027077 | 2011-06-08T00:00:00 | 2011-06-23T22:51:24 | 1,865,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 646 | py | #File: beautiful.py
#Author : Aman Agarwal <[email protected]>
#License : MIT
#version 1.0
#Dependencies : BeautifulSoup <http://www.crummy.com/software/BeautifulSoup/>
#
import urllib2
from BeautifulSoup import BeautifulSoup
import sys
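# Usage: python beautiful.py <pastebin URL>
# Fetches the page, pulls the pasted code out of the 'code_frame' div and
# decodes HTML entities so plain text can be printed.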
data = urllib2.urlopen(sys.argv[1]).read()
soup = BeautifulSoup(''.join(data))
code = soup('div', {'id': 'code_frame'})
soup = BeautifulSoup(''.join(str(code[0]).strip()))
code_text = soup.div.div
text = ''.join(BeautifulSoup(str(code_text).strip()).findAll(text=True))
code_for_vim = BeautifulSoup(str(text).strip(), convertEntities=BeautifulSoup.HTML_ENTITIES)
print code_for_vim
#print sys.argv[1]
| [
"[email protected]"
] | |
4b18bbafce196b41f74a02a0ded69010dc374a94 | 569db39ea53d67b695d5573e567e1b85cd83176f | /testcases/tutu/Android/AITest/__init__.py | 9596f08c4a577f19f599f5dd0c5ffe3af31631ff | [] | no_license | 1weifang/tutuandroidautotest | f38d9c86023e4d3857b04a8860f9d5ec810c485d | f3fb49eacee27682f478cb8b27a5e8f38d62e2b1 | refs/heads/master | 2022-11-15T04:48:25.333206 | 2020-07-14T03:38:16 | 2020-07-14T03:38:16 | 279,472,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# @Time : 2019/11/19 9:54
# @Author : Durat
# @Email : [email protected]
# @File : __init__.py.py
# @Software: PyCharm | [
"1qaz!QAZ1"
] | 1qaz!QAZ1 |
b212f30ce3a4c9af92e433cec3f79e72b4586b9f | c71a1053315e9277daf01f2b6d3b7b3f9cc77075 | /menu/urls.py | 7755a066a01883ca36d599c7d6927de8a072fdae | [] | no_license | ingafter60/dinner | f59bb42135d5dd8eb9a42bf665ea1dfc30e01937 | 08b4a33d899ffa45bb7f56b58cfef97703bd2083 | refs/heads/master | 2020-07-03T20:28:27.635316 | 2019-08-18T03:14:44 | 2019-08-18T03:14:44 | 202,040,200 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | # menu urls.py
from django.urls import path
from menu import views
app_name = 'menu'
urlpatterns = [
path('', views.menuList, name='menuList'),
path('<slug:slug>', views.menuDetail, name='menuDetail'),
] | [
"[email protected]"
] | |
5b9b16d3f350192012b8a8d223b402d78902b5c8 | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /Spacy/source2.7/spacy/lang/id/tokenizer_exceptions.py | 3bba57e4cbd39db28e872da9aa8cb1051962e24a | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 1,722 | py | # coding: utf8
from __future__ import unicode_literals
import regex as re
from ._tokenizer_exceptions_list import ID_BASE_EXCEPTIONS
from ..tokenizer_exceptions import URL_PATTERN
from ...symbols import ORTH
_exc = {}
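# Register every base exception together with its title-, upper- and
# lower-cased variants (and per-part title/upper variants for hyphenated forms).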
for orth in ID_BASE_EXCEPTIONS:
_exc[orth] = [{ORTH: orth}]
orth_title = orth.title()
_exc[orth_title] = [{ORTH: orth_title}]
orth_caps = orth.upper()
_exc[orth_caps] = [{ORTH: orth_caps}]
orth_lower = orth.lower()
_exc[orth_lower] = [{ORTH: orth_lower}]
if '-' in orth:
orth_title = '-'.join([part.title() for part in orth.split('-')])
_exc[orth_title] = [{ORTH: orth_title}]
orth_caps = '-'.join([part.upper() for part in orth.split('-')])
_exc[orth_caps] = [{ORTH: orth_caps}]
for orth in [
"'d", "a.m.", "Adm.", "Bros.", "co.", "Co.", "Corp.", "D.C.", "Dr.", "e.g.",
"E.g.", "E.G.", "Gen.", "Gov.", "i.e.", "I.e.", "I.E.", "Inc.", "Jr.",
"Ltd.", "Md.", "Messrs.", "Mo.", "Mont.", "Mr.", "Mrs.", "Ms.", "p.m.",
"Ph.D.", "Rep.", "Rev.", "Sen.", "St.", "vs.",
"B.A.", "B.Ch.E.", "B.Sc.", "Dr.", "Dra.", "Drs.", "Hj.", "Ka.", "Kp.",
"M.Ag.", "M.Hum.", "M.Kes,", "M.Kom.", "M.M.", "M.P.", "M.Pd.", "M.Sc.",
"M.Si.", "M.Sn.", "M.T.", "M.Th.", "No.", "Pjs.", "Plt.", "R.A.", "S.Ag.",
"S.E.", "S.H.", "S.Hut.", "S.K.M.", "S.Kedg.", "S.Kedh.", "S.Kom.",
"S.Pd.", "S.Pol.", "S.Psi.", "S.S.", "S.Sos.", "S.T.", "S.Tekp.", "S.Th.",
"a.l.", "a.n.", "a.s.", "b.d.", "d.a.", "d.l.", "d/h", "dkk.", "dll.",
"dr.", "drh.", "ds.", "dsb.", "dst.", "faks.", "fax.", "hlm.", "i/o",
"n.b.", "p.p." "pjs.", "s.d.", "tel.", "u.p.",
]:
_exc[orth] = [{ORTH: orth}]
TOKENIZER_EXCEPTIONS = _exc
| [
"[email protected]"
] | |
51dd65811d72d74966faf28d8b397f1eb74579b0 | ddda55fcfc84ac5cd78cfc5c336a3df0b9096157 | /components/ble/mynewt-nimble/docs/conf.py | 629b8a4f14b0e686d9f12357cc72d9f04ee83c5c | [
"LicenseRef-scancode-gary-s-brown",
"BSD-3-Clause",
"Apache-2.0"
] | permissive | liu-delong/lu_xing_xiang_one_os | 701b74fceb82dbb2806518bfb07eb85415fab43a | 0c659cb811792f2e190d5a004a531bab4a9427ad | refs/heads/master | 2023-06-17T03:02:13.426431 | 2021-06-28T08:12:41 | 2021-06-28T08:12:41 | 379,661,507 | 2 | 2 | Apache-2.0 | 2021-06-28T10:08:10 | 2021-06-23T16:11:54 | C | UTF-8 | Python | false | false | 5,476 | py | # -*- coding: utf-8 -*-
#
# Mynewt documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 10 11:33:44 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('_ext'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'breathe', 'sphinx.ext.todo',
'sphinx.ext.extlinks'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'NimBLE Bluetooth Stack'
copyright = u'Copyright © 2018 The Apache Software Foundation, Licensed under the Apache License, Version 2.0 Apache and the Apache feather logo are trademarks of The Apache Software Foundation.'
author = u'The Apache Software Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0-b1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'README.rst', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'none'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
html_theme_path = []
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html',
'searchbox.html',
'donate.html',
]
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Mynewtdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Mynewt.tex', u'NimBLE Bluetooth Stack',
u'The Apache Software Foundation', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'mynewt', u'Mynewt Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Mynewt', u'NimBLE Bluetooth Stack',
author, 'Mynewt', 'One line description of project.',
'Miscellaneous'),
]
breathe_projects = {
"mynewt": "_build/xml"
}
breathe_default_project = "mynewt"
breathe_domain_by_extension = {
"h" : "c",
}
| [
"[email protected]"
] | |
9b209805bbc3e5381db705ee82f66c38d2e5ef39 | a9243f735f6bb113b18aa939898a97725c358a6d | /0.15/_downloads/plot_compute_rt_average.py | fd3b17129bcbbdb519a78a19a35ccce09b59e38c | [] | permissive | massich/mne-tools.github.io | 9eaf5edccb4c35831400b03278bb8c2321774ef2 | 95650593ba0eca4ff8257ebcbdf05731038d8d4e | refs/heads/master | 2020-04-07T08:55:46.850530 | 2019-09-24T12:26:02 | 2019-09-24T12:26:02 | 158,233,630 | 0 | 0 | BSD-3-Clause | 2018-11-19T14:06:16 | 2018-11-19T14:06:16 | null | UTF-8 | Python | false | false | 1,912 | py | """
========================================================
Compute real-time evoked responses using moving averages
========================================================
This example demonstrates how to connect to an MNE Real-time server
using the RtClient and use it together with RtEpochs to compute
evoked responses using moving averages.
Note: The MNE Real-time server (mne_rt_server), which is part of mne-cpp,
has to be running on the same computer.
"""
# Authors: Martin Luessi <[email protected]>
# Mainak Jas <[email protected]>
#
# License: BSD (3-clause)
import matplotlib.pyplot as plt
import mne
from mne.datasets import sample
from mne.realtime import RtEpochs, MockRtClient
print(__doc__)
# Fiff file to simulate the realtime client
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
raw = mne.io.read_raw_fif(raw_fname, preload=True)
# select gradiometers
picks = mne.pick_types(raw.info, meg='grad', eeg=False, eog=True,
stim=True, exclude=raw.info['bads'])
# select the left-auditory condition
event_id, tmin, tmax = 1, -0.2, 0.5
# create the mock-client object
rt_client = MockRtClient(raw)
# create the real-time epochs object
rt_epochs = RtEpochs(rt_client, event_id, tmin, tmax, picks=picks,
decim=1, reject=dict(grad=4000e-13, eog=150e-6))
# start the acquisition
rt_epochs.start()
# send raw buffers
rt_client.send_data(rt_epochs, picks, tmin=0, tmax=150, buffer_size=1000)
for ii, ev in enumerate(rt_epochs.iter_evoked()):
print("Just got epoch %d" % (ii + 1))
ev.pick_types(meg=True, eog=False) # leave out the eog channel
if ii == 0:
evoked = ev
else:
evoked = mne.combine_evoked([evoked, ev], weights='nave')
plt.clf() # clear canvas
evoked.plot(axes=plt.gca()) # plot on current figure
plt.pause(0.05)
| [
"[email protected]"
] | |
3da13c58c4199d31c98e3b0c81e7ab5d55abad24 | a873f3cd46a10ad879fc56d78e1f533d8bf486c0 | /z_python-stu1/first/廖雪峰/迭代.py | 4115de44525792f329471d5da4b183b906436215 | [] | no_license | shenhaiyu0923/resful | d0301b39363e6b3d3659f62fa4a9b2532ebcd225 | 1e66cae7d68fa231794776953cc1a5e999bf36c6 | refs/heads/master | 2021-07-08T20:46:57.300298 | 2021-06-01T08:17:27 | 2021-06-01T08:17:27 | 244,308,016 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 711 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = '[email protected]'
def findMinAndMax(L):
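    # Single pass with enumerate: the first element seeds both extremes and
    # later elements update them (the local names shadow built-in min/max).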
if len(L) == 0:
return (None, None)
else:
for i, x in enumerate(L):
if i == 0:
min = max = x
else:
if x > max:
max = x
if x < min:
min = x
return (min, max)
# Tests
if findMinAndMax([]) != (None, None):
    print('Test failed!')
elif findMinAndMax([7]) != (7, 7):
    print('Test failed!')
elif findMinAndMax([7, 1]) != (1, 7):
    print('Test failed!')
elif findMinAndMax([7, 1, 3, 9, 5]) != (1, 9):
    print('Test failed!')
else:
    print('Test passed!')
| [
"[email protected]"
] | |
7cffd984d55e0708e92416f0d126056f75c33470 | ec062c479c09ce250c3e23ff47f144f423b55648 | /py/Lib/site-packages/azure/mgmt/compute/compute/v2016_04_30_preview/models/virtual_machine_paged.py | f4ce0dcbeaf516525bd3f7441a2a98148efea77a | [] | no_license | betisb/InputParser | c442ffc877a941bd5b7aac4d843a4d21594d8e96 | 68747d69e04d126f7ea679f93a291a6de244a95f | refs/heads/master | 2021-07-13T05:05:19.479329 | 2019-05-28T16:56:53 | 2019-05-28T16:56:53 | 188,087,891 | 0 | 2 | null | 2020-07-24T00:14:31 | 2019-05-22T17:52:13 | Python | UTF-8 | Python | false | false | 978 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class VirtualMachinePaged(Paged):
"""
A paging container for iterating over a list of :class:`VirtualMachine <azure.mgmt.compute.compute.v2016_04_30_preview.models.VirtualMachine>` object
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[VirtualMachine]'}
}
def __init__(self, *args, **kwargs):
super(VirtualMachinePaged, self).__init__(*args, **kwargs)
| [
"[email protected]"
] | |
3017eff3a8d21fac6867ed2bc8da08b705f9d229 | cfc415c9b247521b872bf86fd22b55b4a3ff2ee3 | /tensorflow/tools/compatibility/tf_upgrade_v2_test.py | 4b83d50036b6c4e9572b40d7b6377685f94dacc8 | [
"Apache-2.0"
] | permissive | chengmengli06/tensorflow | f7fdb51d709e87b302d60a6dc9391cb6bbaaa3e1 | e81d0c5499eab1ae2d301c5caa128e0b69b0289b | refs/heads/master | 2021-06-24T21:54:28.571878 | 2018-11-16T06:45:48 | 2018-11-16T06:45:48 | 157,813,648 | 0 | 0 | Apache-2.0 | 2018-11-16T04:42:57 | 2018-11-16T04:42:57 | null | UTF-8 | Python | false | false | 6,225 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf 2.0 upgrader."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import six
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test as test_lib
from tensorflow.tools.compatibility import ast_edits
from tensorflow.tools.compatibility import tf_upgrade_v2
class TestUpgrade(test_util.TensorFlowTestCase):
"""Test various APIs that have been changed in 2.0.
We also test whether a converted file is executable. test_file_v1_10.py
aims to exhaustively test that API changes are convertible and actually
work when run with current TensorFlow.
"""
def _upgrade(self, old_file_text):
in_file = six.StringIO(old_file_text)
out_file = six.StringIO()
upgrader = ast_edits.ASTCodeUpgrader(tf_upgrade_v2.TFAPIChangeSpec())
count, report, errors = (
upgrader.process_opened_file("test.py", in_file,
"test_out.py", out_file))
return count, report, errors, out_file.getvalue()
def testParseError(self):
_, report, unused_errors, unused_new_text = self._upgrade(
"import tensorflow as tf\na + \n")
self.assertTrue(report.find("Failed to parse") != -1)
def testReport(self):
text = "tf.assert_near(a)\n"
_, report, unused_errors, unused_new_text = self._upgrade(text)
# This is not a complete test, but it is a sanity test that a report
# is generating information.
self.assertTrue(report.find("Renamed function `tf.assert_near` to "
"`tf.debugging.assert_near`"))
def testRename(self):
text = "tf.conj(a)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.math.conj(a)\n")
text = "tf.rsqrt(tf.log_sigmoid(3.8))\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.math.rsqrt(tf.math.log_sigmoid(3.8))\n")
def testRenameConstant(self):
text = "tf.MONOLITHIC_BUILD\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "tf.sysconfig.MONOLITHIC_BUILD\n")
text = "some_call(tf.MONOLITHIC_BUILD)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text, "some_call(tf.sysconfig.MONOLITHIC_BUILD)\n")
def testRenameArgs(self):
text = ("tf.nn.pool(input_a, window_shape_a, pooling_type_a, padding_a, "
"dilation_rate_a, strides_a, name_a, data_format_a)\n")
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text,
("tf.nn.pool(input=input_a, window_shape=window_shape_a,"
" pooling_type=pooling_type_a, padding=padding_a, "
"dilations=dilation_rate_a, strides=strides_a, "
"name=name_a, data_format=data_format_a)\n"))
def testReorder(self):
text = "tf.boolean_mask(a, b, c, d)\n"
_, unused_report, unused_errors, new_text = self._upgrade(text)
self.assertEqual(new_text,
"tf.boolean_mask(tensor=a, mask=b, name=c, axis=d)\n")
def testLearningRateDecay(self):
for decay in ["tf.train.exponential_decay", "tf.train.piecewise_constant",
"tf.train.polynomial_decay", "tf.train.natural_exp_decay",
"tf.train.inverse_time_decay", "tf.train.cosine_decay",
"tf.train.cosine_decay_restarts",
"tf.train.linear_cosine_decay",
"tf.train.noisy_linear_cosine_decay"]:
text = "%s(a, b)\n" % decay
_, report, errors, new_text = self._upgrade(text)
self.assertEqual(text, new_text)
self.assertEqual(errors, ["test.py:1: %s requires manual check." % decay])
self.assertIn("%s has been changed" % decay, report)
def testEstimatorLossReductionChange(self):
classes = [
"LinearClassifier", "LinearRegressor", "DNNLinearCombinedClassifier",
"DNNLinearCombinedRegressor", "DNNRegressor", "DNNClassifier",
"BaselineClassifier", "BaselineRegressor"
]
for c in classes:
ns = "tf.estimator." + c
text = ns + "(a, b)"
_, report, errors, new_text = self._upgrade(text)
self.assertEqual(text, new_text)
self.assertEqual(errors, ["test.py:1: %s requires manual check." % ns])
self.assertIn("loss_reduction has been changed", report)
def testCountNonZeroChanges(self):
text = (
"tf.math.count_nonzero(input_tensor=input, dtype=dtype, name=name, "
"reduction_indices=axis, keep_dims=keepdims)\n"
)
_, unused_report, unused_errors, new_text = self._upgrade(text)
expected_text = (
"tf.math.count_nonzero(input=input, dtype=dtype, name=name, "
"axis=axis, keepdims=keepdims)\n"
)
self.assertEqual(new_text, expected_text)
class TestUpgradeFiles(test_util.TensorFlowTestCase):
def testInplace(self):
"""Check to make sure we don't have a file system race."""
temp_file = tempfile.NamedTemporaryFile("w", delete=False)
original = "tf.conj(a)\n"
upgraded = "tf.math.conj(a)\n"
temp_file.write(original)
temp_file.close()
upgrader = ast_edits.ASTCodeUpgrader(tf_upgrade_v2.TFAPIChangeSpec())
upgrader.process_file(temp_file.name, temp_file.name)
self.assertAllEqual(open(temp_file.name).read(), upgraded)
os.unlink(temp_file.name)
if __name__ == "__main__":
test_lib.main()
| [
"[email protected]"
] | |
80810bf8538a097220492556fb02df2122426b9e | e4007870b4d75ba23c2f12ac6646f272cf17865c | /FFMPEG_Scripts/Video_Drawer.py | ff79049fa690bf27f94f3a7db415cde233945c49 | [
"MIT"
] | permissive | knut0815/PythonUtility | 385ce332ff34501be7ad21ac7948eb609770e72a | 0062e1e60dc151776b963d13bc4c1763eb90d333 | refs/heads/master | 2023-01-10T09:58:14.619531 | 2020-11-10T12:22:47 | 2020-11-10T12:22:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,420 | py | import sys
import platform
import subprocess
import os
horizontal_center = 'x=(w-tw)/2'
horizontal_right_margin = 'x=(w-tw)'
vertical_bottom_margin = 'y=h-(2*lh)'
class VideoDrawer(object):
@staticmethod
def _get_font_ifp():
        if platform.system().lower() == 'windows':  # platform.system() returns 'Windows', so compare case-insensitively
font_ifp = 'C:\\Windows\\Fonts\\Arial.ttf'
else:
font_ifp = '/usr/share/fonts/truetype/freefont/FreeMono.ttf'
return font_ifp
@staticmethod
def _get_font_ifp_option():
return 'fontfile=' + VideoDrawer._get_font_ifp()
@staticmethod
def _get_font_size_option(size):
return 'fontsize=' + str(size)
@staticmethod
def _get_color_option(color):
return 'fontcolor=' + color
@staticmethod
def _get_activate_box_option():
return 'box=1'
@staticmethod
def _get_box_color_option(color):
return 'boxcolor=' + color
@staticmethod
def _get_box_with_option(width):
return 'boxborderw=' + str(width)
@staticmethod
def _get_text_option(text):
return 'text=\'' + str(text) + '\''
@staticmethod
def _get_frame_number_text_option():
return 'text=\'%{frame_num}\''
@staticmethod
def _get_start_number_option(start_number):
return 'start_number=' + str(start_number)
@staticmethod
def _get_enable_between_option(start, end, values_in_frames=True):
# This option is used to show some string only in a specific subpart of the video
# http://ffmpeg.org/ffmpeg-all.html#Expression-Evaluation
# n: the number of current processed frame, starting from 0
# t: the number of current processed frame, starting from 0
if values_in_frames:
test_variable = 'n'
else:
test_variable = 't'
return 'enable=\'between(' + test_variable + ',' + str(start) + ',' + str(end) + ')\''
@staticmethod
def _create_colon_separated_draw_options(option_list):
option_str = ''
option_str += '"' # prepend quote
option_str += 'drawtext='
for ele in option_list[:-1]:
option_str += ele + ': '
option_str += option_list[-1]
option_str += '"' # append quote
return option_str
@staticmethod
def add_text_to_video(ifp,
ofp,
text_time_interval_triples_list=None,
add_frame_numbers=True):
options = ''
options += ' ' + '-i'
options += ' ' + ifp
options += ' ' + '-vf'
font_ifp_option = VideoDrawer._get_font_ifp_option()
x_pos_option = horizontal_center
y_pos_option = vertical_bottom_margin
font_color_option = VideoDrawer._get_color_option('black')
font_size_option = VideoDrawer._get_font_size_option(20)
active_box_option = VideoDrawer._get_activate_box_option()
box_color_option = VideoDrawer._get_box_color_option('green')
box_width_option = VideoDrawer._get_box_with_option(5)
if text_time_interval_triples_list is not None:
draw_text_options = ''
for index, text_with_time_stamp in enumerate(text_time_interval_triples_list):
text_option = VideoDrawer._get_text_option(text_with_time_stamp[0])
start = text_with_time_stamp[1]
end = text_with_time_stamp[2]
enable_between_option = VideoDrawer._get_enable_between_option(start, end)
single_draw_options = VideoDrawer._create_colon_separated_draw_options(
[font_ifp_option,
text_option,
enable_between_option,
x_pos_option,
y_pos_option,
font_color_option,
font_size_option,
active_box_option,
box_color_option,
box_width_option
])
if index > 0:
draw_text_options += ',' # draw commands must be comma separated
draw_text_options += single_draw_options
options += ' ' + draw_text_options
if add_frame_numbers:
frame_number_text_option = VideoDrawer._get_frame_number_text_option()
start_number_option = VideoDrawer._get_start_number_option(0)
x_pos_option = horizontal_right_margin
draw_options = VideoDrawer._create_colon_separated_draw_options(
[font_ifp_option,
frame_number_text_option,
start_number_option,
x_pos_option,
y_pos_option,
font_color_option,
font_size_option,
active_box_option,
box_color_option,
box_width_option
])
if text_time_interval_triples_list is not None:
options += ',' + draw_options # draw commands must be comma separated
else:
options += ' ' + draw_options
options += ' ' + '-c:a'
options += ' ' + 'copy'
call_str = 'ffmpeg' + ' ' + options + ' ' + ofp
print('call_str', call_str)
subprocess.call(call_str, shell=True)
# Make sure the file has been created
assert os.path.isfile(ofp)
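    # Example usage (hypothetical file names):
    #   VideoDrawer.add_text_to_video('input.mp4', 'annotated.mp4',
    #                                 text_time_interval_triples_list=[('intro', 0, 120)],
    #                                 add_frame_numbers=True)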
| [
"[email protected]"
] | |
f36b312afc18e9f6b1941362c2dfbc66574e3deb | 98b63e3dc79c75048163512c3d1b71d4b6987493 | /tensorflow/python/keras/tests/memory_test.py | 465df84d6fef375a6f515ec1eb64815e4b74ec3f | [
"Apache-2.0"
] | permissive | galeone/tensorflow | 11a4e4a3f42f4f61a65b432c429ace00401c9cc4 | 1b6f13331f4d8e7fccc66bfeb0b066e77a2b7206 | refs/heads/master | 2022-11-13T11:56:56.143276 | 2020-11-10T14:35:01 | 2020-11-10T14:35:01 | 310,642,488 | 21 | 12 | Apache-2.0 | 2020-11-06T16:01:03 | 2020-11-06T16:01:02 | null | UTF-8 | Python | false | false | 2,599 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for memory leaks in eager execution.
It is possible that this test suite will eventually become flaky due to taking
too long to run (since the tests iterate many times), but for now they are
helpful for finding memory leaks since not all PyObject leaks are found by
introspection (test_util decorators). Please be careful adding new tests here.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python import keras
from tensorflow.python.eager import backprop
from tensorflow.python.eager.memory_tests import memory_test_util
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class SingleLayerNet(keras.Model):
"""Simple keras model used to ensure that there are no leaks."""
def __init__(self):
super(SingleLayerNet, self).__init__()
self.fc1 = keras.layers.Dense(5)
def call(self, x):
return self.fc1(x)
class MemoryTest(test.TestCase):
def testMemoryLeakInSimpleModelForwardOnly(self):
if not memory_test_util.memory_profiler_is_available():
self.skipTest("memory_profiler required to run this test")
inputs = array_ops.zeros([32, 100], dtypes.float32)
net = SingleLayerNet()
def f():
with backprop.GradientTape():
net(inputs)
memory_test_util.assert_no_leak(f)
def testMemoryLeakInSimpleModelForwardAndBackward(self):
if not memory_test_util.memory_profiler_is_available():
self.skipTest("memory_profiler required to run this test")
inputs = array_ops.zeros([32, 100], dtypes.float32)
net = SingleLayerNet()
def f():
with backprop.GradientTape() as tape:
result = net(inputs)
tape.gradient(result, net.variables)
del tape
memory_test_util.assert_no_leak(f)
if __name__ == "__main__":
test.main()
| [
"[email protected]"
] | |
7eb105d6e6a9cab22984c6db01666070c56c508b | 2bf76e30ad517adf8805a9fdb22e60c4c010eea3 | /ipypandex/tests/echo_pandas.py | 4c35a62cf638ddd4aa4f4bf2ae5ef84c977c07cf | [
"BSD-3-Clause"
] | permissive | isabella232/ipypandex | 2be06d8be96280f110ffd063eb7f8c81a6d4dc8c | fc1023266a7e3e784595f296629f4fd827fb7d0f | refs/heads/main | 2023-02-11T20:15:02.731204 | 2021-01-06T00:41:44 | 2021-01-06T00:41:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | import pandas as pd
from IPython.utils.capture import capture_output
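# `display` is the IPython display hook, so this snippet is meant to run in an
# IPython/Jupyter session; capture_output() records the rich outputs so the
# first output's data bundle can be printed below.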
with capture_output() as c:
display(pd.DataFrame(data={'col1': [1, 2], 'col2': [3, 4]}))
print(c.outputs[0].data)
| [
"[email protected]"
] | |
aeb34e6f1e8723cc6424c196cb99ef779f507e4d | c2081f368428e5fb684e08863ecac4f37f5717e5 | /jobapplicant/wsgi.py | 045dbc1851268e7d082365cdb2495383f2d755be | [] | no_license | agimenezpy/jobapplicant | 9148e80e3e535f7ea956992ba9c7fc0ea472b0e8 | 99ac06464a9137061c89fea0389b7c95422c29f2 | refs/heads/master | 2020-06-05T08:48:25.222470 | 2013-10-04T00:42:33 | 2013-10-04T00:42:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | """
WSGI config for jobapplicant project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jobapplicant.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| [
"[email protected]"
] | |
9be6c9b60a6871fc27eb6f3f9518c33a42785596 | c6c6c32547ba334f75a5cc938a9c07e708670365 | /buses/migrations/0002_alter_busbooking_bus_id.py | af6ea77ae0304b32d0b5ac41d86b6f261725998a | [] | no_license | wilsonmwiti/SmartTravel | e693acb0b323d1be9ae1c58917a32ef6a418448d | 9513f0f15745f9e73e70680c5d9e5798de85be7c | refs/heads/master | 2023-09-01T14:16:28.471037 | 2021-10-14T10:55:20 | 2021-10-14T10:55:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # Generated by Django 3.2.8 on 2021-10-13 05:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('buses', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='busbooking',
name='bus_id',
field=models.CharField(max_length=100),
),
]
| [
"[email protected]"
] | |
03a6cc2e483937f89f007060d6086be7425f4626 | 4e9d3ba19a694c25fdbfd4ed1c6ab66339674beb | /python/GafferUI/PopupWindow.py | b13e219fa456b6bdee63ed65695f9d5a99197b0f | [
"BSD-3-Clause"
] | permissive | mcanthony/gaffer | 0a6af7856b1c2ecae5620a9f2bd04316f2df271c | 32189357fda4bc4b2e5367a06af64928c479ffaf | refs/heads/master | 2021-01-18T19:59:29.212027 | 2015-10-26T20:43:45 | 2015-10-26T20:43:45 | 45,088,868 | 2 | 0 | null | 2015-10-28T04:30:06 | 2015-10-28T04:30:04 | null | UTF-8 | Python | false | false | 7,109 | py | ##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
QtCore = GafferUI._qtImport( "QtCore" )
QtGui = GafferUI._qtImport( "QtGui" )
class PopupWindow( GafferUI.Window ) :
def __init__( self, title="GafferUI.Window", borderWidth=8, child=None, sizeMode=GafferUI.Window.SizeMode.Automatic, closeOnLeave=False, **kw ) :
GafferUI.Window.__init__( self, title, borderWidth, child=child, sizeMode=sizeMode, **kw )
self._qtWidget().setWindowFlags( self._qtWidget().windowFlags() | QtCore.Qt.FramelessWindowHint | QtCore.Qt.Tool )
self._qtWidget().setAttribute( QtCore.Qt.WA_TranslucentBackground )
self._qtWidget().setMouseTracking( True )
self._qtWidget().paintEvent = Gaffer.WeakMethod( self.__paintEvent )
self._qtWidget().mousePressEvent = Gaffer.WeakMethod( self.__mousePressEvent )
self._qtWidget().mouseReleaseEvent = Gaffer.WeakMethod( self.__mouseReleaseEvent )
self._qtWidget().mouseMoveEvent = Gaffer.WeakMethod( self.__mouseMoveEvent )
self._qtWidget().enterEvent = Gaffer.WeakMethod( self.__enterEvent )
self._qtWidget().leaveEvent = Gaffer.WeakMethod( self.__leaveEvent )
# setVisible() will animate this to 1
self._qtWidget().setWindowOpacity( 0 )
self.__visibilityAnimation = None
self.__dragOffset = None
self.__cursor = None
self.setCloseOnLeave( closeOnLeave )
## Reimplemented from base class to make nice opacity animations
def setVisible( self, visible ) :
if visible == self.getVisible() :
return
self.__visibilityAnimation = _VisibilityAnimation( self._qtWidget(), visible )
self.__visibilityAnimation.start()
## Reimplemented from base class to account for nice opacity animations
def getVisible( self ) :
result = GafferUI.Window.getVisible( self )
# account for the fact that we might be animating towards invisibility
if self.__visibilityAnimation is not None and self.__visibilityAnimation.state() == self.__visibilityAnimation.Running :
if GafferUI._Variant.fromVariant( self.__visibilityAnimation.endValue() ) == 0 :
result = False
return result
def setCloseOnLeave( self, closeOnLeave ) :
self.__closeOnLeave = closeOnLeave
def getCloseOnLeave( self ) :
return self.__closeOnLeave
def __mousePressEvent( self, event ) :
if event.button() == QtCore.Qt.LeftButton :
if self.__cursor == QtCore.Qt.SizeFDiagCursor :
size = self._qtWidget().size()
self.__dragOffset = QtCore.QPoint( size.width(), size.height() ) - event.globalPos()
else :
self.__dragOffset = self._qtWidget().frameGeometry().topLeft() - event.globalPos()
def __mouseReleaseEvent( self, event ) :
if event.button() == QtCore.Qt.LeftButton :
self.__dragOffset = None
self.__setCursorFromPosition( event )
def __mouseMoveEvent( self, event ) :
if event.buttons() & QtCore.Qt.LeftButton and self.__dragOffset is not None :
if self.__cursor == QtCore.Qt.SizeFDiagCursor :
newSize = event.globalPos() + self.__dragOffset
self._qtWidget().resize( newSize.x(), newSize.y() )
else :
self._qtWidget().move( event.globalPos() + self.__dragOffset )
elif self.getResizeable() :
self.__setCursorFromPosition( event )
def __enterEvent( self, event ) :
if self.__closeOnLeave and self.__visibilityAnimation is not None :
if self.__visibilityAnimation.state() == self.__visibilityAnimation.Running :
				# we are currently visible, but an animation is running, so we must be
				# in the process of becoming invisible. Reverse that.
self.setVisible( True )
def __leaveEvent( self, event ) :
self.__setCursor( None )
if self.__closeOnLeave :
self.setVisible( False )
def __paintEvent( self, event ) :
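		# Paints the rounded-rectangle popup background and, when the window is
		# resizeable, a small square resize grip in the bottom-right corner.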
painter = QtGui.QPainter( self._qtWidget() )
painter.setRenderHint( QtGui.QPainter.Antialiasing )
painter.setBrush( QtGui.QColor( 76, 76, 76 ) )
painter.setPen( QtGui.QColor( 0, 0, 0, 0 ) )
radius = self._qtWidget().layout().contentsMargins().left()
size = self.size()
painter.drawRoundedRect( QtCore.QRectF( 0, 0, size.x, size.y ), radius, radius )
if self.getResizeable() :
painter.drawRect( size.x - radius, size.y - radius, radius, radius )
def __setCursorFromPosition( self, event ) :
radius = self._qtWidget().layout().contentsMargins().left()
size = self.size()
p = event.pos()
if p.x() > size.x - radius and p.y() > size.y - radius :
self.__setCursor( QtCore.Qt.SizeFDiagCursor )
else :
self.__setCursor( None )
def __setCursor( self, cursor ) :
if cursor == self.__cursor :
return
if self.__cursor is not None :
QtGui.QApplication.restoreOverrideCursor()
if cursor is not None :
QtGui.QApplication.setOverrideCursor( QtGui.QCursor( cursor ) )
self.__cursor = cursor
def __closeIfLeft( self ) :
self.close()
class _VisibilityAnimation( QtCore.QVariantAnimation ) :
def __init__( self, window, visible ) :
QtCore.QVariantAnimation.__init__( self )
self.__window = window
startValue = self.__window.windowOpacity()
endValue = 1.0 if visible else 0.0
self.setStartValue( startValue )
self.setEndValue( endValue )
self.setDuration( abs( startValue - endValue ) * 500 )
def updateCurrentValue( self, value ) :
value = GafferUI._Variant.fromVariant( value )
self.__window.setWindowOpacity( value )
if value == 0 :
self.__window.hide()
elif not self.__window.isVisible() :
self.__window.show()
| [
"[email protected]"
] | |
c7867baeca22849ea7b5625a957b27b04f171214 | 3dcb9b9de4e27ee0e7ece48dcd51f920638ca14d | /api/api.py | 291f60faae09d446c7bb503a005fc97f6adb87c9 | [] | no_license | chyld/flask-postgres-react-docker | 4f4a7fb52c52df6fd005af68668a1425139613b1 | e36f36cb32ae259d6472ca7813c4dfb0cb3213da | refs/heads/master | 2021-01-20T07:02:13.625385 | 2017-05-02T06:26:40 | 2017-05-02T06:26:40 | 89,951,591 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,599 | py | from flask import Flask, jsonify, request
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
from marshmallow import Schema
import os
app = Flask(__name__)
CORS(app)
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:pass1234@db/animals'
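# 'db' is assumed to be the hostname of the Postgres service (e.g. the
# docker-compose service name); adjust the URI/credentials for other setups.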
db = SQLAlchemy(app)
@app.route('/hello', methods=['GET'])
def hello():
print('hello, hello, hello')
dogs = Dog.query.all()
# schema = DogSchema()
# result = schema.dump(dog)
print('running...')
for dog in dogs:
        print('dog {0}:'.format(dog))
return jsonify({'woof': 'boo'})
@app.route('/nested')
def nested():
return jsonify({"a": 3,
"b": True,
"c": None,
"d": "hello json",
"e": 3.14,
"f": [1, 2, 3],
"g": {"x":1, "y":2, "z":3}
})
@app.route('/echo', methods=['POST'])
def echo():
# import IPython
# from IPython import embed
# embed() # this call anywhere in your program will start IPython
# import pdb; pdb.set_trace()
# IPython.start_ipython()
return jsonify(request.json)
class Dog(db.Model):
__tablename__ = "dogs"
id = db.Column('id', db.Integer, primary_key=True)
name = db.Column('name', db.String(100))
age = db.Column('age', db.Integer)
def __init__(self, name, age):
self.name = name
self.age = age
class DogSchema(Schema):
class Meta:
fields = ('id', 'name', 'age')
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0', port=int(os.environ['PORT']))
| [
"[email protected]"
] | |
2e24bb1da5abc68896108ac8b9934925cd0b5c5e | aa0c7bb4935ff68bb4ba2be4332890b760c9dda2 | /ipcv/scalespace.py | 998699a9f9b1b62a439bf745940a9dd6c314086b | [
"MIT"
] | permissive | andersbll/ipcv | 0b4deb5f867a4fd642aa7864769e7f4c4901e809 | ea533def7967c9d3a53002ae109db8b256b51c1d | refs/heads/master | 2021-03-12T23:40:26.990304 | 2014-03-05T13:57:31 | 2014-03-05T13:57:31 | 15,453,581 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 3,575 | py | import numpy as np
from scipy.ndimage.filters import gaussian_filter
class ScaleSpace:
def __init__(self, img_shape, sigmas, dys, dxs):
''' Compute the scale-space of an image.
Upon initialization, this class precomputes the Gaussian windows used
to smooth images of a fixed shape to save the computations at later
points.
'''
assert(len(sigmas) == len(dys) == len(dxs))
h, w = img_shape
g_y, g_x = np.mgrid[-.5+.5/h:.5:1./h, -.5+.5/w:.5: 1./w]
self.filters = []
for sigma, dy, dx in zip(sigmas, dys, dxs):
g = np.exp(- (g_x**2 + g_y**2) * (np.pi*2*sigma)**2 / 2.)
g = np.fft.fftshift(g)
if dy > 0 or dx > 0:
#TODO change list(range to np.linspace or similar
dg_y = np.array((list(range(0, h//2))+list(range(-h//2, 0))),
dtype=float, ndmin=2) / h
dg_x = np.array((list(range(0, w//2))+list(range(-w//2, 0))),
dtype=float, ndmin=2) / w
dg = (dg_y.T**dy) * (dg_x**dx) * (1j*2*np.pi)**(dy + dx)
g = np.multiply(g, dg)
self.filters.append(g)
def compute_f(self, img_f):
''' Compute the scale space of an image in the fourier domain.'''
return [np.multiply(img_f, f) for f in self.filters]
def compute(self, img):
''' Compute the scale space of an image.'''
img_f = np.fft.fft2(img)
return [np.fft.ifft2(np.multiply(img_f, f)).real for f in self.filters]
def scalespace(img, sigma, order=(0, 0)):
    '''Compute the scale-space of an image. sigma is the scale parameter;
    order=(dy, dx) gives the differentiation order along the y and x axes
    respectively.'''
ss = ScaleSpace(img.shape, [sigma], [order[0]], [order[1]])
return ss.compute(img)[0]
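# Example (hypothetical): first-order derivative along y at scale 2.0:
#   L_y = scalespace(img, 2.0, order=(1, 0))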
def gradient_orientation(img, scale, signed=True, fft=False):
'''Calculate gradient orientations at scale sigma.'''
normalizer = scale**2
if fft:
Ly = normalizer*scalespace(img, scale, order=(1, 0))
Lx = normalizer*scalespace(img, scale, order=(0, 1))
else:
mode = 'reflect'
Ly = normalizer*gaussian_filter(img, scale, order=(1, 0), mode=mode)
Lx = normalizer*gaussian_filter(img, scale, order=(0, 1), mode=mode)
if signed:
go = np.arctan2(Ly, Lx)
else:
go = np.arctan(Ly/(Lx + 1e-10))
go_m = np.sqrt(Lx**2+Ly**2)
return go, go_m
def shape_index(img, scale, orientations=False, fft=False):
'''Calculate the shape index at the given scale.'''
normalizer = scale**2
if fft:
Lyy = normalizer*scalespace(img, scale, order=(2, 0))
Lxy = normalizer*scalespace(img, scale, order=(1, 1))
Lxx = normalizer*scalespace(img, scale, order=(0, 2))
else:
mode = 'reflect'
Lyy = normalizer*gaussian_filter(img, scale, order=(2, 0), mode=mode)
Lxy = normalizer*gaussian_filter(img, scale, order=(1, 1), mode=mode)
Lxx = normalizer*gaussian_filter(img, scale, order=(0, 2), mode=mode)
si = np.arctan((-Lxx-Lyy) / (np.sqrt((Lxx - Lyy)**2+4*Lxy**2)+1e-10))
si_c = .5*np.sqrt(Lxx**2 + 2*Lxy**2 + Lyy**2)
if orientations:
t = Lxx + Lyy
d = Lxx*Lyy - Lxy**2
l1 = t/2.0 + np.sqrt(np.abs(t**2/4 - d))
l2 = t/2.0 - np.sqrt(np.abs(t**2/4 - d))
y = l1-Lyy
x = Lxy
si_o = np.arctan(y/(x+1e-10))
si_om = l1-l2
return si, si_c, si_o, si_om
else:
return si, si_c
| [
"[email protected]"
] | |
ee82f549982587ab5b564579fb516fba6bdf691f | 22013212df1e21f29d0180f2109841177a2a8791 | /basic_addons/account_budget_report/reports/__init__.py | 08af8422c824fc2e2e1015f5bb8891ccaf05f79f | [] | no_license | butagreeza/DTDATA_A | f965236c0d7faf0ec4082d27e2a0ff8e7dafe1c6 | 90b09f89714349a3f26de671a440a979aeebd54c | refs/heads/master | 2023-06-18T00:41:02.521432 | 2021-06-14T21:17:06 | 2021-06-14T21:17:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# Cybrosys Technologies Pvt. Ltd.
# Copyright (C) 2017-TODAY Cybrosys Technologies(<https://www.cybrosys.com>).
# Author: Jesni Banu(<https://www.cybrosys.com>)
# you can modify it under the terms of the GNU LESSER
# GENERAL PUBLIC LICENSE (LGPL v3), Version 3.
#
# It is forbidden to publish, distribute, sublicense, or sell copies
# of the Software or modified copies of the Software.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU LESSER GENERAL PUBLIC LICENSE (LGPL v3) for more details.
#
# You should have received a copy of the GNU LESSER GENERAL PUBLIC LICENSE
# GENERAL PUBLIC LICENSE (LGPL v3) along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import budget_parser
import cross_overed_budget_report
import analytic_budget
| [
"[email protected]"
] | |
9657b3ceec8c66aed46b44498f1668e29d1b6871 | 3b09dc4623dac559c85c0333526d55b0615d79d7 | /problems/56.py | 94bcfe31fbef92738fe0088cba102cb331404cf7 | [] | no_license | Asperas13/leetcode | 5d45bd65c490ada9b3cb2c33331a728eab2ef9b4 | 7f2f1d4f221925945328a355d653d9622107fae7 | refs/heads/master | 2021-09-28T15:54:54.761873 | 2020-05-05T15:29:48 | 2020-05-05T15:30:59 | 145,767,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | class Solution:
def merge(self, intervals: List[List[int]]) -> List[List[int]]:
if len(intervals) < 2:
return intervals
intervals.sort(key=lambda a: a[0])
prev = intervals[0]
result = []
for i in range(1, len(intervals)):
if intervals[i][0] >= prev[0] and intervals[i][0] <= prev[1]:
prev[0] = min(prev[0], intervals[i][0])
prev[1] = max(prev[1], intervals[i][1])
else:
result.append(prev)
prev = intervals[i]
result.append(prev)
return result | [
"[email protected]"
] | |
bacc780f56a918e21b35b9fecc1d2a15d95159bf | 5d1a348e11ad652e6cc8f894d4ca774429f335f9 | /Prob-and-Stats/_Calculators/confidence_intervals.py | 014691dd6abedfa0a271ad2b36d1498a30b5a843 | [] | no_license | anhnguyendepocen/UCSanDiegoX | 5332fe0780540038c0cde70af70d67544a3e7725 | 053a1fae52f9b46188a9fcf10729f70d10b3db63 | refs/heads/master | 2022-04-18T03:23:27.636938 | 2020-03-30T23:29:40 | 2020-03-30T23:29:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 990 | py | import numpy as np
from scipy.stats import norm, t, sem
from math import sqrt
# data = [60, 56, 61, 68, 51, 53, 69, 54, 80, 90, 55, 35, 45]
data = np.random.randint(low=35, high=71, size=20)
print(data)
n = len(data)
mu = np.mean(data)
sigma = np.std(data)
var = np.var(data)
bounds = t.interval(0.90, len(data)-1, loc=np.mean(data), scale=sem(data))
print('The Mean Is =', mu)
print('The Raw Variance ("S^2") Is =', var)
print('The Standard Deviation Is =', sigma)
print('Lower Bounds =', bounds[0])
print('Upper Bounds =', bounds[1])
# the number of tweets a random user posts is a random variable with sigma=2
# in a sample of 121 users, the sample mean was 3.7
# find the 95% confidence interval for the distribution mean.
ci = 0.95
sig = .15
mean = 17.65
users = 50
inv_theta = norm.ppf((1+ci)/2)
std_error = sig/sqrt(users)
tweets_lower = mean - (inv_theta*std_error)
tweets_upper = mean + (inv_theta*std_error)
print('the bounds of number of tweets is =', tweets_lower, tweets_upper)
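# Added sketch: the interval above is the usual known-sigma normal CI,
#   mean +/- z_{(1+ci)/2} * sig / sqrt(users)
# so its half-width should equal inv_theta * std_error.
half_width = (tweets_upper - tweets_lower) / 2
print('half width =', half_width, 'expected =', inv_theta * std_error)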
| [
"[email protected]"
] | |
7caaba5f6d3bc82752e4d751b5c5e178037ab7f7 | 74951991a9e1dbe92d4999da9060409a9492bdc3 | /palindrome-number/palindrome-number.py | 3155e839eb464780b9d419ca27c1b6a61a2bf6d4 | [] | no_license | drpuig/Leetcode-1 | fd800ee2f13c7ce03fa57c8a1d10b3aa6976d7c0 | 4ee104f3069c380e1756dd65f6ff6004554e6c0e | refs/heads/main | 2023-07-15T08:57:32.971194 | 2021-08-21T08:29:24 | 2021-08-21T08:29:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | class Solution:
def isPalindrome(self, num: int) -> bool:
if num < 0: return False
r, x = 0, num
while x > 0:
r = r * 10 + x % 10
x //= 10
return r == num
| [
"[email protected]"
] | |
7eb9e41beacc32274f19363e57b7522cb3378335 | a59d1faced9fe7348ca7143d2a8643e0ebad2132 | /pyvisdk/do/invalid_profile_reference_host.py | 8adf31269e62041bb94e93f6a596abce09a3a869 | [
"MIT"
] | permissive | Infinidat/pyvisdk | c55d0e363131a8f35d2b0e6faa3294c191dba964 | f2f4e5f50da16f659ccc1d84b6a00f397fa997f8 | refs/heads/master | 2023-05-27T08:19:12.439645 | 2014-07-20T11:49:16 | 2014-07-20T11:49:16 | 4,072,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def InvalidProfileReferenceHost(vim, *args, **kwargs):
    '''An InvalidProfileReferenceHost fault is thrown when a valid host is not
    associated with a profile in the Virtual Center inventory. This could be
    because there is no host associated with the profile or because the
    associated host is incompatible with the profile.'''
obj = vim.client.factory.create('{urn:vim25}InvalidProfileReferenceHost')
# do some validation checking...
if (len(args) + len(kwargs)) < 7:
raise IndexError('Expected at least 8 arguments got: %d' % len(args))
required = [ 'host', 'profile', 'reason', 'dynamicProperty', 'dynamicType', 'faultCause',
'faultMessage' ]
optional = [ ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| [
"[email protected]"
] | |
a9aae5af5a7cacba668bf8b9da5cef2adec167b3 | a29c7e363026111276e94b96d39b1b4ab48dbca8 | /sdk/test/test_authorisation_request_response.py | d58f0334a7f10dfe20737e184735fd187ad09325 | [
"MIT"
] | permissive | matteo-kalogirou/yapily-sdk-python | a56bf6f9b1b308efda38f081f6237ebd8c8f8ad5 | f10d2d14383f551eeb59aa893d328ffa5080da22 | refs/heads/master | 2022-12-16T22:24:18.026765 | 2020-09-18T13:59:26 | 2020-09-18T13:59:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,523 | py | # coding: utf-8
"""
Yapily API
To access endpoints that require authentication, use your application key and secret created in the Dashboard (https://dashboard.yapily.com) # noqa: E501
The version of the OpenAPI document: 0.0.242
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import yapily
from yapily.models.authorisation_request_response import AuthorisationRequestResponse # noqa: E501
from yapily.rest import ApiException
class TestAuthorisationRequestResponse(unittest.TestCase):
"""AuthorisationRequestResponse unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test AuthorisationRequestResponse
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = yapily.models.authorisation_request_response.AuthorisationRequestResponse() # noqa: E501
if include_optional :
return AuthorisationRequestResponse(
id = '0',
user_uuid = '0',
application_user_id = '0',
reference_id = '0',
institution_id = '0',
status = 'AWAITING_AUTHORIZATION',
created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
transaction_from = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
transaction_to = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
expires_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
time_to_expire_in_millis = 56,
time_to_expire = '0',
feature_scope = [
'INITIATE_PRE_AUTHORISATION'
],
authorisation_url = '0',
consent_token = '0',
qr_code_url = '0'
)
else :
return AuthorisationRequestResponse(
)
def testAuthorisationRequestResponse(self):
"""Test AuthorisationRequestResponse"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
a6b04f1468e584f07faee05a9f0038e74e17f645 | e527efa21057bdab7aff6a6b8c753171e75c6bfe | /quetzalcoatl/settings/celery/prod.py | b940474cf5d70d73314d43a75ecee8d4c48b1ffa | [
"WTFPL"
] | permissive | dem4ply/quetzalcoatl | cb49d6cbf0260ebdb127f6d95d39d299684291c6 | f9f72dc72f0da4f264c33128dc01b79f9fda5f2a | refs/heads/master | 2022-01-10T14:37:08.985767 | 2020-03-14T07:47:27 | 2020-03-14T07:47:27 | 235,905,684 | 0 | 0 | WTFPL | 2021-09-22T18:28:28 | 2020-01-23T23:19:15 | Python | UTF-8 | Python | false | false | 1,131 | py | import os
# from kombu import Exchange, Queue
# from celery.schedules import crontab
# from datetime import timedelta
url_key = os.environ[ 'QUETZALCOATL__RABBITMQ__KEY__URL' ]
celery_url = os.environ[ url_key ]
BROKER_URL = celery_url
RESULT_BACKEND = celery_url
CELERY_RESULT_BACKEND = celery_url
'''
task_annotations = {
'*': {
'rate_limit': '5/s'
}
}
'''
# beat_schedule = 'djcelery.schedulers.DatabaseScheduler'
# TASK_QUEUES = (
# Queue( 'default', Exchange( 'task', 'topic' ), routing_key='default' ),
# Queue(
# 'debug', Exchange( 'task_debug', 'topic' ), routing_key='*.debug.*' ),
# )
#
# TASK_DEFAULT_QUEUE = 'default'
# TASK_DEFAULT_EXCHANGE = "tasks"
# TASK_DEFAULT_EXCHANGE_TYPE = "topic"
# TASK_DEFAULT_ROUTING_KEY = "task.default"
#
# TASK_ROUTES = {
# 'default': {
# 'binding_key': 'task.#',
# },
# 'reader_moe.tasks.debug_task': {
# 'queue': 'debug',
# 'binding_key': 'task.debug.*',
# 'exchange': 'task_debug'
# }
# }
#
# beat_schedule = { }
RESULT_SERIALIZER = 'json'
TASK_SERIALIZER = 'json'
CELERY_ALWAYS_EAGER = False
| [
"[email protected]"
] | |
54320cc144accbbc19a2366c523173264961565a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02922/s040859855.py | c93462013926489db291dd42664757224e2579ba | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | import sys
import math
from collections import deque
def input():
return sys.stdin.readline().rstrip()
def main():
A, B = map(int, input().split())
a = math.ceil((B-1)/(A-1))
print(a)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
41aab887f5b4c35a78397323e316aa412cbfc975 | da934e0010380fdc6894063540f61b0ebc2c9ded | /vendor/lockfile/lockfile/linklockfile.py | f8aeaefcfc16578a51a1d2fb4c86a762e01c4047 | [
"Apache-2.0",
"MIT"
] | permissive | bopopescu/cc-2 | ed4f1dfe3c98f476ff619058d99855a16272d36b | 37444fb16b36743c439b0d6c3cac2347e0cc0a94 | refs/heads/master | 2022-11-23T03:57:12.255817 | 2014-10-02T06:10:46 | 2014-10-02T06:10:46 | 282,512,589 | 0 | 0 | Apache-2.0 | 2020-07-25T19:36:05 | 2020-07-25T19:36:05 | null | UTF-8 | Python | false | false | 2,419 | py | from __future__ import absolute_import
import time
import os
from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
class LinkLockFile(LockBase):
"""Lock access to a file using atomic property of link(2).
>>> lock = LinkLockFile('somefile')
>>> lock = LinkLockFile('somefile', threaded=False)
"""
def acquire(self, timeout=None):
try:
open(self.unique_name, "wb").close()
except IOError:
raise LockFailed("failed to create %s" % self.unique_name)
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
while True:
# Try and create a hard link to it.
try:
print 'making a hard link %s to %s' % (self.unique_name,
self.lock_file)
os.link(self.unique_name, self.lock_file)
except OSError:
# Link creation failed. Maybe we've double-locked?
nlinks = os.stat(self.unique_name).st_nlink
if nlinks == 2:
# The original link plus the one I created == 2. We're
# good to go.
return
else:
# Otherwise the lock creation failed.
if timeout is not None and time.time() > end_time:
os.unlink(self.unique_name)
if timeout > 0:
raise LockTimeout
else:
raise AlreadyLocked
time.sleep(timeout is not None and timeout/10 or 0.1)
else:
# Link creation succeeded. We're good to go.
return
def release(self):
if not self.is_locked():
raise NotLocked
elif not os.path.exists(self.unique_name):
raise NotMyLock
os.unlink(self.unique_name)
os.unlink(self.lock_file)
def is_locked(self):
return os.path.exists(self.lock_file)
def i_am_locking(self):
return (self.is_locked() and
os.path.exists(self.unique_name) and
os.stat(self.unique_name).st_nlink == 2)
def break_lock(self):
if os.path.exists(self.lock_file):
os.unlink(self.lock_file)
| [
"[email protected]"
] | |
498b161763e04089ca2bc69b627c2c265422a62b | e23b28fc3ed196866a04af4e790c1c16b1b5183e | /django/portfolio/apps/portfolio_app/urls.py | 73a949e99ec0e9b82a53e892a13c8fb1443a2aa5 | [] | no_license | diazmc/Python | 6f47e7fcfb8c263eb154d59a5a9b3866e2c9d6a8 | 89e3d54eeb2b0ed7dc7af24103ace6fb6e45d65e | refs/heads/master | 2021-01-20T01:18:23.954877 | 2017-08-24T10:39:19 | 2017-08-24T10:39:19 | 101,283,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^testimonials$', views.testimonial)
]
| [
"[email protected]"
] | |
c451c1854b8bfd9dc2aa1c81ff03ee27356279ce | 7822e004b9697e451a9345589a411133ca12d74e | /scripts/createGradientImage.py | 54fb723a814d6d173509a46a8a6458d07aa24bec | [] | no_license | tomwright01/SLOAntsRegistration | 0e6335feff3f97e59728fdca0f174165df582f4a | 5ff0eb100d40604feae62500c5b8e6cd07c00017 | refs/heads/master | 2021-01-04T14:14:11.212043 | 2014-12-09T20:39:12 | 2014-12-09T20:39:12 | 26,826,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | import subprocess
import logging
import os
def main(dims,outputName,inputName,Sigma,antsPath):
"""Use the ANTs ImageMath to create a gradient image"""
imPath = os.path.join(antsPath,'ImageMath')
cmd = '{0} {1} {2} Grad {3} {4}'.format(imPath,dims,outputName,inputName,Sigma)
logging.info('Creating Gradient Image with command:')
logging.info('=======================')
logging.info(cmd)
logging.info('=======================')
try:
subprocess.check_call(cmd,shell=True,executable='/bin/bash')
return True
except subprocess.CalledProcessError:
return False
| [
"[email protected]"
] | |
f6a87a9dedd704b40464a5040ddb2d851e703ba9 | a9b31181ad6f695a2809018167a52a6d9847c0df | /Chap05-funcoes-frutiferas/compara.py | 2de8e1459dc0075b09a80469e8aaee81d6d62fa9 | [] | no_license | frclasso/Aprendendo_computacao_com_Python | 21cdecdebcdbafad35a48d8425d06e4ec2ba1259 | 40276f396c90d25b301e15e855942a607efd895b | refs/heads/master | 2020-03-12T17:38:04.886153 | 2018-10-11T14:17:13 | 2018-10-11T14:17:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | #!/usr/bin/env python3
def compara(x, y):
if x > y:
return 1
elif x == y:
return 0
else:
return -1
print(compara(1,2))
print(compara(3,2))
print(compara(3,3)) | [
"[email protected]"
] | |
1d7fcddad197b9c1e5b50b8573b0b569e645370a | 35a1593fbd15c8ef1a20971055774a1cdcd41bce | /test/test_rpc_fork.py | 5e2432f60183e5c5213ef1772931d6b7939ae669 | [
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | sahils2607/pytorch | 884a2da2a613b525522a1615c8c5ecef013e4fb1 | 16454095e09eab2e737d99ad569cd44bb7910f03 | refs/heads/master | 2020-08-07T15:13:53.319183 | 2019-10-07T21:22:42 | 2019-10-07T21:24:34 | 213,499,886 | 1 | 0 | NOASSERTION | 2019-10-07T22:39:43 | 2019-10-07T22:39:43 | null | UTF-8 | Python | false | false | 421 | py | #!/usr/bin/env python3
from __future__ import absolute_import, division, print_function, unicode_literals
from rpc_test import RpcTest
from common_distributed import MultiProcessTestCase
from common_utils import run_tests
class RpcTestWithFork(MultiProcessTestCase, RpcTest):
def setUp(self):
super(RpcTestWithFork, self).setUp()
self._fork_processes()
if __name__ == '__main__':
run_tests()
| [
"[email protected]"
] | |
3472469d1a6567b5c42751cad45681f14a096b86 | b9bc60cca34c6b4f8a750af6062f357f18dfcae2 | /tensorflow/contrib/ndlstm/python/lstm2d.py | 3907046ddad48c43fe12f40301240acae3703489 | [
"Apache-2.0"
] | permissive | lidenghui1110/tensorflow-0.12.0-fpga | 7c96753aafab5fe79d5d0c500a0bae1251a3d21b | f536d3d0b91f7f07f8e4a3978d362cd21bad832c | refs/heads/master | 2022-11-20T11:42:11.461490 | 2017-07-28T09:28:37 | 2017-07-28T09:28:37 | 98,633,565 | 3 | 2 | Apache-2.0 | 2022-11-15T05:22:07 | 2017-07-28T09:29:01 | C++ | UTF-8 | Python | false | false | 5,639 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A small library of functions dealing with LSTMs applied to images.
Tensors in this library generally have the shape (num_images, height, width,
depth).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.ndlstm.python import lstm1d
def _shape(tensor):
"""Get the shape of a tensor as an int list."""
return tensor.get_shape().as_list()
def images_to_sequence(tensor):
"""Convert a batch of images into a batch of sequences.
Args:
tensor: a (num_images, height, width, depth) tensor
Returns:
(width, num_images*height, depth) sequence tensor
"""
num_image_batches, height, width, depth = _shape(tensor)
transposed = tf.transpose(tensor, [2, 0, 1, 3])
return tf.reshape(transposed, [width, num_image_batches * height, depth])
def sequence_to_images(tensor, num_image_batches):
"""Convert a batch of sequences into a batch of images.
Args:
tensor: (num_steps, num_batches, depth) sequence tensor
num_image_batches: the number of image batches
Returns:
(num_images, height, width, depth) tensor
"""
width, num_batches, depth = _shape(tensor)
height = num_batches // num_image_batches
reshaped = tf.reshape(tensor, [width, num_image_batches, height, depth])
return tf.transpose(reshaped, [1, 2, 0, 3])
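# Note (added): images_to_sequence and sequence_to_images are inverse reshapes,
# mapping (num_images, height, width, depth) to (width, num_images*height,
# depth) and back, which lets horizontal_lstm below treat every image row as an
# independent 1-D sequence.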
def horizontal_lstm(images, num_filters_out, scope=None):
"""Run an LSTM bidirectionally over all the rows of each image.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output depth
scope: optional scope name
Returns:
(num_images, height, width, num_filters_out) tensor, where
num_steps is width and new num_batches is num_image_batches * height
"""
with tf.variable_scope(scope, "HorizontalLstm", [images]):
batch_size, _, _, _ = _shape(images)
sequence = images_to_sequence(images)
with tf.variable_scope("lr"):
hidden_sequence_lr = lstm1d.ndlstm_base(sequence, num_filters_out // 2)
with tf.variable_scope("rl"):
hidden_sequence_rl = (
lstm1d.ndlstm_base(sequence,
num_filters_out - num_filters_out // 2,
reverse=1))
output_sequence = tf.concat(2, [hidden_sequence_lr, hidden_sequence_rl])
output = sequence_to_images(output_sequence, batch_size)
return output
def separable_lstm(images, num_filters_out, nhidden=None, scope=None):
"""Run bidirectional LSTMs first horizontally then vertically.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output layer depth
nhidden: hidden layer depth
scope: optional scope name
Returns:
(num_images, height, width, num_filters_out) tensor
"""
with tf.variable_scope(scope, "SeparableLstm", [images]):
if nhidden is None:
nhidden = num_filters_out
hidden = horizontal_lstm(images, nhidden)
with tf.variable_scope("vertical"):
transposed = tf.transpose(hidden, [0, 2, 1, 3])
output_transposed = horizontal_lstm(transposed, num_filters_out)
output = tf.transpose(output_transposed, [0, 2, 1, 3])
return output
def reduce_to_sequence(images, num_filters_out, scope=None):
"""Reduce an image to a sequence by scanning an LSTM vertically.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output layer depth
scope: optional scope name
Returns:
A (width, num_images, num_filters_out) sequence.
"""
with tf.variable_scope(scope, "ReduceToSequence", [images]):
batch_size, height, width, depth = _shape(images)
transposed = tf.transpose(images, [1, 0, 2, 3])
reshaped = tf.reshape(transposed, [height, batch_size * width, depth])
reduced = lstm1d.sequence_to_final(reshaped, num_filters_out)
output = tf.reshape(reduced, [batch_size, width, num_filters_out])
return output
def reduce_to_final(images, num_filters_out, nhidden=None, scope=None):
"""Reduce an image to a final state by running two LSTMs.
Args:
images: (num_images, height, width, depth) tensor
num_filters_out: output layer depth
nhidden: hidden layer depth (defaults to num_filters_out)
scope: optional scope name
Returns:
A (num_images, num_filters_out) batch.
"""
with tf.variable_scope(scope, "ReduceToFinal", [images]):
nhidden = nhidden or num_filters_out
batch_size, height, width, depth = _shape(images)
transposed = tf.transpose(images, [1, 0, 2, 3])
reshaped = tf.reshape(transposed, [height, batch_size * width, depth])
with tf.variable_scope("reduce1"):
reduced = lstm1d.sequence_to_final(reshaped, nhidden)
transposed_hidden = tf.reshape(reduced, [batch_size, width, nhidden])
hidden = tf.transpose(transposed_hidden, [1, 0, 2])
with tf.variable_scope("reduce2"):
output = lstm1d.sequence_to_final(hidden, num_filters_out)
return output
| [
"[email protected]"
] | |
e6b0e6837166020928a9bfbdf5bc302fa4f86ad8 | 7dfa21d74dae975082c6d5deaa01248bac1dcc26 | /.circleci/cimodel/data/pytorch_build_data.py | 09476a970b40045f3d53a7de2f01f11f71d683ae | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | mruberry/pytorch | 88cf536ed58d20a409c1e5119be4ec04ec960082 | 19f73180cfb39eb67110d2a1d541975a49211453 | refs/heads/master | 2022-02-03T16:25:31.070089 | 2019-04-22T17:52:28 | 2019-04-22T17:58:15 | 130,132,886 | 4 | 1 | NOASSERTION | 2020-01-16T16:51:39 | 2018-04-18T23:24:38 | C++ | UTF-8 | Python | false | false | 3,956 | py | #!/usr/bin/env python3
from cimodel.lib.conf_tree import ConfigNode, X
CONFIG_TREE_DATA = [
("trusty", [
(None, [
X("2.7.9"),
X("2.7"),
X("3.5"),
X("nightly"),
]),
("gcc", [
("4.8", [X("3.6")]),
("5.4", [("3.6", [X(False), X(True)])]),
("7", [X("3.6")]),
]),
]),
("xenial", [
("clang", [
("5", [X("3.6")]),
]),
("cuda", [
("8", [X("3.6")]),
("9", [
# Note there are magic strings here
# https://github.com/pytorch/pytorch/blob/master/.jenkins/pytorch/build.sh#L21
# and
# https://github.com/pytorch/pytorch/blob/master/.jenkins/pytorch/build.sh#L143
# and
# https://github.com/pytorch/pytorch/blob/master/.jenkins/pytorch/build.sh#L153
# (from https://github.com/pytorch/pytorch/pull/17323#discussion_r259453144)
X("2.7"),
X("3.6"),
]),
("9.2", [X("3.6")]),
("10", [X("3.6")]),
]),
("android", [
("r19c", [X("3.6")]),
]),
]),
]
def get_major_pyver(dotted_version):
parts = dotted_version.split(".")
return "py" + parts[0]
class TreeConfigNode(ConfigNode):
def __init__(self, parent, node_name, subtree):
super(TreeConfigNode, self).__init__(parent, self.modify_label(node_name))
self.subtree = subtree
self.init2(node_name)
def modify_label(self, label):
return label
def init2(self, node_name):
pass
def get_children(self):
return [self.child_constructor()(self, k, v) for (k, v) in self.subtree]
class TopLevelNode(TreeConfigNode):
def __init__(self, node_name, subtree):
super(TopLevelNode, self).__init__(None, node_name, subtree)
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return DistroConfigNode
class DistroConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["distro_name"] = node_name
def child_constructor(self):
distro = self.find_prop("distro_name")
next_nodes = {
"trusty": TrustyCompilerConfigNode,
"xenial": XenialCompilerConfigNode,
}
return next_nodes[distro]
class TrustyCompilerConfigNode(TreeConfigNode):
def modify_label(self, label):
return label or "<unspecified>"
def init2(self, node_name):
self.props["compiler_name"] = node_name
def child_constructor(self):
return TrustyCompilerVersionConfigNode if self.props["compiler_name"] else PyVerConfigNode
class TrustyCompilerVersionConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_version"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return PyVerConfigNode
class PyVerConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["pyver"] = node_name
self.props["abbreviated_pyver"] = get_major_pyver(node_name)
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return XlaConfigNode
class XlaConfigNode(TreeConfigNode):
def modify_label(self, label):
return "XLA=" + str(label)
def init2(self, node_name):
self.props["is_xla"] = node_name
class XenialCompilerConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_name"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return XenialCompilerVersionConfigNode
class XenialCompilerVersionConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_version"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return PyVerConfigNode
| [
"[email protected]"
] | |
71dafe2db4bc761973d6704dc92903b815a5d803 | df7f13ec34591fe1ce2d9aeebd5fd183e012711a | /hata/discord/channel/channel_metadata/tests/test__parse_video_quality_mode.py | 1d27462067dbc38950831d7cb97ceae62bdabb9d | [
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | HuyaneMatsu/hata | 63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e | 53f24fdb38459dc5a4fd04f11bdbfee8295b76a4 | refs/heads/master | 2023-08-20T15:58:09.343044 | 2023-08-20T13:09:03 | 2023-08-20T13:09:03 | 163,677,173 | 3 | 3 | Apache-2.0 | 2019-12-18T03:46:12 | 2018-12-31T14:59:47 | Python | UTF-8 | Python | false | false | 594 | py | import vampytest
from ..preinstanced import VideoQualityMode
from ..fields import parse_video_quality_mode
def test__parse_video_quality_mode():
"""
Tests whether ``parse_video_quality_mode`` works as intended.
"""
for input_data, expected_output in (
({}, VideoQualityMode.auto),
({'video_quality_mode': VideoQualityMode.auto.value}, VideoQualityMode.auto),
({'video_quality_mode': VideoQualityMode.full.value}, VideoQualityMode.full),
):
output = parse_video_quality_mode(input_data)
vampytest.assert_eq(output, expected_output)
| [
"[email protected]"
] | |
02870225cf065083ba4335fd8a97915249b45f48 | cf50ea39bfd5a7dee49f10c5889637131bb40c74 | /11-CHAPTER/3-multiple-inheritance.py | f44b08ef42b3cd67a92e4e03882b0df37fad6336 | [] | no_license | Rishi05051997/Python-Notes | 4878b1760731d7b7f5060f320ec9758fc5946536 | 1c7c1d927e1c78be430d7131f569e3272f8e81ad | refs/heads/main | 2023-07-15T03:00:06.498240 | 2021-08-24T05:27:46 | 2021-08-24T05:27:46 | 377,142,221 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 307 | py | class Employee:
company = "Visa"
eCode = 120
class Freelancer:
company = "Fiverr"
level = 0
def upgradeLevel(self):
self.level = self.level + 1
class Programmer(Employee, Freelancer):
name = "Vrushabh"
p = Programmer()
p.upgradeLevel()
print(p.level)
print(p.company)
| [
"[email protected]"
] | |
8e15123ac1006ef3d53de1573baf06184dd75c95 | 48832d27da16256ee62c364add45f21b968ee669 | /res_bw/scripts/common/lib/plat-mac/carbon/lists.py | 4fc2252e450153f6e01beee179948af9cb5f9698 | [] | no_license | webiumsk/WOT-0.9.15.1 | 0752d5bbd7c6fafdd7f714af939ae7bcf654faf7 | 17ca3550fef25e430534d079876a14fbbcccb9b4 | refs/heads/master | 2021-01-20T18:24:10.349144 | 2016-08-04T18:08:34 | 2016-08-04T18:08:34 | 64,955,694 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,082 | py | # 2016.08.04 20:01:15 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/plat-mac/Carbon/Lists.py
def FOUR_CHAR_CODE(x):
return x
listNotifyNothing = FOUR_CHAR_CODE('nada')
listNotifyClick = FOUR_CHAR_CODE('clik')
listNotifyDoubleClick = FOUR_CHAR_CODE('dblc')
listNotifyPreClick = FOUR_CHAR_CODE('pclk')
lDrawingModeOffBit = 3
lDoVAutoscrollBit = 1
lDoHAutoscrollBit = 0
lDrawingModeOff = 8
lDoVAutoscroll = 2
lDoHAutoscroll = 1
lOnlyOneBit = 7
lExtendDragBit = 6
lNoDisjointBit = 5
lNoExtendBit = 4
lNoRectBit = 3
lUseSenseBit = 2
lNoNilHiliteBit = 1
lOnlyOne = -128
lExtendDrag = 64
lNoDisjoint = 32
lNoExtend = 16
lNoRect = 8
lUseSense = 4
lNoNilHilite = 2
lInitMsg = 0
lDrawMsg = 1
lHiliteMsg = 2
lCloseMsg = 3
kListDefProcPtr = 0
kListDefUserProcType = kListDefProcPtr
kListDefStandardTextType = 1
kListDefStandardIconType = 2
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\plat-mac\carbon\lists.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.08.04 20:01:15 Střední Evropa (letní čas)
| [
"[email protected]"
] | |
1dbbd38333e4bdfa695a265eab97dede7839959c | 893a2fea722b77148f1fb1cac066ce476f1afa0a | /codeforces/cf_beta_85/problem2.py | 8c260d049262d5c1c10a34300dcf43695e7bd3a3 | [] | no_license | the-brainiac/contests | feb9f1ee1abdfb3cc9dccd5a69623192b4ec09ed | b95426aa3e54e703f7924fe0f222c2915e07c8f7 | refs/heads/main | 2023-05-12T13:10:11.765678 | 2021-06-03T04:05:50 | 2021-06-03T04:05:50 | 373,376,225 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | N = 10**5
is_prime = [1]*N
# We know 0 and 1 are composites
is_prime[0] = 0
is_prime[1] = 0
def sieve():
"""
We cross out all composites from 2 to sqrt(N)
"""
i = 2
# This will loop from 2 to int(sqrt(x))
while i*i <= N:
# If we already crossed out this number, then continue
if is_prime[i] == 0:
i += 1
continue
j = 2*i
while j < N:
# Cross out this as it is composite
is_prime[j] = 0
# j is incremented by i, because we want to cover all multiples of i
j += i
i += 1
sieve()
def nextPrime(n):
i = n
while True:
if is_prime[i]:
return i
i += 1
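# Note (added): the printed answer k1*k2 has exactly four divisors
# 1 < k1 < k2 < k1*k2, and consecutive divisors differ by at least d because
# k1 >= d+1 and k2 >= k1+d (so k1*k2 - k2 = k2*(k1-1) >= d).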
for _ in range(int(input())):
d = int(input())
k1 = nextPrime(d+1)
k2 = nextPrime(d+k1)
print(k1*k2) | [
"[email protected]"
] | |
61f2b2619c96c01b5dda1b6c9aeb86457872c271 | 0ee8350bedb5c8ac575ee0b634fece214a06646a | /poezio/asyncio.py | 2b02a91ffd66a8b6a9a97f66ca7342e5dcce6026 | [
"Zlib",
"CC-BY-2.0"
] | permissive | LukeMarlin/poezio | 3fcad784d37aa665850b649622d6f8d75cc1fa3f | 884aae28a24d65951cc7d57f6044098f236f52bc | refs/heads/master | 2021-01-09T20:12:18.234842 | 2016-10-28T15:26:02 | 2016-10-28T15:57:38 | 72,423,788 | 1 | 0 | null | 2016-10-31T09:54:48 | 2016-10-31T09:54:48 | null | UTF-8 | Python | false | false | 1,353 | py | """
A module that monkey patches the standard asyncio module to add an
idle_call() method to the main loop. This method is used to execute a
callback whenever the loop is not busy handling anything else. This means
that it is a callback with lower priority than IO, timer, or even
call_soon() ones. These callbacks are called only once each.
"""
import asyncio
import functools
import collections
from asyncio import events
import slixmpp
def monkey_patch_asyncio_slixmpp():
def idle_call(self, callback):
if asyncio.iscoroutinefunction(callback):
raise TypeError("coroutines cannot be used with idle_call()")
handle = events.Handle(callback, [], self)
self._idle.append(handle)
def my_run_once(self):
if self._idle:
self._ready.append(events.Handle(lambda: None, (), self))
real_run_once(self)
if self._idle:
handle = self._idle.popleft()
handle._run()
cls = asyncio.get_event_loop().__class__
cls._idle = collections.deque()
cls.idle_call = idle_call
real_run_once = cls._run_once
cls._run_once = my_run_once
spawn_event = slixmpp.xmlstream.XMLStream._spawn_event
def patchy(self, xml):
self.loop.idle_call(functools.partial(spawn_event, self, xml))
slixmpp.xmlstream.XMLStream._spawn_event = patchy
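# Usage note (added sketch, not from the original project): apply the patch
# once at startup, before idle_call() is used, e.g.
#   monkey_patch_asyncio_slixmpp()
#   loop = asyncio.get_event_loop()
#   loop.idle_call(lambda: print("runs only when the loop is otherwise idle"))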
| [
"[email protected]"
] | |
36119431fd312a3e8902674067afbe6396c63da9 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/219/29883/submittedfiles/testes.py | 4f4b4f48b93942c5a8eddaabeee18acfd3de9bd6 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | # -*- coding: utf-8 -*-
# START HERE BELOW
a=int(input('Digite a:'))
b=int(input('Digite b:'))
c=int(input('Digite c:'))
d=int(input('Digite d:'))
if a>=b and a>=c and a>=d:
print(a)
if b<=c and b<=d:
print(b)
elif b>=a and b>=c and b>=d:
    print(b)
| [
"[email protected]"
] | |
4455b3a1b142bedf192ae2f451c4ff35db376820 | 176c59cf09d42c66d4101eca52beb9c3ea7362a1 | /pyramid_authsanity/tests/test_includeme.py | ad635c82cfa576e918a2c5fbe5ec15a7f88c8027 | [
"ISC"
] | permissive | stevepiercy/pyramid_authsanity | 146d90abcf7622e1d509eb069bfbbf80ed61acc8 | daf7188a8ab1a8bd215d9e1e1cb6682e87fa8ac7 | refs/heads/master | 2021-07-16T08:33:46.683994 | 2016-01-10T05:48:32 | 2016-01-10T05:48:32 | 51,718,108 | 0 | 0 | null | 2016-02-14T22:53:13 | 2016-02-14T22:53:13 | null | UTF-8 | Python | false | false | 3,043 | py | import pytest
from pyramid.authorization import ACLAuthorizationPolicy
import pyramid.testing
from zope.interface import (
Interface,
implementedBy,
providedBy,
)
from zope.interface.verify import (
verifyClass,
verifyObject
)
from pyramid_services import IServiceClassifier
from pyramid_authsanity.interfaces import (
IAuthSourceService,
)
class TestAuthServicePolicyIntegration(object):
@pytest.fixture(autouse=True)
def pyramid_config(self, request):
from pyramid.interfaces import IDebugLogger
self.config = pyramid.testing.setUp()
self.config.set_authorization_policy(ACLAuthorizationPolicy())
def finish():
del self.config
pyramid.testing.tearDown()
request.addfinalizer(finish)
def _makeOne(self, settings):
self.config.registry.settings.update(settings)
self.config.include('pyramid_authsanity')
def test_include_me(self):
from pyramid_authsanity.policy import AuthServicePolicy
self._makeOne({})
self.config.commit()
introspector = self.config.registry.introspector
auth_policy = introspector.get('authentication policy', None)
assert isinstance(auth_policy['policy'], AuthServicePolicy)
with pytest.raises(ValueError):
find_service_factory(self.config, IAuthSourceService)
def test_include_me_cookie_no_secret(self):
settings = {'authsanity.source': 'cookie'}
with pytest.raises(RuntimeError):
self._makeOne(settings)
def test_include_me_cookie_with_secret(self):
from pyramid_authsanity.policy import AuthServicePolicy
settings = {'authsanity.source': 'cookie', 'authsanity.secret': 'sekrit'}
self._makeOne(settings)
self.config.commit()
introspector = self.config.registry.introspector
auth_policy = introspector.get('authentication policy', None)
assert isinstance(auth_policy['policy'], AuthServicePolicy)
assert verifyClass(IAuthSourceService, find_service_factory(self.config, IAuthSourceService))
def test_include_me_session(self):
from pyramid_authsanity.policy import AuthServicePolicy
settings = {'authsanity.source': 'session'}
self._makeOne(settings)
self.config.commit()
introspector = self.config.registry.introspector
auth_policy = introspector.get('authentication policy', None)
assert isinstance(auth_policy['policy'], AuthServicePolicy)
assert verifyClass(IAuthSourceService, find_service_factory(self.config, IAuthSourceService))
def find_service_factory(
config,
iface=Interface,
):
context_iface = providedBy(None)
svc_types = (IServiceClassifier, context_iface)
adapters = config.registry.adapters
svc_factory = adapters.lookup(svc_types, iface, name='')
if svc_factory is None:
raise ValueError('could not find registered service')
return svc_factory
| [
"[email protected]"
] | |
0a0504b0bc50786ad6319cc72a59f6bd7ed5d613 | 8f7c595f2b9d075a89417760b7fbf9abb1fecb72 | /try_enricher.py | e94463b5038fad8d0620db03b893bab816739527 | [
"MIT"
] | permissive | MainakMaitra/trading-utils | 555ed240a20b26d4876f1490fc8a2d9273231fc5 | 3e73091b4d3432e74c385a9677b7f7ca4192c67f | refs/heads/main | 2023-07-04T09:19:40.122188 | 2021-08-08T09:01:37 | 2021-08-08T09:01:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | import json
import matplotlib.pyplot as plt
import pandas as pd
from common.analyst import fetch_data_from_cache
plt.ioff()
pd.set_option("display.max_columns", None)
pd.set_option("display.width", None)
ticker = "AAPL"
# weekly_options.set_index('Symbol', inplace=True)
# cboe_options = pd.read_csv(f"data/cboesymboldirequityindex.csv")
# print(has_options('AAPL'))
# data, ticker_df = fetch_data_on_demand(ticker)
data = fetch_data_from_cache(ticker, is_etf=False)
key_values = list([(k, data[k]) for k in data.keys() if "month_" in k])
for kv in key_values:
print(kv)
# weekly_ticker_candles = convert_to_weekly(df)
#
# for wp in [4, 8]:
# df[["max_weekly_{}".format(wp), "max_weekly_{}_at".format(wp)]] = max_weekly(
# weekly_ticker_candles, week_until=wp
# )
# print(df)
| [
"[email protected]"
] | |
0ab91b8fc7a8f722176caa772e62d86f3f98bab8 | ecbf6a7c04b068a4f5606bbab46b974e53bd28d8 | /src/replace_localparam.py | debb116037d08c9f96c454b86e2bebe117057dc9 | [
"MIT"
] | permissive | jamesjiang52/V2SV | 970357be757ba068111645fd6964e8672a72f69f | 4b6109d16482131785b9dfec13fd66452078ae17 | refs/heads/main | 2022-12-30T06:47:27.814305 | 2020-10-18T00:07:39 | 2020-10-18T00:07:39 | 300,928,285 | 1 | 2 | MIT | 2020-10-18T00:06:56 | 2020-10-03T16:41:38 | Python | UTF-8 | Python | false | false | 4,354 | py | def __remove_extra_declarations(module_string, replaced_wires, debug=False):
buffer = module_string[:module_string.index(");") + 2] + "\n"
body_string = module_string[module_string.index(");") + 2:]
statements = body_string.split(";")
# remove the previous declarations of any new enums
for statement in statements:
words = statement.split()
if not words:
continue
if words[0] in ["reg", "wire", "logic"]:
if ":" in words[1]:
# wire is an array
signals = statement[statement.index("]") + 1:].split()
else:
signals = words[1:]
signals = [signal[:-1] if signal[-1] == "," else signal for signal in signals]
signals_remaining = signals[:]
for signal in signals:
if signal in replaced_wires:
signals_remaining.remove(signal)
if signals_remaining == signals:
# none of these signals were changed to enums
buffer += "{};\n".format(" ".join(words))
elif signals_remaining == []:
# all signals are declared as new enums now, so don't write anything
if debug:
print("Removed:\n{}\n\n".format(" ".join(words)))
else:
new_statement = "logic " # might as well do this
if ":" in words[1]:
# wire is an array
new_statement += words[1] + " "
for signal in signals_remaining:
new_statement += signal + ", "
# remove trailing comma from last wire
if new_statement[-2] == ",":
new_statement = new_statement[:-2]
buffer += "{};\n".format(new_statement)
if debug:
print("Replaced:\n{}\nwith\n{}\n\n".format(" ".join(words), new_statement))
else:
# don't care
buffer += "{};\n".format(" ".join(words))
# remove trailing semicolon from endmodule
if buffer[-2] == ";":
buffer = buffer[:-2] + "\n"
return buffer
def replace_localparam(module_string, debug=False):
buffer = module_string[:module_string.index(");") + 2] + "\n"
body_string = module_string[module_string.index(");") + 2:]
statements = body_string.split(";")
replaced_wires = []
for statement in statements:
words = statement.split()
if not words:
continue
if words[0] == "localparam":
new_statement = "enum int unsigned {\n"
params = []
pair_strings = "".join(words[1:]).split(",")
# get all localparam names
for pair_string in pair_strings:
param = pair_string.split("=")[0]
new_statement += param + ",\n"
params.append(param)
# remove trailing comma from last param
if new_statement[-2] == ",":
new_statement = new_statement[:-2] + "\n} "
# need to search for wires that are being assigned to these localparams,
# and declare these as the new enums
for statement_i in statements:
if "=" in statement_i or "<=" in statement_i:
statement_i = statement_i.replace("<=", "=")
words_i = statement_i.split()
if words_i[-1] in params:
wire = statement_i[:statement_i.index("=")].split()[-1]
if wire not in replaced_wires:
new_statement += wire + ", "
replaced_wires.append(wire)
else:
# don't care
pass
# remove trailing comma from last wire
if new_statement[-2] == ",":
new_statement = new_statement[:-2]
buffer += "{};\n".format(new_statement)
if debug:
print("Replaced:\n{}\nwith\n{}\n\n".format(" ".join(words), new_statement))
else:
# don't care at all about anything else
buffer += "{};\n".format(" ".join(words))
buffer = __remove_extra_declarations(buffer, replaced_wires, debug=debug)
return buffer
| [
"[email protected]"
] | |
d63408c2d9adafeadf3ac5e64efccfc40b438cae | 025fa245d4cbffdaa422287ed2f31c4d0442ee28 | /menus/models.py | 27649dd15508def3b9d933b9bfa95ba0bc8eb771 | [
"MIT"
] | permissive | elcolie/zero-to-deploy | 01f346ca50b8ccb271faef23934abe6a487baca6 | 6191a33ef55af7c550c0e529a4e373bfe40bc014 | refs/heads/master | 2022-02-08T23:22:17.008555 | 2018-06-15T19:39:06 | 2018-06-15T19:39:06 | 137,083,690 | 0 | 0 | MIT | 2022-01-21T19:35:33 | 2018-06-12T14:28:01 | Python | UTF-8 | Python | false | false | 653 | py | from django.db import models
from djchoices import DjangoChoices, ChoiceItem
from commons.models import AbstractTimestamp
class Menu(AbstractTimestamp):
class BackType(DjangoChoices):
        food = ChoiceItem("Food")
        drink = ChoiceItem("Drink")
menu_type = models.CharField(max_length=15, choices=BackType.choices, default=BackType.food)
name = models.CharField(max_length=20)
image = models.ImageField(default='sr.png', upload_to='menus')
take_home = models.BooleanField(default=False)
price = models.DecimalField(max_digits=6, decimal_places=2)
def __str__(self):
return f"{self.name} {self.price}"
| [
"[email protected]"
] | |
100854a6d18277d4c0dd905456dcf69e64b42394 | e96e9990ba26757b834eeff95e8bee9b720b72aa | /django/test007/blog/models.py | fa736c53142137dcf246805e9ccfa1fbf92b1a0a | [] | no_license | cuiyanan89/Python | 475a0a2778d7be5b9f6aa87ba35c21569080e056 | f742684474730e3b032aabd0151d584167c3ed02 | refs/heads/master | 2016-09-06T05:31:48.956411 | 2013-09-06T08:13:09 | 2013-09-06T08:13:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | from django.db import models
# Create your models here.
class New(models.Model):
new_title = models.CharField(max_length=30)
new_content = models.TextField(max_length=3000)
new_img = models.FileField(upload_to="./images")
def __unicode__(self):
return self.new_title
| [
"root@yanan-Rev-1-0.(none)"
] | root@yanan-Rev-1-0.(none) |
828540d2bb15e92786f7d4e9d29d60f51087bb38 | 908cf8e6ef52033bbf3d5afbb29637a25f5d66f8 | /test/test_codat_data_contracts_datasets_journal_entry_paged_response_model.py | 2fed4e4b1c7bd6ead4aef71f66240ef4f130e40b | [] | no_license | procurify/codat-python-sdk | 074769a2d9e72640741689b6f51e880d35b88095 | 3c8f664998427bda32bad8062c3bf324f39506da | refs/heads/master | 2023-08-25T03:55:19.817085 | 2021-10-22T22:14:34 | 2021-10-22T22:14:34 | 395,381,471 | 1 | 0 | null | 2021-10-20T21:10:31 | 2021-08-12T16:31:03 | Python | UTF-8 | Python | false | false | 1,570 | py | """
Codat API
[What's changed in our Swagger](https://docs.codat.io/docs/new-swagger-ui) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import codat_python_sdk
from codat_python_sdk.model.codat_data_contracts_datasets_journal_entry import CodatDataContractsDatasetsJournalEntry
from codat_python_sdk.model.codat_data_contracts_datasets_journal_entry_paged_response_links_model import CodatDataContractsDatasetsJournalEntryPagedResponseLinksModel
globals()['CodatDataContractsDatasetsJournalEntry'] = CodatDataContractsDatasetsJournalEntry
globals()['CodatDataContractsDatasetsJournalEntryPagedResponseLinksModel'] = CodatDataContractsDatasetsJournalEntryPagedResponseLinksModel
from codat_python_sdk.model.codat_data_contracts_datasets_journal_entry_paged_response_model import CodatDataContractsDatasetsJournalEntryPagedResponseModel
class TestCodatDataContractsDatasetsJournalEntryPagedResponseModel(unittest.TestCase):
"""CodatDataContractsDatasetsJournalEntryPagedResponseModel unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testCodatDataContractsDatasetsJournalEntryPagedResponseModel(self):
"""Test CodatDataContractsDatasetsJournalEntryPagedResponseModel"""
# FIXME: construct object with mandatory attributes with example values
# model = CodatDataContractsDatasetsJournalEntryPagedResponseModel() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
1bcbcbfe92659458a764c39a0f71f668340971fc | 2b0eab74af8d23244ff11699830f9bb10fbd717a | /accounts/perms.py | bd00bb6b63018efa7cc39d7709ce8ee5829b7d04 | [] | no_license | alexandrenorman/mixeur | c7e25cd20b03c78b361cb40e3e359a6dc5d9b06b | 95d21cd6036a99c5f399b700a5426e9e2e17e878 | refs/heads/main | 2023-03-13T23:50:11.800627 | 2021-03-07T15:49:15 | 2021-03-07T15:49:15 | 345,384,858 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,451 | py | # -*- coding: utf-8 -*-
from simple_perms import PermissionLogic, register
from helpers.mixins import BasicPermissionLogicMixin
class UserPermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, user_to_view, *args):
if user_to_view == user:
return True
if user.is_client or user.is_professional:
return False
if user.is_administrator or user.is_advisor or user.is_manager:
return True
return self.admin_permission(user, user_to_view, *args)
def change(self, user, user_to_modify, *args):
if user_to_modify == user:
return True
if user.is_client or user.is_professional:
return False
if user.is_administrator:
return True
# Allow same group modifications
if user_to_modify.group is not None and user_to_modify.group.is_member(user):
if user.is_advisor and user_to_modify.is_advisor:
return True
if user.is_manager and (
user_to_modify.is_advisor or user_to_modify.is_manager
):
return True
if (user.is_advisor or user.is_manager) and user_to_modify.is_client:
return True
if (
user.is_manager
and user_to_modify.is_advisor
and user_to_modify.group.admin_group == user.group
and user.group.is_admin
):
return True
if (
user.is_manager
and user_to_modify.is_manager
and user_to_modify.group == user.group
):
return True
return self.admin_permission(user, user_to_modify, *args)
def change_user_type(self, user, *args):
"""
Perm for user to change user_type for user_modified
Parameters
----------
user : User
args : Dict(user_modified, to_user_type)
"""
user_modified = args[0]["user_modified"]
to_user_type = args[0]["to_user_type"]
if user.is_client or user.is_professional:
return False
if user_modified.is_client or user_modified.is_professional:
return False
if to_user_type == "client" or to_user_type == "professional":
return False
if user.is_administrator:
return True
if user.is_manager:
if (
user_modified.is_advisor
or user_modified.is_superadvisor
or user_modified.is_manager
and user_modified.group.is_member(user)
):
if to_user_type in ["advisor", "superadvisor", "manager"]:
return True
if (
user.is_superadvisor
and to_user_type in ["advisor", "superadvisor"]
and user_modified.is_advisor
):
return True
return self.admin_permission(user, user_modified, *args)
register("user", UserPermissionLogic)
register("accounts/user", UserPermissionLogic)
class RgpdConsentPermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, rgpdconsent, *args):
if rgpdconsent.user == user:
return True
return self.admin_permission(user, rgpdconsent, *args)
change = view
register("rgpdconsent", RgpdConsentPermissionLogic)
register("accounts/rgpdconsent", RgpdConsentPermissionLogic)
class GroupPermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, group, *args):
if user.is_anonymous:
return False
if user.is_administrator:
return True
if user.is_advisor or user.is_manager:
return True
return self.admin_permission(user, group, *args)
def create(self, user, group, group_data, *args):
if user.is_anonymous:
return False
if user.is_administrator:
return True
if user.is_manager:
if not group_data:
return False
if user.group is not None:
if group is not None:
if group.admin_group.pk == user.group.pk:
return True
return self.admin_permission(user, None, *args)
def change(self, user, group, *args):
if user.is_anonymous:
return False
if user.is_administrator:
return True
if (
user.is_manager
and user.group is not None
and group.admin_group == user.group
):
return True
return self.admin_permission(user, group, *args)
def partial_change(self, user, group, *args):
"""
change only some fiels on group
"""
if user.is_advisor and user.group is not None and group == user.group:
return True
return self.admin_permission(user, group, *args)
register("group", GroupPermissionLogic)
register("accounts/group", GroupPermissionLogic)
class GroupPlacePermissionLogic(BasicPermissionLogicMixin, PermissionLogic):
def view(self, user, group, *args):
if user.is_anonymous:
return False
if user.is_expert:
return True
return self.admin_permission(user, group, *args)
register("group_place", GroupPlacePermissionLogic)
register("accounts/group_place", GroupPlacePermissionLogic)
| [
"[email protected]"
] | |
a65604c0deab61126203e90bd4b92e397c7b27c7 | 73c9537b3e2dd9c57e581d474b9e2daf7a8fb02a | /petccenv/lib/python3.4/site-packages/django_summernote/__init__.py | c63655d2b511e883b3efacf6fa71393ea01972ab | [] | no_license | pviniciusm/petcc | 8f6ec2966729051f11b482c4c7ed522df3f920ba | 30ccddce6d0e39ccea492ac73b2ddca855c63cee | refs/heads/master | 2021-01-21T13:29:52.835434 | 2016-04-23T18:06:07 | 2016-04-23T18:06:07 | 54,607,007 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | version_info = (0, 8, 2)
__version__ = version = '.'.join(map(str, version_info))
__project__ = PROJECT = 'django-summernote'
__author__ = AUTHOR = "Park Hyunwoo <[email protected]>"
default_app_config = 'django_summernote.apps.DjangoSummernoteConfig'
| [
"[email protected]"
] | |
f179eade30c3bd9c2fd92c1dcafbdf2683622c47 | 635cb7fb75048f9de7b95b48d1f59de68f9b3368 | /R01/sortowanie_obiektów_bez_wbudowanej_obsługi_porównań/example.py | 7641bed7b8787e11f23a4ef78d74ba00e90b1ae8 | [] | no_license | anpadoma/python_receptury3 | 9e889ac503e48eb62160050eecfdc4a64072c184 | c761f2c36707785a8a70bdaccebd7533c76dee21 | refs/heads/master | 2021-01-22T14:38:34.718999 | 2014-01-31T22:09:44 | 2014-01-31T22:09:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 327 | py | from operator import attrgetter
class User:
def __init__(self, user_id):
self.user_id = user_id
def __repr__(self):
return 'User({})'.format(self.user_id)
# Przykład
users = [User(23), User(3), User(99)]
print(users)
# Sortowanie według pola user-id
print(sorted(users, key=attrgetter('user_id')))
| [
"[email protected]"
] | |
b539a324c93a3ce5b5b5feedc5d1287601d63ffd | 0b4957de738dd05f964ea838016b4b811feca970 | /tests/utils/test_utils_shell.py | fdae13b81ae7f8e06716a3e3f09b9ce5f7a76e6a | [
"MIT",
"Apache-2.0"
] | permissive | bossjones/ultron8 | bdb5db72ba58b80645ae417cdf97287cfadd325d | 09d69c788110becadb9bfaa7b3d2a2046f6b5a1c | refs/heads/master | 2023-01-13T06:52:45.679582 | 2023-01-03T22:25:54 | 2023-01-03T22:25:54 | 187,934,920 | 0 | 0 | Apache-2.0 | 2023-01-03T22:25:56 | 2019-05-22T00:44:03 | Python | UTF-8 | Python | false | false | 1,008 | py | """Test shell utils"""
# pylint: disable=protected-access
import logging
import pytest
from six.moves import zip
from ultron8.utils.shell import quote_unix
logger = logging.getLogger(__name__)
@pytest.mark.utilsonly
@pytest.mark.unittest
class TestShellUtilsTestCase:
def test_quote_unix(self):
arguments = ["foo", "foo bar", "foo1 bar1", '"foo"', '"foo" "bar"', "'foo bar'"]
expected_values = [
"""
foo
""",
"""
'foo bar'
""",
"""
'foo1 bar1'
""",
"""
'"foo"'
""",
"""
'"foo" "bar"'
""",
"""
''"'"'foo bar'"'"''
""",
]
for argument, expected_value in zip(arguments, expected_values):
actual_value = quote_unix(value=argument)
expected_value = expected_value.lstrip()
assert actual_value == expected_value.strip()
| [
"[email protected]"
] | |
8f98de03e4669f0cea77fa4b917683db4d9be640 | 1f256bf20e68770c1a74f7e41ef6730623db0c74 | /location_management/migrations/0001_initial.py | 4015d233eb0c002e111dfd9acab22eacef6e3268 | [
"MIT"
] | permissive | davtoh/enterprise-website | 380ea32b730f16b7157e59ca0dc1e86d1f10e4a8 | 00b6c42cd6cb01517c152b9ffce9cfb56744703d | refs/heads/master | 2021-06-05T09:20:27.721789 | 2021-05-05T04:58:04 | 2021-05-05T05:14:05 | 141,315,681 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,933 | py | # Generated by Django 2.0.6 on 2018-07-05 23:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Cities',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('latitude', models.DecimalField(decimal_places=8, max_digits=10)),
('longitude', models.DecimalField(decimal_places=8, max_digits=11)),
],
),
migrations.CreateModel(
name='Countries',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('code', models.CharField(max_length=10)),
],
),
migrations.CreateModel(
name='States',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('code', models.CharField(max_length=10)),
('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='location_management.Countries')),
],
),
migrations.AddField(
model_name='cities',
name='country',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='location_management.Countries'),
),
migrations.AddField(
model_name='cities',
name='state',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='location_management.States'),
),
]
| [
"[email protected]"
] | |
a811597869c088ec4c17da0719f6b9a3e9e8a9b8 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_46/83.py | 728c1c577aee018ba646a8511a4f62a6e9af6751 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,459 | py | import psyco
psyco.full()
class memoize:
def __init__(self, function):
self.function = function
self.memoized = {}
def __call__(self, *args):
if args not in self.memoized:
self.memoized[args] = self.function(*args)
return self.memoized[args]
def clear(self):
self.memoized = {}
def alloc(size, default = 0): return [default] * size
def alloc2(r, c, default = 0): return [alloc(c, default)] * r
def isset(a, bit): return ((a >> bit) & 1) > 0
def dig(c): return ord(c) - 48
def abs(x):
if x<0: return -x;
return x
def area(x1, y1, x2, y2, x3, y3):
return abs((x3-x1)*(y2-y1) - (x2-x1)*(y3-y1))/2
def bisection(f, lo, hi):
"""
finds the integer x where f(x)=0.
    assumes f is monotonic.
"""
while lo < hi:
mid = (lo+hi)//2
midval = f(mid)
if midval < 0:
lo = mid+1
elif midval > 0:
hi = mid
else:
return mid
return None
def minarg(f, args):
min_val = None
min_arg = None
for a in args:
temp=f(a)
if min_arg==None or temp<min_val:
min_val=temp
min_arg=a
return min_arg, min_val
#mat[i] = lowest row for the row currently at position i
def solve():
c=0
for i in range(N):
#print mat, c
#print "i=", i
if mat[i]>i:
for j in range(i+1, N):
if mat[j]<=i:
#print "replace", i, " with ", j
mat.insert(i, mat[j])
#print mat
del mat[j+1]
#mat[j]=None
c+=j-i
break
return c
from time import time
if __name__ == "__main__":
def getInts(): return map(int, input.readline().rstrip('\n').split(' '))
def getFloats(): return map(float, input.readline().rstrip('\n').split(' '))
def getMatrix(rows): return [getInts() for _ in range(rows)]
input, output = open("d:/gcj/in", "r"), open('d:/gcj/output', 'w')
start_time=time()
for case in range(1, int(input.readline()) + 1):
N, = getInts()
mat=[[int(d) for d in input.readline().rstrip('\n')] for _ in range(N)]
for i in range(N):
j=N-1
while j>0 and mat[i][j]==0:
j-=1
mat[i]=j
s="Case #%d: %d\n" % (case, solve())
print s
output.write(s)
print time()-start_time
| [
"[email protected]"
] | |
677fb51759db8a07210bb76240c9cbab445670b8 | edcd74f8f65119bdbe737360c2ca33b4a6da160a | /python/problem-string/two_characters.py | 10b3ac19c02ca478f6a224f3f683e11fe2efc679 | [] | no_license | hyunjun/practice | 72e83de6a1d5e04ddcd16526f16110ea2dd00373 | 5376dd48b1cefb4faba9d2ef6a8a497b6b1d6c67 | refs/heads/master | 2023-08-31T07:00:37.320351 | 2023-08-17T07:29:24 | 2023-08-17T07:29:24 | 2,704,126 | 3 | 2 | null | 2022-12-14T20:25:07 | 2011-11-03T18:28:44 | Python | UTF-8 | Python | false | false | 1,698 | py | # https://www.hackerrank.com/challenges/two-characters
from collections import Counter
from collections import defaultdict
def alternate(s):
if s is None or 0 == len(s):
return 0
consecutiveSet = set()
for i, c in enumerate(s):
if 0 == i:
continue
if s[i - 1] == c:
consecutiveSet.add(c)
#print(consecutiveSet)
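    # A character that ever occurs twice in a row can never appear in a valid
    # alternating two-character string (nothing separates the repeated pair),
    # so such characters are excluded from the candidate counts below.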
def isAlternating(cand):
for i, c in enumerate(cand):
if 0 == i:
continue
if cand[i - 1] == c:
return False
return True
cntDict = Counter([c for c in s if c not in consecutiveSet])
cntCharDict = defaultdict(list)
for c, cnt in cntDict.items():
cntCharDict[cnt].append(c)
sortedCntCharList = sorted(cntCharDict.items(), key=lambda t: t[0], reverse=True)
#print(sortedCntCharList)
for i, (cnt1, charList1) in enumerate(sortedCntCharList):
for j, (cnt2, charList2) in enumerate(sortedCntCharList):
if j < i or 1 < abs(cnt1 - cnt2):
continue
for ch1 in charList1:
for ch2 in charList2:
if ch1 == ch2:
continue
cand = [c for c in s if c == ch1 or c == ch2]
#print(cand)
if isAlternating(cand):
return len(cand)
return 0
data = [('abaacdabd', 4),
('beabeefeab', 5),
('asdcbsdcagfsdbgdfanfghbsfdab', 8),
('asvkugfiugsalddlasguifgukvsa', 0),
]
for s, expected in data:
real = alternate(s)
print('{}, expected {}, real {}, result {}'.format(s, expected, real, expected == real))
| [
"[email protected]"
] | |
3f7a3592ecb43458823f4a89ef52c6dcfbfef71c | 70d4ef0863906b3ca64f986075cd35b8412b871e | /blueapps/account/sites/default.py | e996ac9936aeb25beb19699d619290f60b693d5c | [
"MIT",
"BSD-3-Clause",
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | selinagyan/bk-sops | 72db0ac33d9c307f51769e4baa181ceb8e1b279e | 39e63e66416f688e6a3641ea8e975d414ece6b04 | refs/heads/master | 2020-05-07T16:44:33.312442 | 2019-04-11T02:09:25 | 2019-04-11T02:09:25 | 180,696,241 | 0 | 0 | null | 2019-04-11T02:07:11 | 2019-04-11T02:07:10 | null | UTF-8 | Python | false | false | 2,955 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
""" # noqa
class ConfFixture(object):
"""
    Summary of the login module's project variables
    """
    #################
    # Browser parameters #
    #################
    # Login module; valid options are the modules under the components directory, e.g. qcloud_tlogin
    BACKEND_TYPE = None
    # User authentication backend, e.g. qcloud_tlogin.backends.QPtloginBackend
    USER_BACKEND = None
    # Login-required middleware, e.g. qcloud_tlogin.middlewares.LoginRequiredMiddleware
    LOGIN_REQUIRED_MIDDLEWARE = None
    # User model, e.g. qcloud_tlogin.models.UserProxy
    USER_MODEL = None
    # Login platform pop-up URL, e.g. http://xxxx.com/accounts/login_page/
    CONSOLE_LOGIN_URL = None
    # Login platform URL, e.g. http://login.o.qcloud.com
    LOGIN_URL = None
    # Embeddable login platform URL (usable in pop-ups and IFrames), e.g. http://xxx.com/plain/
    LOGIN_PLAIN_URL = None
    # Whether an embeddable unified login page is provided
    HAS_PLAIN = True
    # Whether to add a cross-domain prefix marker when redirecting to the login platform
    # http://xxx.com/login/?c_url={CROSS_PREFIX}http%3A//xxx.com%3A8000/
    ADD_CROSS_PREFIX = True
    CROSS_PREFIX = ''
    # Whether to append the APP_CODE when redirecting to the login platform
    # http://xxx.com/login/?c_url=http%3A//xxx.com%3A8000/&app_code=xxx
    ADD_APP_CODE = True
    # http://xxx.com/login/?c_url=http%3A//xxx.com%3A8000/&{APP_KEY}=xxx
    APP_KEY = 'app_code'
    SETTINGS_APP_KEY = 'APP_CODE'
    # Name of the callback parameter when redirecting to the login platform
    # http://xxx.com/login/?{C_URL}=http%3A//xxx.com%3A8000/
    C_URL = 'c_url'
    # Dimensions of the embedded login platform; determines the dialog size the frontend adapts to
    IFRAME_HEIGHT = 490
    IFRAME_WIDTH = 460
    ###############
    # WeChat parameters #
    ###############
    # Login module: weixin
    WEIXIN_BACKEND_TYPE = None
    # Authentication middleware, e.g. bk_ticket.middlewares.LoginRequiredMiddleware
    WEIXIN_MIDDLEWARE = None
    # Authentication backend, e.g. bk_ticket.backends.TicketBackend
    WEIXIN_BACKEND = None
    # User info URL, e.g. http://xxx.com/user/weixin/get_user_info/
    WEIXIN_INFO_URL = None
    # User OAuth authorization URL, e.g. https://xxx.com/connect/oauth2/authorize
    WEIXIN_OAUTH_URL = None
    # Application ID on the WeChat side, e.g. 'xxxx'
WEIXIN_APP_ID = None
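    # Illustrative sketch (hypothetical class name; the dotted paths are the
    # ones mentioned in the comments above): a concrete site subclasses
    # ConfFixture and fills in the slots, e.g.
    #   class QcloudTloginConfFixture(ConfFixture):
    #       BACKEND_TYPE = 'qcloud_tlogin'
    #       USER_BACKEND = 'qcloud_tlogin.backends.QPtloginBackend'
    #       LOGIN_REQUIRED_MIDDLEWARE = 'qcloud_tlogin.middlewares.LoginRequiredMiddleware'
    #       USER_MODEL = 'qcloud_tlogin.models.UserProxy'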
| [
"[email protected]"
] | |
1b1d43ac638223550a5a9f28cb4d5f216a837cbf | 1fac53ab13a9a682ecd926857ef565fa779afae4 | /fbseries.py | 44da677508725917468869fb71285e9ed733a195 | [] | no_license | Shamabanu/python-1 | 339123ff4e7667d6331c207cb1c7ca3fc775dc48 | 4c1642679bb0bdd53a1d21e5421e04eb7abda65b | refs/heads/master | 2020-04-13T23:49:27.700807 | 2018-12-29T15:10:26 | 2018-12-29T15:10:26 | 163,516,492 | 1 | 0 | null | 2018-12-29T14:16:28 | 2018-12-29T14:16:28 | null | UTF-8 | Python | false | false | 219 | py | def fibonacci(n):
if(n <= 1):
return n
else:
return(fibonacci(n-1) + fibonacci(n-2))
n = int(input("Enter no of terms:"))
print("Fibonacci sequence:")
for i in range(n):
print (fibonacci(i))
| [
"[email protected]"
] | |
52389b5b2bff83aa9b999bd20397ad5a96cf1b26 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_145/601.py | 1c4900414caa5c3d523730cdea08f4e249066ea5 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | #!/usr/bin/env python3
from fractions import gcd
from math import log
rounds = int(input())
for i in range(rounds):
n, d = input().split('/')
n = int(n)
d = int(d)
g = gcd(n,d)
n = n//g
d = d//g
if log(d,2) != round( log(d,2)):
print("Case #{}: impossible".format(i+1))
continue;
while n!=1 :
n -= 1
g = gcd(n,d)
n = n // g
d = d // g
print("Case #{}: {}".format(i+1,int(log(d,2))))
| [
"[email protected]"
] | |
93856c78a47412b99de857cb1abbf8b25758ad79 | f8bbdfb112618136fc4adccb03ce25fbfc48bff5 | /panel/config/admin/management_data/CustomPages/Member.py | 16842cd9719bcaac1229acc9f6e270cb55f48b24 | [] | no_license | lazypanda10117/CICSA-Ranking-Platform | 160973987b533ede6e0b94af29b5bc85646b2bc0 | d5f6ac64a1f85c3333c71a7d81749b49145b9a16 | refs/heads/master | 2022-12-09T23:21:28.649252 | 2020-04-28T22:53:07 | 2020-04-28T22:53:07 | 133,093,367 | 3 | 2 | null | 2021-09-22T17:51:39 | 2018-05-11T22:14:01 | Python | UTF-8 | Python | false | false | 3,351 | py | from cicsa_ranking.models import Member
from .AbstractCustomClass import AbstractCustomClass
from panel.component.CustomElements import Choices
from misc.CustomFunctions import MiscFunctions, RequestFunctions, LogFunctions
class MemberView(AbstractCustomClass):
def __init__(self, request):
self.base_class = Member
self.validation_table = {
'base_table_invalid': {'_state'},
'base_form_invalid': {'_state', 'id'},
}
super().__init__(request, self.base_class, self.validation_table)
# View Process Functions
def abstractFormProcess(self, action, **kwargs):
try:
post_dict = dict(self.request.POST)
dispatcher = super().populateDispatcher()
if dispatcher.get(action):
member_id = kwargs.pop('id', None)
member = self.useAPI(self.base_class).editSelf(id=member_id)
else:
member = self.base_class()
member.member_name = RequestFunctions.getSingleRequestObj(post_dict, 'member_name')
member.member_school = RequestFunctions.getSingleRequestObj(post_dict, 'member_school')
member.member_email = RequestFunctions.getSingleRequestObj(post_dict, 'member_email')
member.member_status = RequestFunctions.getSingleRequestObj(post_dict, 'member_status')
if not action == 'delete':
member.save()
LogFunctions.generateLog(
self.request, 'admin', LogFunctions.makeLogQuery(self.base_class, action.title(), id=member.id))
if action == 'delete':
member.delete()
except Exception:
print({"Error": "Cannot Process " + action.title() + " Request."})
# View Generating Functions
# Form Generating Functions
def getFieldData(self, **kwargs):
action = kwargs.pop('action')
element_id = kwargs.pop('element_id')
field_data_dispatcher = self.populateDispatcher()
if field_data_dispatcher.get(action):
field_data = MiscFunctions.filterDict(self.useAPI(self.base_class).getSelf(id=element_id).__dict__.items(),
self.validation_table['base_form_invalid'])
return field_data
return None
def getChoiceData(self):
choice_data = dict()
choice_data["member_status"] = Choices().getStatusChoices()
choice_data["member_school"] = Choices().getSchoolChoices()
return choice_data
def getDBMap(self, data):
return None
def getMultiChoiceData(self):
return None
def getSearchElement(self, **kwargs):
return None
# Table Generating Functions
def getTableSpecificHeader(self):
return [field.name for field in self.base_class._meta.get_fields()
if field.name not in self.validation_table['base_table_invalid']]
def getTableRowContent(self, content):
field_data = MiscFunctions.filterDict(self.useAPI(self.base_class).getSelf(id=content.id).__dict__.items(),
self.validation_table['base_table_invalid'])
field_data = self.updateChoiceAsValue(field_data, self.getChoiceData())
field_data = MiscFunctions.grabValueAsList(field_data)
return field_data
| [
"[email protected]"
] | |
a06db2c071875ff44793b4fa25d314d8e7a501c1 | 0178c69ef9fc5e49cadeaadddb4839eeff3f4a2a | /examples/sac.py | edb4bb7454feec8eb93576ef06326455a559076a | [] | no_license | YangHaha11514/rlkit | 3b17de2b4861e12b8c13c849410b7fab335157df | 8c2ee5d1602423e352724a0b0845c646688f98df | refs/heads/master | 2020-03-14T06:22:53.568011 | 2018-03-11T01:31:38 | 2018-03-11T01:31:38 | 131,482,724 | 1 | 0 | null | 2018-04-29T09:46:53 | 2018-04-29T09:46:53 | null | UTF-8 | Python | false | false | 1,813 | py | """
Run PyTorch Soft Actor Critic on HalfCheetahEnv.
NOTE: You need PyTorch 0.3 or more (to have torch.distributions)
"""
import gym
import numpy as np
import rlkit.torch.pytorch_util as ptu
from rlkit.envs.wrappers import NormalizedBoxEnv
from rlkit.launchers.launcher_util import setup_logger
from rlkit.torch.sac.policies import TanhGaussianPolicy
from rlkit.torch.sac.sac import SoftActorCritic
from rlkit.torch.networks import FlattenMlp
def experiment(variant):
env = NormalizedBoxEnv(gym.make('HalfCheetah-v1'))
obs_dim = int(np.prod(env.observation_space.shape))
action_dim = int(np.prod(env.action_space.shape))
net_size = variant['net_size']
qf = FlattenMlp(
hidden_sizes=[net_size, net_size],
input_size=obs_dim + action_dim,
output_size=1,
)
vf = FlattenMlp(
hidden_sizes=[net_size, net_size],
input_size=obs_dim,
output_size=1,
)
policy = TanhGaussianPolicy(
hidden_sizes=[net_size, net_size],
obs_dim=obs_dim,
action_dim=action_dim,
)
algorithm = SoftActorCritic(
env=env,
policy=policy,
qf=qf,
vf=vf,
**variant['algo_params']
)
if ptu.gpu_enabled():
algorithm.cuda()
algorithm.train()
if __name__ == "__main__":
# noinspection PyTypeChecker
variant = dict(
algo_params=dict(
num_epochs=1000,
num_steps_per_epoch=1000,
num_steps_per_eval=1000,
batch_size=128,
max_path_length=999,
discount=0.99,
soft_target_tau=0.001,
policy_lr=3E-4,
qf_lr=3E-4,
vf_lr=3E-4,
),
net_size=300,
)
setup_logger('name-of-experiment', variant=variant)
experiment(variant)
| [
"[email protected]"
] | |
05352a15e8fe5729ce8218b174d55903f616d532 | 0f812d8a0a3743a9ff9df414e096a7f9830b0397 | /old/demo/onelinkmanipulator_demo_PID.py | 56c7d8e95f6edfbf0742cc0e0823707431e2d674 | [
"MIT"
] | permissive | pierrecaillouette/AlexRobotics | 18977eec79875b7fc8c84d11f1c680be93b43fcb | 2223100df3e141d88491dde3d60a4eadd07a5c72 | refs/heads/master | 2021-04-09T03:18:58.858708 | 2019-04-28T15:30:26 | 2019-04-28T15:30:26 | 248,833,850 | 0 | 0 | MIT | 2020-03-20T19:14:52 | 2020-03-20T19:14:52 | null | UTF-8 | Python | false | false | 2,985 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 6 15:27:04 2016
@author: alex
"""
import numpy as np
###########################
# Load libs
###########################
from AlexRobotics.dynamic import Manipulator
from AlexRobotics.control import linear
from AlexRobotics.control import ComputedTorque
from AlexRobotics.planning import RandomTree
from AlexRobotics.control import DPO
###########################
# Objectives
###########################
x_start = np.array([-3.0, 0.0])
x_goal = np.array([ 0.0, 0.0])
###########################
# Create objects
###########################
Robot = Manipulator.OneLinkManipulator()
PD = linear.PD( kp = 5 , kd = 2 )
PID = linear.PID( kp = 5 , kd = 2 , ki = 4 )
CTC = ComputedTorque.ComputedTorqueController( Robot )
SLD = ComputedTorque.SlidingModeController( Robot )
RRT = RandomTree.RRT( Robot , x_start )
VI = DPO.ValueIteration1DOF( Robot , 'quadratic' )
############################
# Params
############################
tmax = 8 # max motor torque
Robot.u_ub = np.array([ tmax]) # Control Upper Bounds
Robot.u_lb = np.array([-tmax]) # Control Lower Bounds
RRT.x_start = x_start
RRT.discretizeactions( 3 )
RRT.dt = 0.1
RRT.goal_radius = 0.3
RRT.max_nodes = 5000
RRT.max_solution_time = 5
RRT.dyna_plot = True
RRT.dyna_node_no_update = 10
RRT.traj_ctl_kp = 25
RRT.traj_ctl_kd = 10
PID.dt = 0.001
CTC.w0 = 2
SLD.lam = 1
SLD.nab = 0
SLD.D = 5
###########################
# Offline Plannning
###########################
#RRT.find_path_to_goal( x_goal )
#RRT.plot_2D_Tree()
###########################
# Offline Optimization
###########################
#VI.first_step()
#VI.load_data( 'data/' + 'R1' + 'quadratic' )
#VI.compute_steps(1)
#
## Plot Value Iteration Results
#ValueIterationAlgo.plot_raw()
#ValueIterationAlgo.plot_J_nice( 2 )
###########################
# Assign controller
###########################
#Robot.ctl = PD.ctl
Robot.ctl = PID.ctl
#Robot.ctl = CTC.ctl
#Robot.ctl = SLD.ctl
#Robot.ctl = RRT.trajectory_controller
#VI.assign_interpol_controller()
###########################
# Simulation
###########################
Robot.plotAnimation( x_start , tf=10, n=10001, solver='euler' )
###########################
# Plots
###########################
Robot.Sim.phase_plane_trajectory()
#Robot.Sim.phase_plane_trajectory( PP_OL = False , PP_CL = True )
Robot.Sim.plot_CL()
###########################
# and more
###########################
#from AlexRobotics.dynamic import CustomManipulator
#BoeingArm = CustomManipulator.BoeingArm()
#BoeingArm.plot3DAnimation( x0 = np.array([0.2,0,0,0,0,0]) )
# Hold script in console
import matplotlib.pyplot as plt
plt.show() | [
"[email protected]"
] | |
3f38851402838e78a9602b3e882605fb1e2d4f86 | 14f4d045750f7cf45252838d625b2a761d5dee38 | /argo/test/test_io_k8s_kube_aggregator_pkg_apis_apiregistration_v1beta1_api_service_condition.py | 01d2de718c08b57e04b58fbd20a8e3d5c8c0eb44 | [] | no_license | nfillot/argo_client | cf8d7413d728edb4623de403e03d119fe3699ee9 | c8cf80842f9eebbf4569f3d67b9d8eff4ba405fa | refs/heads/master | 2020-07-11T13:06:35.518331 | 2019-08-26T20:54:07 | 2019-08-26T20:54:07 | 204,546,868 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,330 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.14.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import argo
from models.io_k8s_kube_aggregator_pkg_apis_apiregistration_v1beta1_api_service_condition import IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition # noqa: E501
from argo.rest import ApiException
class TestIoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition(unittest.TestCase):
"""IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testIoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition(self):
"""Test IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition"""
# FIXME: construct object with mandatory attributes with example values
# model = argo.models.io_k8s_kube_aggregator_pkg_apis_apiregistration_v1beta1_api_service_condition.IoK8sKubeAggregatorPkgApisApiregistrationV1beta1APIServiceCondition() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
8fe298aaf5cf8b93c96ab107fbe0f5771e3f5e25 | b775940595617a13289ee7006cf837f8f3a34480 | /examples/ppk_plot.py | 24497e1d56f97c75755d7197f2dbe75215961c3c | [] | no_license | Nathan-Walk/manufacturing | 5d2f58c2be45c9ccb2263bd750b6c18809fe76d4 | 2a22457ff9ef695da649a1e11d0cf7cb8ddde348 | refs/heads/master | 2023-03-08T19:48:15.613729 | 2021-02-26T01:05:46 | 2021-02-26T01:05:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | import logging
import matplotlib.pyplot as plt
from manufacturing import import_excel, ppk_plot
logging.basicConfig(level=logging.INFO)
data = import_excel('data/example_data_with_faults.xlsx', columnname='value (lcl=-7.4 ucl=7.4)', skiprows=3)
ppk_plot(**data)
plt.show()
| [
"[email protected]"
] | |
37857bc4bb9559c9e3f68635744baf75a7cc8762 | c086a38a366b0724d7339ae94d6bfb489413d2f4 | /PythonEnv/Lib/site-packages/docutils/utils/urischemes.py | 01335601af86e67266b95a75aa5f0935ea92bcf5 | [] | no_license | FlowkoHinti/Dionysos | 2dc06651a4fc9b4c8c90d264b2f820f34d736650 | d9f8fbf3bb0713527dc33383a7f3e135b2041638 | refs/heads/master | 2021-03-02T01:14:18.622703 | 2020-06-09T08:28:44 | 2020-06-09T08:28:44 | 245,826,041 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,028 | py | # $Id: urischemes.py 8376 2019-08-27 19:49:29Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
`schemes` is a dictionary with lowercase URI addressing schemes as
keys and descriptions as values. It was compiled from the index at
http://www.iana.org/assignments/uri-schemes (revised 2005-11-28)
and an older list at http://www.w3.org/Addressing/schemes.html.
"""
# Many values are blank and should be filled in with useful descriptions.
schemes = {
'about': 'provides information on Navigator',
'acap': 'Application Configuration Access Protocol; RFC 2244',
'addbook': "To add vCard entries to Communicator's Address Book",
'afp': 'Apple Filing Protocol',
'afs': 'Andrew File System global file names',
'aim': 'AOL Instant Messenger',
'callto': 'for NetMeeting links',
'castanet': 'Castanet Tuner URLs for Netcaster',
'chttp': 'cached HTTP supported by RealPlayer',
'cid': 'content identifier; RFC 2392',
'crid': 'TV-Anytime Content Reference Identifier; RFC 4078',
'data': ('allows inclusion of small data items as "immediate" data; '
'RFC 2397'),
'dav': 'Distributed Authoring and Versioning Protocol; RFC 2518',
'dict': 'dictionary service protocol; RFC 2229',
'dns': 'Domain Name System resources',
'eid': ('External ID; non-URL data; general escape mechanism to allow '
'access to information for applications that are too '
'specialized to justify their own schemes'),
'fax': ('a connection to a terminal that can handle telefaxes '
'(facsimiles); RFC 2806'),
'feed': 'NetNewsWire feed',
'file': 'Host-specific file names; RFC 1738',
'finger': '',
'freenet': '',
'ftp': 'File Transfer Protocol; RFC 1738',
'go': 'go; RFC 3368',
'gopher': 'The Gopher Protocol',
'gsm-sms': ('Global System for Mobile Communications Short Message '
'Service'),
'h323': ('video (audiovisual) communication on local area networks; '
'RFC 3508'),
'h324': ('video and audio communications over low bitrate connections '
'such as POTS modem connections'),
'hdl': 'CNRI handle system',
'hnews': 'an HTTP-tunneling variant of the NNTP news protocol',
'http': 'Hypertext Transfer Protocol; RFC 2616',
'https': 'HTTP over SSL; RFC 2818',
'hydra': 'SubEthaEdit URI. See http://www.codingmonkeys.de/subethaedit.',
'iioploc': 'Internet Inter-ORB Protocol Location?',
'ilu': 'Inter-Language Unification',
'im': 'Instant Messaging; RFC 3860',
'imap': 'Internet Message Access Protocol; RFC 2192',
'info': 'Information Assets with Identifiers in Public Namespaces',
'ior': 'CORBA interoperable object reference',
'ipp': 'Internet Printing Protocol; RFC 3510',
'irc': 'Internet Relay Chat',
'iris.beep': 'iris.beep; RFC 3983',
'iseek': 'See www.ambrosiasw.com; a little util for OS X.',
'jar': 'Java archive',
'javascript': ('JavaScript code; evaluates the expression after the '
'colon'),
'jdbc': 'JDBC connection URI.',
'ldap': 'Lightweight Directory Access Protocol',
'lifn': '',
'livescript': '',
'lrq': '',
'mailbox': 'Mail folder access',
'mailserver': 'Access to data available from mail servers',
'mailto': 'Electronic mail address; RFC 2368',
'md5': '',
'mid': 'message identifier; RFC 2392',
'mocha': '',
'modem': ('a connection to a terminal that can handle incoming data '
'calls; RFC 2806'),
'mtqp': 'Message Tracking Query Protocol; RFC 3887',
'mupdate': 'Mailbox Update (MUPDATE) Protocol; RFC 3656',
'news': 'USENET news; RFC 1738',
'nfs': 'Network File System protocol; RFC 2224',
'nntp': 'USENET news using NNTP access; RFC 1738',
'opaquelocktoken': 'RFC 2518',
'phone': '',
'pop': 'Post Office Protocol; RFC 2384',
'pop3': 'Post Office Protocol v3',
'pres': 'Presence; RFC 3859',
'printer': '',
'prospero': 'Prospero Directory Service; RFC 4157',
'rdar': ('URLs found in Darwin source '
'(http://www.opensource.apple.com/darwinsource/).'),
'res': '',
'rtsp': 'real time streaming protocol; RFC 2326',
'rvp': '',
'rwhois': '',
'rx': 'Remote Execution',
'sdp': '',
'service': 'service location; RFC 2609',
'shttp': 'secure hypertext transfer protocol',
'sip': 'Session Initiation Protocol; RFC 3261',
    'sips': 'secure session initiation protocol; RFC 3261',
'smb': 'SAMBA filesystems.',
'snews': 'For NNTP postings via SSL',
'snmp': 'Simple Network Management Protocol; RFC 4088',
'soap.beep': 'RFC 3288',
'soap.beeps': 'RFC 3288',
'ssh': 'Reference to interactive sessions via ssh.',
't120': 'real time data conferencing (audiographics)',
'tag': 'RFC 4151',
'tcp': '',
'tel': ('a connection to a terminal that handles normal voice '
'telephone calls, a voice mailbox or another voice messaging '
'system or a service that can be operated using DTMF tones; '
'RFC 3966.'),
'telephone': 'telephone',
'telnet': 'Reference to interactive sessions; RFC 4248',
'tftp': 'Trivial File Transfer Protocol; RFC 3617',
'tip': 'Transaction Internet Protocol; RFC 2371',
'tn3270': 'Interactive 3270 emulation sessions',
'tv': '',
'urn': 'Uniform Resource Name; RFC 2141',
'uuid': '',
'vemmi': 'versatile multimedia interface; RFC 2122',
'videotex': '',
'view-source': 'displays HTML code that was generated with JavaScript',
'wais': 'Wide Area Information Servers; RFC 4156',
'whodp': '',
'whois++': 'Distributed directory service.',
'x-man-page': ('Opens man page in Terminal.app on OS X '
'(see macosxhints.com)'),
'xmlrpc.beep': 'RFC 3529',
'xmlrpc.beeps': 'RFC 3529',
'z39.50r': 'Z39.50 Retrieval; RFC 2056',
'z39.50s': 'Z39.50 Session; RFC 2056', }
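# Example lookup: schemes['https'] -> 'HTTP over SSL; RFC 2818'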
| [
"="
] | = |
3c6272b5ed36863e8a7b012c1491944ae1bc0fed | d61f7eda203a336868c010abb8f9a6f45dd51adb | /217. Contains Duplicate.py | bfe1c7f845dd61be31b14fab7c6bc51dc3d70b9b | [] | no_license | Mschikay/leetcode | b91df914afc728c2ae1a13d3994568bb6c1dcffb | 7c5e5fe76cee542f67cd7dd3a389470b02597548 | refs/heads/master | 2020-04-17T12:11:38.810325 | 2019-10-06T02:37:32 | 2019-10-06T02:37:32 | 166,570,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | class Solution:
def containsDuplicate(self, nums: List[int]) -> bool:
# s = set()
# for n in nums:
# if n in s:
# return True
# s.add(n)
# return False
return not (len(set(nums)) == len(nums)) | [
"[email protected]"
] | |
10320c2b5c5d228ae3ada19ae71d1c1b9d7fff71 | 77d7f2c1284b276c95ad31b15ac2bde077f1ceca | /fastreid/data/common.py | 959fefb3f17b62bcdefa3071913ff3df58331735 | [
"Apache-2.0"
] | permissive | Cris-zj/fast-reid | a53f19fefe149eec93d0f1b2a1d61136d9c9eaf6 | db4b65444912cfd54675e6a52fa12e2d1321e971 | refs/heads/master | 2022-12-14T15:23:40.820118 | 2020-08-31T12:34:33 | 2020-08-31T12:34:33 | 291,639,026 | 2 | 0 | Apache-2.0 | 2020-08-31T06:56:24 | 2020-08-31T06:56:23 | null | UTF-8 | Python | false | false | 1,078 | py | # encoding: utf-8
"""
@author: liaoxingyu
@contact: [email protected]
"""
from torch.utils.data import Dataset
from .data_utils import read_image
class CommDataset(Dataset):
"""Image Person ReID Dataset"""
def __init__(self, img_items, transform=None, relabel=True):
self.img_items = img_items
self.transform = transform
self.relabel = relabel
pid_set = set([i[1] for i in img_items])
self.pids = sorted(list(pid_set))
if relabel: self.pid_dict = dict([(p, i) for i, p in enumerate(self.pids)])
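        # pid_dict maps each raw person ID to a contiguous index in
        # [0, len(self.pids)); __getitem__ applies it when relabel is True.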
def __len__(self):
return len(self.img_items)
def __getitem__(self, index):
img_path, pid, camid = self.img_items[index]
img = read_image(img_path)
if self.transform is not None: img = self.transform(img)
if self.relabel: pid = self.pid_dict[pid]
return {
"images": img,
"targets": pid,
"camid": camid,
"img_path": img_path
}
@property
def num_classes(self):
return len(self.pids)
| [
"[email protected]"
] | |
43161b15896e4902218ba23e07244705afec3bd9 | b0bb0dcdf8228cbdd02e47a9e2097892f7dd0861 | /bak/download.py | 03f5f93a9a8b197c04727d19e700059556ff1ede | [] | no_license | scmsqhn/zipline_for_u | 562de0d8ed638de431d207e6808db7e19fc168f7 | 369b17fd3142fcfb9ced7ce0b17a3a35a8af37d4 | refs/heads/master | 2021-01-20T06:51:09.012662 | 2017-05-02T14:54:38 | 2017-05-02T14:54:38 | 89,936,089 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,161 | py | # -*- coding: utf-8 -*-
# @Author: yuqing5
# date: 20151023
import tushare as ts
from sqlalchemy import create_engine
import datetime
import time
import pandas as pd
import os
import cPickle
from pandas import DataFrame
import pandas.io.sql as SQL
import sys
sys.path.append('./utility/')
from tool_decorator import local_memcached
def date2str(date):
return date.strftime("%Y-%m-%d")
class DownLoad(object):
'''
    1. download historical data
    2. update daily data
    3. load historical data
'''
def __init__(self):
self.basic = ts.get_stock_basics()
self.engine = create_engine('mysql://root:[email protected]/stock_info?charset=utf8')
self.connection = self.engine.connect()
@staticmethod
def date2str(today=None):
if today == None:
today =datetime.date.today()
return today.strftime("%Y-%m-%d")
def down_history(self, stock, index=False):
'''
        Download history up to today; can also be used for newly listed stocks.
date,open,high,close,low,volume,amount
'''
print '--'*10,"downing ",stock,'--'*10
date = self.basic.ix[stock]['timeToMarket']
        # timeToMarket values such as 20100115 are, surprisingly, plain integers
start_year = date/10000
today =datetime.date.today()
end_year = int(today.strftime("%Y"))
suffix = "-" + str(date)[4:6] + "-" + str(date)[6:8]
raw_data = None
        # Handle recently listed stocks that came to market this year
if start_year == end_year:
raw_data = ts.get_h_data(stock,index)
for year in range(start_year, end_year):
start = str(year) + suffix
right = datetime.datetime.strptime(str(year+1) + suffix, "%Y-%m-%d")-datetime.timedelta(days=1)
            # No company should have listed exactly on that cross-year date, so this is not a bug
end = right.strftime("%Y-%m-%d")
print start, "-----",end
data = ts.get_h_data(stock,start=start,end=end,index=index)
if data is None:
print None
else:
print data.shape
raw_data = pd.concat([raw_data, data], axis=0)
            # Check whether the most recent stretch of data still needs to be appended
if (year+1) == end_year and end < today.strftime("%Y-%m-%d"):
this_year_start = str(year+1) + suffix
print this_year_start, "-------",today.strftime("%Y-%m-%d")
data = ts.get_h_data(stock, start=this_year_start, end=today.strftime("%Y-%m-%d"),index=index)
if data is None:
print None
else:
print data.shape
raw_data = pd.concat([raw_data, data], axis=0)
raw_data = raw_data.sort_index(ascending=True)
raw_data.to_sql('day_'+stock, self.engine)
return raw_data
def down_all_day_stick(self):
'''
        Download historical data for all stocks
'''
for stock in self.basic.index:
try:
print stock
self.down_history(stock)
except Exception ,ex:
print Exception, ";",ex
def append_days(self,stock, start, end):
'''
        Append data for the given stock over the specified date range
'''
data = ts.get_h_data(stock,start=start,end=end)
data = data.sort_index(ascending=True)
data.to_sql('day_'+stock, self.engine,if_exists='append')
def append_all_days(self, start=None, end=None):
'''
        Append data for all stocks
'''
if start == None:
start = datetime.datetime.today()
end = start
for stock in self.basic['code']:
self.append_days(stock, start, end)
def load_data(self, stock):
'''
        Load a stock's historical data from the database
'''
search_sql = "select * from {0}".format('day_'+stock)
raw_data = SQL.read_sql(search_sql, self.engine)
return raw_data
def check_is_new_stock(self, stock):
'''
        Check whether this stock is newly listed
        (it turned out this helper is not actually needed)
'''
check_sql = "show tables like '{0}'".format('day_'+stock)
result = self.connection.execute(check_sql)
if result.first() == None:
return True
else:
return False
    # Defaults to roughly the last 3 years of data
def down_period(self, stock,start=None,end=None):
raw_data = ts.get_hist_data(stock,start,end)
return raw_data
# New listings such as 603861 are problematic
# Thin wrapper around the tushare interface so the same data is not fetched twice in one day
class TS(object):
@staticmethod
@local_memcached
def memchaced_data(funcname, fileprefix):
'''
        Usage:
        1. funcname: the tushare function to call
        2. fileprefix: file name used to cache that function's result
'''
raw_data = funcname()
return raw_data
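# Usage sketch (illustrative; the caching behaviour lives in the
# local_memcached decorator from tool_decorator, which is not shown here):
#   raw = TS.memchaced_data(ts.get_stock_basics, 'stock_basics')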
if __name__ == '__main__':
# dl = DownLoad()
# dl.down_all_day_stick()
# raw_data = dl.load_data('000001')
# print raw_data
TS() | [
"[email protected]"
] | |
9a2930492647fe490bf485ff55258371f5687191 | 3a63a9af2693b7d2f87a6d2db0585d8ce5480934 | /vision-vgg_objects.py | 3eb6277cd8e7d427d5a26d0fbd15066c271bf1e7 | [] | no_license | andreeadeac22/HackCam2018 | d167f71069c6fe529f1e88dd92e31794b64e6773 | 0bb529b0d0cc11583722107b7125eb0671ca149a | refs/heads/master | 2021-05-09T09:45:34.945701 | 2018-01-30T00:21:57 | 2018-01-30T00:21:57 | 119,458,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 863 | py | import torch
from torch.autograd import Variable as V
import torchvision.models as models
from torchvision.models.vgg import vgg16
from torchvision import transforms as trn
from torch.nn import functional as F
from PIL import Image
def image_to_objects(img_name):
model = vgg16(pretrained=True)
model.eval()
# load the image transformer
centre_crop = trn.Compose([
trn.Resize((256,256)),
trn.CenterCrop(224),
trn.ToTensor(),
trn.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
])
# load the class label
file_name = 'categories_imagenet.txt'
img = Image.open(img_name)
input_img = V(centre_crop(img).unsqueeze(0))
# forward pass
logit = model.forward(input_img)
h_x = F.softmax(logit, 1).data.squeeze()
return h_x
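    # Hypothetical follow-up, assuming categories_imagenet.txt (declared above
    # but never read) holds one class name per line:
    #   labels = [line.strip() for line in open(file_name)]
    #   probs, idx = h_x.sort(0, True)
    #   print(labels[idx[0]], float(probs[0]))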
print(image_to_objects("arch.jpeg")) | [
"[email protected]"
] | |
bc026c4ed31e48c1c7c6a8dad59f6f27b760e5de | d44b5a657e7cd69c875b55dd5cddf21812e89095 | /pixel_cnn/model/resnet.py | 4c7abe39625aca83798614a9c570268916820747 | [
"Apache-2.0"
] | permissive | nel215/chainer-pixel-cnn | ca8ae17fda998f7677dea785e53319b3fc646e76 | 94b064f9e66355d141ed5d6cce0c38492203715b | refs/heads/master | 2020-04-02T02:11:29.546694 | 2018-10-21T12:10:43 | 2018-10-21T12:10:43 | 153,896,421 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 774 | py | from chainer import Chain
from chainer import links as L
from chainer import functions as F
def concat_elu(x):
return F.elu(F.concat([x, -x], 1))
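# concat_elu doubles the channel dimension by applying ELU to [x, -x],
# the CReLU-style nonlinearity used in PixelCNN++-style gated residual blocks.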
class GatedResnet(Chain):
def __init__(self, n_out, Conv2D):
super(GatedResnet, self).__init__()
with self.init_scope():
self.conv1 = Conv2D(n_out)
self.conv2 = L.Convolution2D(None, n_out, ksize=1)
self.conv3 = Conv2D(2*n_out)
def __call__(self, x, a=None):
h = self.conv1(concat_elu(x))
if a is not None:
h += self.conv2(concat_elu(a))
h = F.dropout(concat_elu(h))
h = self.conv3(h)
# TODO: conditional generation
a, b = F.split_axis(h, 2, 1)
h = a * F.sigmoid(b)
return x + h
| [
"[email protected]"
] | |
e6ffa0af18975bc4140bb2a0fd222509374d096d | 174975248ffa04bb0339ace7475a791842e99ffb | /reverse_bits.py | 141244053c843ee9fa1eb7c73d05ab32903b8c86 | [] | no_license | KONAPAVANKUMAR/code-library | 87a5525dcf71aaba47f233df17ad31227cb3c44b | 6839ef596858515119a3c300b031a107c8d72292 | refs/heads/main | 2023-06-02T09:33:21.382512 | 2021-06-24T09:49:00 | 2021-06-24T09:49:00 | 378,131,322 | 0 | 0 | null | 2021-06-24T09:41:12 | 2021-06-18T11:39:22 | Python | UTF-8 | Python | false | false | 415 | py | def get_reverse_bit_string(number: int) -> str:
bit_string = ""
for _ in range(0, 32):
bit_string += str(number % 2)
number = number >> 1
return bit_string
def reverse_bit(number):
result = 0
for _ in range(1, 33):
result = result << 1
end_bit = number % 2
number = number >> 1
result = result | end_bit
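    # Example: reverse_bit(25) -> '00000000000000000000000000011001'
    # (result holds the 32-bit reversal of number; get_reverse_bit_string
    # then renders it least-significant bit first).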
return get_reverse_bit_string(result) | [
"[email protected]"
] | |
70425764af9a4af7b00d9a87514deba1e28c8fda | 722af8e6fa81960a6119c2e45ba6795771bad595 | /agents/migrations/0043_veri.py | a9fbb71d81b2b140fcb68e8c4a02de3f0a744641 | [] | no_license | witty-technologies-empowerment/pmc | 85d21fa3c360d40adeec7ca93792b5bc68c258e5 | 201bee60197240eec911637e136cf14bc5814eec | refs/heads/master | 2023-05-27T12:37:48.894933 | 2021-06-13T04:34:57 | 2021-06-13T04:34:57 | 376,439,472 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | # Generated by Django 2.2.6 on 2020-02-04 15:09
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('agents', '0042_auto_20191022_0303'),
]
operations = [
migrations.CreateModel(
name='Veri',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.CharField(max_length=20)),
('rcode', models.CharField(max_length=100)),
('count', models.CharField(default=1, max_length=2)),
('created', models.DateTimeField(auto_now_add=True)),
],
options={
'ordering': ['-created'],
},
),
]
| [
"[email protected]"
] |