content | fixed_cases
---|---|
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
class DashboardInfo:
MODEL_ID_KEY = "id" # To match Model schema
MODEL_INFO_FILENAME = "model_info.json"
RAI_INSIGHTS_MODEL_ID_KEY = "model_id"
RAI_INSIGHTS_RUN_ID_KEY = "rai_insights_parent_run_id"
RAI_INSIGHTS_PARENT_FILENAME = "rai_insights.json"
class PropertyKeyValues:
# The property to indicate the type of Run
RAI_INSIGHTS_TYPE_KEY = "_azureml.responsibleai.rai_insights.type"
RAI_INSIGHTS_TYPE_CONSTRUCT = "construction"
RAI_INSIGHTS_TYPE_CAUSAL = "causal"
RAI_INSIGHTS_TYPE_COUNTERFACTUAL = "counterfactual"
RAI_INSIGHTS_TYPE_EXPLANATION = "explanation"
RAI_INSIGHTS_TYPE_ERROR_ANALYSIS = "error_analysis"
RAI_INSIGHTS_TYPE_GATHER = "gather"
# Property to point at the model under examination
RAI_INSIGHTS_MODEL_ID_KEY = "_azureml.responsibleai.rai_insights.model_id"
# Property for tool runs to point at their constructor run
RAI_INSIGHTS_CONSTRUCTOR_RUN_ID_KEY = (
"_azureml.responsibleai.rai_insights.constructor_run"
)
# Property to record responsibleai version
RAI_INSIGHTS_RESPONSIBLEAI_VERSION_KEY = (
"_azureml.responsibleai.rai_insights.responsibleai_version"
)
# Property format to indicate presence of a tool
RAI_INSIGHTS_TOOL_KEY_FORMAT = "_azureml.responsibleai.rai_insights.has_{0}"
class RAIToolType:
CAUSAL = "causal"
COUNTERFACTUAL = "counterfactual"
ERROR_ANALYSIS = "error_analysis"
EXPLANATION = "explanation"
| class Dashboardinfo:
model_id_key = 'id'
model_info_filename = 'model_info.json'
rai_insights_model_id_key = 'model_id'
rai_insights_run_id_key = 'rai_insights_parent_run_id'
rai_insights_parent_filename = 'rai_insights.json'
class Propertykeyvalues:
rai_insights_type_key = '_azureml.responsibleai.rai_insights.type'
rai_insights_type_construct = 'construction'
rai_insights_type_causal = 'causal'
rai_insights_type_counterfactual = 'counterfactual'
rai_insights_type_explanation = 'explanation'
rai_insights_type_error_analysis = 'error_analysis'
rai_insights_type_gather = 'gather'
rai_insights_model_id_key = '_azureml.responsibleai.rai_insights.model_id'
rai_insights_constructor_run_id_key = '_azureml.responsibleai.rai_insights.constructor_run'
rai_insights_responsibleai_version_key = '_azureml.responsibleai.rai_insights.responsibleai_version'
rai_insights_tool_key_format = '_azureml.responsibleai.rai_insights.has_{0}'
class Raitooltype:
causal = 'causal'
counterfactual = 'counterfactual'
error_analysis = 'error_analysis'
explanation = 'explanation' |
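A quick sketch of how the `has_{0}` tool-key format above is meant to be used: combine the format constant with a tool name to obtain the run property that marks a tool's presence. Names follow the fixed snippet above; this is an illustrative assumption about usage, and if `PropertyKeyValues`/`RAIToolType` are nested inside `DashboardInfo` in the real module, prefix them accordingly.

```python
# Illustrative only; assumes the constant classes above are in scope as written.
tool_key = Propertykeyvalues.rai_insights_tool_key_format.format(Raitooltype.error_analysis)
print(tool_key)  # _azureml.responsibleai.rai_insights.has_error_analysis
```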
#4 lines: Fibonacci, tuple assignment
parents, babies = (1, 1)
while babies < 100:
print ('This generation has {0} babies'.format(babies))
parents, babies = (babies, parents + babies) | (parents, babies) = (1, 1)
while babies < 100:
print('This generation has {0} babies'.format(babies))
(parents, babies) = (babies, parents + babies) |
# Copyright 2014 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Public definitions for Go rules.
All public Go rules, providers, and other definitions are imported and
re-exported in this file. This allows the real location of definitions
to change for easier maintenance.
Definitions outside this file are private unless otherwise noted, and
may change without notice.
"""
load(
"//go/private:context.bzl",
_go_context = "go_context",
)
load(
"//go/private:providers.bzl",
_GoArchive = "GoArchive",
_GoArchiveData = "GoArchiveData",
_GoLibrary = "GoLibrary",
_GoPath = "GoPath",
_GoSDK = "GoSDK",
_GoSource = "GoSource",
)
load(
"//go/private/rules:sdk.bzl",
_go_sdk = "go_sdk",
)
load(
"//go/private:go_toolchain.bzl",
_declare_toolchains = "declare_toolchains",
_go_toolchain = "go_toolchain",
)
load(
"//go/private/rules:wrappers.bzl",
_go_binary_macro = "go_binary_macro",
_go_library_macro = "go_library_macro",
_go_test_macro = "go_test_macro",
)
load(
"//go/private/rules:source.bzl",
_go_source = "go_source",
)
load(
"//extras:embed_data.bzl",
_go_embed_data = "go_embed_data",
)
load(
"//go/private/tools:path.bzl",
_go_path = "go_path",
)
load(
"//go/private/rules:library.bzl",
_go_tool_library = "go_tool_library",
)
load(
"//go/private/rules:nogo.bzl",
_nogo = "nogo_wrapper",
)
# TOOLS_NOGO is a list of all analysis passes in
# golang.org/x/tools/go/analysis/passes.
# This is not backward compatible, so use caution when depending on this --
# new analyses may discover issues in existing builds.
TOOLS_NOGO = [
"@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library",
"@org_golang_x_tools//go/analysis/passes/assign:go_default_library",
"@org_golang_x_tools//go/analysis/passes/atomic:go_default_library",
"@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library",
"@org_golang_x_tools//go/analysis/passes/bools:go_default_library",
"@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library",
"@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library",
# TODO(#2396): pass raw cgo sources to cgocall and re-enable.
# "@org_golang_x_tools//go/analysis/passes/cgocall:go_default_library",
"@org_golang_x_tools//go/analysis/passes/composite:go_default_library",
"@org_golang_x_tools//go/analysis/passes/copylock:go_default_library",
"@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library",
"@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library",
"@org_golang_x_tools//go/analysis/passes/errorsas:go_default_library",
"@org_golang_x_tools//go/analysis/passes/findcall:go_default_library",
"@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library",
"@org_golang_x_tools//go/analysis/passes/ifaceassert:go_default_library",
"@org_golang_x_tools//go/analysis/passes/inspect:go_default_library",
"@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library",
"@org_golang_x_tools//go/analysis/passes/lostcancel:go_default_library",
"@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library",
"@org_golang_x_tools//go/analysis/passes/nilness:go_default_library",
"@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library",
"@org_golang_x_tools//go/analysis/passes/printf:go_default_library",
"@org_golang_x_tools//go/analysis/passes/shadow:go_default_library",
"@org_golang_x_tools//go/analysis/passes/shift:go_default_library",
"@org_golang_x_tools//go/analysis/passes/sortslice:go_default_library",
"@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library",
"@org_golang_x_tools//go/analysis/passes/stringintconv:go_default_library",
"@org_golang_x_tools//go/analysis/passes/structtag:go_default_library",
"@org_golang_x_tools//go/analysis/passes/testinggoroutine:go_default_library",
"@org_golang_x_tools//go/analysis/passes/tests:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library",
"@org_golang_x_tools//go/analysis/passes/unusedresult:go_default_library",
]
# Current version or next version to be tagged. Gazelle and other tools may
# check this to determine compatibility.
RULES_GO_VERSION = "0.30.0"
declare_toolchains = _declare_toolchains
go_context = _go_context
go_embed_data = _go_embed_data
go_sdk = _go_sdk
go_tool_library = _go_tool_library
go_toolchain = _go_toolchain
nogo = _nogo
# See go/providers.rst#GoLibrary for full documentation.
GoLibrary = _GoLibrary
# See go/providers.rst#GoSource for full documentation.
GoSource = _GoSource
# See go/providers.rst#GoPath for full documentation.
GoPath = _GoPath
# See go/providers.rst#GoArchive for full documentation.
GoArchive = _GoArchive
# See go/providers.rst#GoArchiveData for full documentation.
GoArchiveData = _GoArchiveData
# See go/providers.rst#GoSDK for full documentation.
GoSDK = _GoSDK
# See docs/go/core/rules.md#go_library for full documentation.
go_library = _go_library_macro
# See docs/go/core/rules.md#go_binary for full documentation.
go_binary = _go_binary_macro
# See docs/go/core/rules.md#go_test for full documentation.
go_test = _go_test_macro
# See docs/go/core/rules.md#go_test for full documentation.
go_source = _go_source
# See docs/go/core/rules.md#go_path for full documentation.
go_path = _go_path
def go_vet_test(*args, **kwargs):
fail("The go_vet_test rule has been removed. Please migrate to nogo instead, which supports vet tests.")
def go_rule(**kwargs):
fail("The go_rule function has been removed. Use rule directly instead. See https://github.com/bazelbuild/rules_go/blob/master/go/toolchains.rst#writing-new-go-rules")
def go_rules_dependencies():
_moved("go_rules_dependencies")
def go_register_toolchains(**kwargs):
_moved("go_register_toolchains")
def go_download_sdk(**kwargs):
_moved("go_download_sdk")
def go_host_sdk(**kwargs):
_moved("go_host_sdk")
def go_local_sdk(**kwargs):
_moved("go_local_sdk")
def go_wrap_sdk(**kwargs):
_moved("go_wrap_sdK")
def _moved(name):
fail(name + " has moved. Please load from " +
" @io_bazel_rules_go//go:deps.bzl instead of def.bzl.")
| """Public definitions for Go rules.
All public Go rules, providers, and other definitions are imported and
re-exported in this file. This allows the real location of definitions
to change for easier maintenance.
Definitions outside this file are private unless otherwise noted, and
may change without notice.
"""
load('//go/private:context.bzl', _go_context='go_context')
load('//go/private:providers.bzl', _GoArchive='GoArchive', _GoArchiveData='GoArchiveData', _GoLibrary='GoLibrary', _GoPath='GoPath', _GoSDK='GoSDK', _GoSource='GoSource')
load('//go/private/rules:sdk.bzl', _go_sdk='go_sdk')
load('//go/private:go_toolchain.bzl', _declare_toolchains='declare_toolchains', _go_toolchain='go_toolchain')
load('//go/private/rules:wrappers.bzl', _go_binary_macro='go_binary_macro', _go_library_macro='go_library_macro', _go_test_macro='go_test_macro')
load('//go/private/rules:source.bzl', _go_source='go_source')
load('//extras:embed_data.bzl', _go_embed_data='go_embed_data')
load('//go/private/tools:path.bzl', _go_path='go_path')
load('//go/private/rules:library.bzl', _go_tool_library='go_tool_library')
load('//go/private/rules:nogo.bzl', _nogo='nogo_wrapper')
tools_nogo = ['@org_golang_x_tools//go/analysis/passes/asmdecl:go_default_library', '@org_golang_x_tools//go/analysis/passes/assign:go_default_library', '@org_golang_x_tools//go/analysis/passes/atomic:go_default_library', '@org_golang_x_tools//go/analysis/passes/atomicalign:go_default_library', '@org_golang_x_tools//go/analysis/passes/bools:go_default_library', '@org_golang_x_tools//go/analysis/passes/buildssa:go_default_library', '@org_golang_x_tools//go/analysis/passes/buildtag:go_default_library', '@org_golang_x_tools//go/analysis/passes/composite:go_default_library', '@org_golang_x_tools//go/analysis/passes/copylock:go_default_library', '@org_golang_x_tools//go/analysis/passes/ctrlflow:go_default_library', '@org_golang_x_tools//go/analysis/passes/deepequalerrors:go_default_library', '@org_golang_x_tools//go/analysis/passes/errorsas:go_default_library', '@org_golang_x_tools//go/analysis/passes/findcall:go_default_library', '@org_golang_x_tools//go/analysis/passes/httpresponse:go_default_library', '@org_golang_x_tools//go/analysis/passes/ifaceassert:go_default_library', '@org_golang_x_tools//go/analysis/passes/inspect:go_default_library', '@org_golang_x_tools//go/analysis/passes/loopclosure:go_default_library', '@org_golang_x_tools//go/analysis/passes/lostcancel:go_default_library', '@org_golang_x_tools//go/analysis/passes/nilfunc:go_default_library', '@org_golang_x_tools//go/analysis/passes/nilness:go_default_library', '@org_golang_x_tools//go/analysis/passes/pkgfact:go_default_library', '@org_golang_x_tools//go/analysis/passes/printf:go_default_library', '@org_golang_x_tools//go/analysis/passes/shadow:go_default_library', '@org_golang_x_tools//go/analysis/passes/shift:go_default_library', '@org_golang_x_tools//go/analysis/passes/sortslice:go_default_library', '@org_golang_x_tools//go/analysis/passes/stdmethods:go_default_library', '@org_golang_x_tools//go/analysis/passes/stringintconv:go_default_library', '@org_golang_x_tools//go/analysis/passes/structtag:go_default_library', '@org_golang_x_tools//go/analysis/passes/testinggoroutine:go_default_library', '@org_golang_x_tools//go/analysis/passes/tests:go_default_library', '@org_golang_x_tools//go/analysis/passes/unmarshal:go_default_library', '@org_golang_x_tools//go/analysis/passes/unreachable:go_default_library', '@org_golang_x_tools//go/analysis/passes/unsafeptr:go_default_library', '@org_golang_x_tools//go/analysis/passes/unusedresult:go_default_library']
rules_go_version = '0.30.0'
declare_toolchains = _declare_toolchains
go_context = _go_context
go_embed_data = _go_embed_data
go_sdk = _go_sdk
go_tool_library = _go_tool_library
go_toolchain = _go_toolchain
nogo = _nogo
GoLibrary = _GoLibrary
GoSource = _GoSource
GoPath = _GoPath
GoArchive = _GoArchive
GoArchiveData = _GoArchiveData
GoSDK = _GoSDK
go_library = _go_library_macro
go_binary = _go_binary_macro
go_test = _go_test_macro
go_source = _go_source
go_path = _go_path
def go_vet_test(*args, **kwargs):
fail('The go_vet_test rule has been removed. Please migrate to nogo instead, which supports vet tests.')
def go_rule(**kwargs):
fail('The go_rule function has been removed. Use rule directly instead. See https://github.com/bazelbuild/rules_go/blob/master/go/toolchains.rst#writing-new-go-rules')
def go_rules_dependencies():
_moved('go_rules_dependencies')
def go_register_toolchains(**kwargs):
_moved('go_register_toolchains')
def go_download_sdk(**kwargs):
_moved('go_download_sdk')
def go_host_sdk(**kwargs):
_moved('go_host_sdk')
def go_local_sdk(**kwargs):
_moved('go_local_sdk')
def go_wrap_sdk(**kwargs):
    _moved('go_wrap_sdk')
def _moved(name):
fail(name + ' has moved. Please load from ' + ' @io_bazel_rules_go//go:deps.bzl instead of def.bzl.') |
# model settings
norm_cfg = dict(type='BN', requires_grad=True)
model = dict(
type='EncoderDecoder',
pretrained='pretrain/vit_base_patch16_224.pth',
backbone=dict(
type='VisionTransformer',
img_size=(224, 224),
patch_size=16,
in_channels=3,
embed_dim=768,
depth=12,
num_heads=12,
mlp_ratio=4,
# out_indices=(2, 5, 8, 11),
qkv_bias=True,
drop_rate=0.0,
attn_drop_rate=0.0,
drop_path_rate=0.0,
with_cls_token=True,
norm_cfg=dict(type='LN', eps=1e-6),
act_cfg=dict(type='GELU'),
norm_eval=False,
interpolate_mode='bicubic'),
neck=None,
decode_head=dict(
type='ASPPHead',
in_channels=768,
# in_index=3,
channels=512,
dilations=(1, 6, 12, 18),
dropout_ratio=0.1,
num_classes=21,
contrast=True,
norm_cfg=norm_cfg,
align_corners=False,
loss_decode=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
auxiliary_head=None,
# model training and testing settings
train_cfg=dict(),
test_cfg=dict(mode='whole')) # yapf: disable | norm_cfg = dict(type='BN', requires_grad=True)
model = dict(type='EncoderDecoder', pretrained='pretrain/vit_base_patch16_224.pth', backbone=dict(type='VisionTransformer', img_size=(224, 224), patch_size=16, in_channels=3, embed_dim=768, depth=12, num_heads=12, mlp_ratio=4, qkv_bias=True, drop_rate=0.0, attn_drop_rate=0.0, drop_path_rate=0.0, with_cls_token=True, norm_cfg=dict(type='LN', eps=1e-06), act_cfg=dict(type='GELU'), norm_eval=False, interpolate_mode='bicubic'), neck=None, decode_head=dict(type='ASPPHead', in_channels=768, channels=512, dilations=(1, 6, 12, 18), dropout_ratio=0.1, num_classes=21, contrast=True, norm_cfg=norm_cfg, align_corners=False, loss_decode=dict(type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)), auxiliary_head=None, train_cfg=dict(), test_cfg=dict(mode='whole')) |
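For context, a dict-style config like the one above is normally consumed through MMSegmentation's registry-based builders rather than imported directly. The sketch below is a hedged assumption (mmcv 1.x / mmseg 0.x APIs, hypothetical filename `vit_aspp_config.py`); note that the non-standard `contrast=True` key in `decode_head` suggests a forked `ASPPHead`, so a stock install would reject that argument.

```python
# Hedged sketch of how such a config is usually loaded and built; not the project's entry point.
from mmcv import Config                    # mmcv 1.x config loader
from mmseg.models import build_segmentor   # mmseg 0.x model builder

cfg = Config.fromfile('vit_aspp_config.py')          # hypothetical path to the snippet above
model = build_segmentor(cfg.model,
                        train_cfg=cfg.model.get('train_cfg'),
                        test_cfg=cfg.model.get('test_cfg'))
print(type(model).__name__)                          # EncoderDecoder
```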
# Copyright (C) 2019 Intel Corporation. All rights reserved.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "ACTION_NAMES")
load(
"@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl",
"feature",
"flag_group",
"flag_set",
"tool_path",
)
all_compile_actions = [
ACTION_NAMES.c_compile,
ACTION_NAMES.cpp_compile,
]
all_link_actions = [
ACTION_NAMES.cpp_link_executable,
ACTION_NAMES.cpp_link_dynamic_library,
ACTION_NAMES.cpp_link_nodeps_dynamic_library,
]
def _impl(ctx):
tool_paths = [
tool_path(
name = "gcc",
path = "/opt/emsdk/upstream/emscripten/emcc",
),
tool_path(
name = "ld",
path = "/opt/emsdk/upstream/emscripten/emcc",
),
tool_path(
name = "ar",
path = "/opt/emsdk/upstream/emscripten/emar",
),
tool_path(
name = "cpp",
path = "/opt/emsdk/upstream/emscripten/em++",
),
tool_path(
name = "gcov",
path = "/bin/false",
),
tool_path(
name = "nm",
path = "/bin/false",
),
tool_path(
name = "objdump",
path = "/bin/false",
),
tool_path(
name = "strip",
path = "/bin/false",
),
]
features = [ # NEW
feature(
name = "default_compile_flags",
enabled = True,
flag_sets = [
flag_set(
actions = all_compile_actions,
flag_groups = ([
flag_group(
flags = [
"-O3",
"-msimd128",
"-s",
"USE_PTHREADS=0",
"-s",
"ERROR_ON_UNDEFINED_SYMBOLS=0",
"-s",
"STANDALONE_WASM=1",
],
),
]),
),
],
),
feature(
name = "default_linker_flags",
enabled = True,
flag_sets = [
flag_set(
actions = all_link_actions,
flag_groups = ([
flag_group(
flags = [
"-O3",
"-msimd128",
"-s",
"USE_PTHREADS=0",
"-s",
"ERROR_ON_UNDEFINED_SYMBOLS=0",
"-s",
"STANDALONE_WASM=1",
"-Wl,--export=__heap_base",
"-Wl,--export=__data_end",
],
),
]),
),
],
),
]
return cc_common.create_cc_toolchain_config_info(
ctx = ctx,
features = features, # NEW
cxx_builtin_include_directories = [
"/opt/emsdk/upstream/emscripten/system/include/libcxx",
"/opt/emsdk/upstream/emscripten/system/lib/libcxxabi/include",
"/opt/emsdk/upstream/emscripten/system/include",
"/opt/emsdk/upstream/emscripten/system/include/libc",
"/opt/emsdk/upstream/emscripten/system/lib/libc/musl/arch/emscripten",
"/opt/emsdk/upstream/lib/clang/12.0.0/include/",
],
toolchain_identifier = "wasm-emsdk",
host_system_name = "i686-unknown-linux-gnu",
target_system_name = "wasm32-unknown-emscripten",
target_cpu = "wasm32",
target_libc = "unknown",
compiler = "emsdk",
abi_version = "unknown",
abi_libc_version = "unknown",
tool_paths = tool_paths,
)
emsdk_toolchain_config = rule(
implementation = _impl,
attrs = {},
provides = [CcToolchainConfigInfo],
)
| load('@bazel_tools//tools/build_defs/cc:action_names.bzl', 'ACTION_NAMES')
load('@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl', 'feature', 'flag_group', 'flag_set', 'tool_path')
all_compile_actions = [ACTION_NAMES.c_compile, ACTION_NAMES.cpp_compile]
all_link_actions = [ACTION_NAMES.cpp_link_executable, ACTION_NAMES.cpp_link_dynamic_library, ACTION_NAMES.cpp_link_nodeps_dynamic_library]
def _impl(ctx):
tool_paths = [tool_path(name='gcc', path='/opt/emsdk/upstream/emscripten/emcc'), tool_path(name='ld', path='/opt/emsdk/upstream/emscripten/emcc'), tool_path(name='ar', path='/opt/emsdk/upstream/emscripten/emar'), tool_path(name='cpp', path='/opt/emsdk/upstream/emscripten/em++'), tool_path(name='gcov', path='/bin/false'), tool_path(name='nm', path='/bin/false'), tool_path(name='objdump', path='/bin/false'), tool_path(name='strip', path='/bin/false')]
features = [feature(name='default_compile_flags', enabled=True, flag_sets=[flag_set(actions=all_compile_actions, flag_groups=[flag_group(flags=['-O3', '-msimd128', '-s', 'USE_PTHREADS=0', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0', '-s', 'STANDALONE_WASM=1'])])]), feature(name='default_linker_flags', enabled=True, flag_sets=[flag_set(actions=all_link_actions, flag_groups=[flag_group(flags=['-O3', '-msimd128', '-s', 'USE_PTHREADS=0', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0', '-s', 'STANDALONE_WASM=1', '-Wl,--export=__heap_base', '-Wl,--export=__data_end'])])])]
return cc_common.create_cc_toolchain_config_info(ctx=ctx, features=features, cxx_builtin_include_directories=['/opt/emsdk/upstream/emscripten/system/include/libcxx', '/opt/emsdk/upstream/emscripten/system/lib/libcxxabi/include', '/opt/emsdk/upstream/emscripten/system/include', '/opt/emsdk/upstream/emscripten/system/include/libc', '/opt/emsdk/upstream/emscripten/system/lib/libc/musl/arch/emscripten', '/opt/emsdk/upstream/lib/clang/12.0.0/include/'], toolchain_identifier='wasm-emsdk', host_system_name='i686-unknown-linux-gnu', target_system_name='wasm32-unknown-emscripten', target_cpu='wasm32', target_libc='unknown', compiler='emsdk', abi_version='unknown', abi_libc_version='unknown', tool_paths=tool_paths)
emsdk_toolchain_config = rule(implementation=_impl, attrs={}, provides=[CcToolchainConfigInfo]) |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Contains core logic for Rainman2
"""
__author__ = 'Ari Saha ([email protected]), Mingyang Liu([email protected])'
__date__ = 'Wednesday, February 14th 2018, 11:42:09 am'
| """
Contains core logic for Rainman2
"""
__author__ = 'Ari Saha ([email protected]), Mingyang Liu([email protected])'
__date__ = 'Wednesday, February 14th 2018, 11:42:09 am' |
train_data_path = "../data/no_cycle/train.data"
dev_data_path = "../data/no_cycle/dev.data"
test_data_path = "../data/no_cycle/test.data"
word_idx_file_path = "../data/word.idx"
word_embedding_dim = 100
train_batch_size = 32
dev_batch_size = 500
test_batch_size = 500
l2_lambda = 0.000001
learning_rate = 0.001
epochs = 100
encoder_hidden_dim = 200
num_layers_decode = 1
word_size_max = 1
dropout = 0.0
path_embed_method = "lstm" # cnn or lstm or bi-lstm
unknown_word = "<unk>"
PAD = "<PAD>"
GO = "<GO>"
EOS = "<EOS>"
deal_unknown_words = True
seq_max_len = 11
decoder_type = "greedy" # greedy, beam
beam_width = 4
attention = True
num_layers = 1 # 1 or 2
# the following are for the graph encoding method
weight_decay = 0.0000
sample_size_per_layer = 4
sample_layer_size = 4
hidden_layer_dim = 100
feature_max_len = 1
feature_encode_type = "uni"
# graph_encode_method = "max-pooling" # "lstm" or "max-pooling"
graph_encode_direction = "bi" # "single" or "bi"
concat = True
encoder = "gated_gcn" # "gated_gcn" "gcn" "seq"
lstm_in_gcn = "none" # before, after, none
| train_data_path = '../data/no_cycle/train.data'
dev_data_path = '../data/no_cycle/dev.data'
test_data_path = '../data/no_cycle/test.data'
word_idx_file_path = '../data/word.idx'
word_embedding_dim = 100
train_batch_size = 32
dev_batch_size = 500
test_batch_size = 500
l2_lambda = 1e-06
learning_rate = 0.001
epochs = 100
encoder_hidden_dim = 200
num_layers_decode = 1
word_size_max = 1
dropout = 0.0
path_embed_method = 'lstm'
unknown_word = '<unk>'
pad = '<PAD>'
go = '<GO>'
eos = '<EOS>'
deal_unknown_words = True
seq_max_len = 11
decoder_type = 'greedy'
beam_width = 4
attention = True
num_layers = 1
weight_decay = 0.0
sample_size_per_layer = 4
sample_layer_size = 4
hidden_layer_dim = 100
feature_max_len = 1
feature_encode_type = 'uni'
graph_encode_direction = 'bi'
concat = True
encoder = 'gated_gcn'
lstm_in_gcn = 'none' |
from copy import deepcopy
class Input(object):
def __init__(self, type, data):
self.__type = type
self.__data = deepcopy(data)
def __repr__(self):
return repr(self.__data)
def __str__(self):
return str(self.__type) + str(self.__data)
| from copy import deepcopy
class Input(object):
def __init__(self, type, data):
self.__type = type
self.__data = deepcopy(data)
def __repr__(self):
return repr(self.__data)
def __str__(self):
return str(self.__type) + str(self.__data) |
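A small usage sketch of the `Input` class above, showing why the constructor deep-copies `data`: mutating the caller's object afterwards does not leak into the stored copy. This assumes the class (and its `deepcopy` import) is in scope as defined.

```python
payload = {'values': [1, 2, 3]}
inp = Input('sensor', payload)
payload['values'].append(4)   # mutate the caller's dict after construction
print(repr(inp))              # {'values': [1, 2, 3]}  -- the stored deep copy is unaffected
print(str(inp))               # sensor{'values': [1, 2, 3]}
```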
import collections
class SnakeGame(object):
def __init__(self, width,height,food):
"""
Initialize your data structure here.
@param width - screen width
@param height - screen height
@param food - A list of food positions
E.g food = [[1,1], [1,0]] means the first food is positioned at [1,1], the second is at [1,0].
:type width: int
:type height: int
:type food: List[List[int]]
"""
self.width=width
self.height=height
self.food=collections.deque(food)
self.position=collections.deque([(0,0)])
self.moveops={'U':(-1,0),'L':(0,-1),'R':(0,1),'D':(1,0)}
self.score=0
def move(self, direction):
"""
Moves the snake.
@param direction - 'U' = Up, 'L' = Left, 'R' = Right, 'D' = Down
@return The game's score after the move. Return -1 if game over.
Game over when snake crosses the screen boundary or bites its body.
:type direction: str
:rtype: int
"""
if direction not in self.moveops:
return -1
peak,tail=self.position[0],self.position[-1]
self.position.pop()
idxi,idxj=self.moveops[direction]
newi,newj=peak[0]+idxi,peak[1]+idxj
if (newi,newj) in self.position or \
newi<0 or newi>=self.height or \
newj<0 or newj>=self.width:
return -1
self.position.appendleft((newi,newj))
if self.food and [newi,newj]==self.food[0]:
self.food.popleft()
self.position.append(tail)
self.score+=1
return self.score
# Your SnakeGame object will be instantiated and called as such:
# obj = SnakeGame(width, height, food)
# param_1 = obj.move(direction) | import collections
class Snakegame(object):
def __init__(self, width, height, food):
"""
Initialize your data structure here.
@param width - screen width
@param height - screen height
@param food - A list of food positions
E.g food = [[1,1], [1,0]] means the first food is positioned at [1,1], the second is at [1,0].
:type width: int
:type height: int
:type food: List[List[int]]
"""
self.width = width
self.height = height
self.food = collections.deque(food)
self.position = collections.deque([(0, 0)])
self.moveops = {'U': (-1, 0), 'L': (0, -1), 'R': (0, 1), 'D': (1, 0)}
self.score = 0
def move(self, direction):
"""
Moves the snake.
@param direction - 'U' = Up, 'L' = Left, 'R' = Right, 'D' = Down
@return The game's score after the move. Return -1 if game over.
Game over when snake crosses the screen boundary or bites its body.
:type direction: str
:rtype: int
"""
if direction not in self.moveops:
return -1
(peak, tail) = (self.position[0], self.position[-1])
self.position.pop()
(idxi, idxj) = self.moveops[direction]
(newi, newj) = (peak[0] + idxi, peak[1] + idxj)
if (newi, newj) in self.position or newi < 0 or newi >= self.height or (newj < 0) or (newj >= self.width):
return -1
self.position.appendleft((newi, newj))
if self.food and [newi, newj] == self.food[0]:
self.food.popleft()
self.position.append(tail)
self.score += 1
return self.score |
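A short usage sketch of the snake game class above, on a 3-wide, 2-high board with food at [1, 2] and then [0, 1]; the expected return values in the comments were traced by hand against the `move` logic.

```python
game = Snakegame(width=3, height=2, food=[[1, 2], [0, 1]])
print(game.move('R'))  # 0   no food eaten yet
print(game.move('D'))  # 0
print(game.move('R'))  # 1   eats the food at [1, 2], snake grows
print(game.move('U'))  # 1
print(game.move('L'))  # 2   eats the food at [0, 1]
print(game.move('U'))  # -1  moves off the board: game over
```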
class Player:
def __init__(self, nickname, vapor_id, player_id, ip):
self.nickname = nickname
self.vapor_id = vapor_id
self.player_id = player_id
self.ip = ip
self.not_joined = True
self.loads_map = True
self.joined_after_change_map = True
class Players:
def __init__(self, main_object, modded, lobby):
self.main = main_object
self.players = []
self.modded = modded
self.map_changed = False
self.lobby = lobby
self.commands = None
def get_commands_object(self, commands_object):
self.commands = commands_object
def _on_map_change(self, map_name):
self.map_changed = map_name
if self.modded and self.players:
for player in self.players:
player.loads_map = True
def check_if_everyone_joined_after_change_map(self):
for player in self.players:
if player.loads_map and not player.joined_after_change_map:
return False
return True
def _on_player_info_ev(self, player_id):
player = [player for player in self.players if player.player_id == player_id][0]
if self.map_changed or hasattr(player, "not_joined"):
if player.loads_map and player.joined_after_change_map:
player.joined_after_change_map = False
elif player.loads_map and not player.joined_after_change_map:
player.loads_map = False
player.joined_after_change_map = True
self.main.on_player_map_change(player, self.map_changed)
if hasattr(player, "not_joined"):
del player.not_joined
self.main.on_client_join(player)
if self.check_if_everyone_joined_after_change_map():
self.map_changed = False
def check_nickname_existence(self, nickname):
for player in self.players:
if nickname == player.nickname:
return True
return False
def get_all_players(self, nicknames, vapor_ids, player_ids, ips):
players_list = [nicknames, vapor_ids, player_ids, ips]
for count in range(len(nicknames)):
self.players.append(Player(*[player[count] for player in players_list]))
def add(self, nickname, vapor_id, player_id, ip):
self.players.append(Player(nickname, vapor_id, player_id, ip))
def remove(self, nickname):
for player in self.players:
if nickname == player.nickname:
self.players.remove(player)
break
if self.lobby and len(self.players) == 0:
self.commands.change_map(self.lobby)
def nickname_change(self, old_nickname, new_nickname):
for player in self.players:
if old_nickname == player.nickname:
player.nickname = new_nickname
break
def all_nicknames(self):
return [player.nickname for player in self.players]
def player_from_nickname(self, nickname):
for player in self.players:
if nickname == player.nickname:
return player
def player_from_vapor_id(self, vapor_id):
for player in self.players:
if vapor_id == player.vapor_id:
return player
def player_from_player_id(self, player_id):
for player in self.players:
if player_id == player.player_id:
return player
def get_all_vapor_ids(self):
return [player.vapor_id for player in self.players]
| class Player:
def __init__(self, nickname, vapor_id, player_id, ip):
self.nickname = nickname
self.vapor_id = vapor_id
self.player_id = player_id
self.ip = ip
self.not_joined = True
self.loads_map = True
self.joined_after_change_map = True
class Players:
def __init__(self, main_object, modded, lobby):
self.main = main_object
self.players = []
self.modded = modded
self.map_changed = False
self.lobby = lobby
self.commands = None
def get_commands_object(self, commands_object):
self.commands = commands_object
def _on_map_change(self, map_name):
self.map_changed = map_name
if self.modded and self.players:
for player in self.players:
player.loads_map = True
def check_if_everyone_joined_after_change_map(self):
for player in self.players:
if player.loads_map and (not player.joined_after_change_map):
return False
return True
def _on_player_info_ev(self, player_id):
player = [player for player in self.players if player.player_id == player_id][0]
if self.map_changed or hasattr(player, 'not_joined'):
if player.loads_map and player.joined_after_change_map:
player.joined_after_change_map = False
elif player.loads_map and (not player.joined_after_change_map):
player.loads_map = False
player.joined_after_change_map = True
self.main.on_player_map_change(player, self.map_changed)
if hasattr(player, 'not_joined'):
del player.not_joined
self.main.on_client_join(player)
if self.check_if_everyone_joined_after_change_map():
self.map_changed = False
def check_nickname_existence(self, nickname):
for player in self.players:
if nickname == player.nickname:
return True
return False
def get_all_players(self, nicknames, vapor_ids, player_ids, ips):
players_list = [nicknames, vapor_ids, player_ids, ips]
for count in range(len(nicknames)):
            self.players.append(Player(*[player[count] for player in players_list]))
def add(self, nickname, vapor_id, player_id, ip):
        self.players.append(Player(nickname, vapor_id, player_id, ip))
def remove(self, nickname):
for player in self.players:
if nickname == player.nickname:
self.players.remove(player)
break
if self.lobby and len(self.players) == 0:
self.commands.change_map(self.lobby)
def nickname_change(self, old_nickname, new_nickname):
for player in self.players:
if old_nickname == player.nickname:
player.nickname = new_nickname
break
def all_nicknames(self):
return [player.nickname for player in self.players]
def player_from_nickname(self, nickname):
for player in self.players:
if nickname == player.nickname:
return player
def player_from_vapor_id(self, vapor_id):
for player in self.players:
if vapor_id == player.vapor_id:
return player
def player_from_player_id(self, player_id):
for player in self.players:
if player_id == player.player_id:
return player
def get_all_vapor_ids(self):
return [player.vapor_id for player in self.players] |
# 377 Combination Sum IV
# Given an integer array with all positive numbers and no duplicates,
# find the number of possible combinations that add up to a positive integer target.
#
# Example:
#
# nums = [1, 2, 3]
# target = 4
#
# The possible combination ways are:
# (1, 1, 1, 1)
# (1, 1, 2)
# (1, 2, 1)
# (1, 3)
# (2, 1, 1)
# (2, 2)
# (3, 1)
#
# Note that different sequences are counted as different combinations.
#
# Therefore the output is 7.
#
# Follow up:
# What if negative numbers are allowed in the given array?
# How does it change the problem?
# What limitation we need to add to the question to allow negative numbers?
class Solution:
def combinationSum4(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
nums.sort()
res = [0] * (target + 1)
for i in range(1, len(res)):
for num in nums:
if num > i:
break
elif num == i:
res[i] += 1
else:
res[i] += res[i-num]
return res[target]
# https://www.hrwhisper.me/leetcode-combination-sum-iv/
# dp[i] += dp[i-num]
def combinationSum4(self, nums, target):
dp = [1] + [0] * target
for i in range(1, target+1):
for num in nums:
if i >= num:
dp[i] += dp[i-num]
return dp[target]
print(Solution().combinationSum4([1, 2, 3], 4))
| class Solution:
def combination_sum4(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
nums.sort()
res = [0] * (target + 1)
for i in range(1, len(res)):
for num in nums:
if num > i:
break
elif num == i:
res[i] += 1
else:
res[i] += res[i - num]
return res[target]
def combination_sum4(self, nums, target):
dp = [1] + [0] * target
for i in range(1, target + 1):
for num in nums:
if i >= num:
dp[i] += dp[i - num]
return dp[target]
print(Solution().combination_sum4([1, 2, 3], 4)) |
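A quick hand-check of the bottom-up recurrence `dp[i] += dp[i - num]` used by the second method, on the example from the problem statement (nums = [1, 2, 3], target = 4):

```python
nums, target = [1, 2, 3], 4
dp = [1] + [0] * target        # dp[0] = 1: the empty combination is the one way to build 0
for i in range(1, target + 1):
    for num in nums:
        if i >= num:
            dp[i] += dp[i - num]
assert dp == [1, 1, 2, 4, 7]   # dp[4] == 7 matches the expected output of 7
```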
#
# PySNMP MIB module DABING-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file://..\DABING-MIB.mib
# Produced by pysmi-0.3.4 at Tue Mar 22 12:53:47 2022
# On host ? platform ? version ? by user ?
# Using Python version 3.8.2 (tags/v3.8.2:7b3ab59, Feb 25 2020, 22:45:29) [MSC v.1916 32 bit (Intel)]
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, ModuleIdentity, IpAddress, ObjectIdentity, iso, Counter32, Unsigned32, Bits, NotificationType, TimeTicks, Counter64, enterprises, MibIdentifier, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "ModuleIdentity", "IpAddress", "ObjectIdentity", "iso", "Counter32", "Unsigned32", "Bits", "NotificationType", "TimeTicks", "Counter64", "enterprises", "MibIdentifier", "Integer32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
dabing = ModuleIdentity((1, 3, 6, 1, 4, 1, 55532))
dabing.setRevisions(('2022-03-17 00:00',))
if mibBuilder.loadTexts: dabing.setLastUpdated('202203170000Z')
if mibBuilder.loadTexts: dabing.setOrganization('www.stuba.sk')
Parameters = MibIdentifier((1, 3, 6, 1, 4, 1, 55532, 1))
Agent = MibIdentifier((1, 3, 6, 1, 4, 1, 55532, 2))
Manager = MibIdentifier((1, 3, 6, 1, 4, 1, 55532, 3))
Notifications = MibIdentifier((1, 3, 6, 1, 4, 1, 55532, 4))
NotificationPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 55532, 4, 1))
NotificationObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 55532, 4, 2))
channel = MibScalar((1, 3, 6, 1, 4, 1, 55532, 1, 1), OctetString().clone('12C')).setMaxAccess("readonly")
if mibBuilder.loadTexts: channel.setStatus('current')
interval = MibScalar((1, 3, 6, 1, 4, 1, 55532, 1, 2), Integer32().clone(960)).setMaxAccess("readonly")
if mibBuilder.loadTexts: interval.setStatus('current')
trapEnabled = MibScalar((1, 3, 6, 1, 4, 1, 55532, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trapEnabled.setStatus('current')
agentIdentifier = MibScalar((1, 3, 6, 1, 4, 1, 55532, 2, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentIdentifier.setStatus('current')
agentLabel = MibScalar((1, 3, 6, 1, 4, 1, 55532, 2, 2), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: agentLabel.setStatus('current')
agentStatus = MibScalar((1, 3, 6, 1, 4, 1, 55532, 2, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: agentStatus.setStatus('current')
managerHostname = MibScalar((1, 3, 6, 1, 4, 1, 55532, 3, 1), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: managerHostname.setStatus('current')
managerPort = MibScalar((1, 3, 6, 1, 4, 1, 55532, 3, 2), Integer32().clone(162)).setMaxAccess("readonly")
if mibBuilder.loadTexts: managerPort.setStatus('current')
genericPayload = MibScalar((1, 3, 6, 1, 4, 1, 55532, 4, 2, 1), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: genericPayload.setStatus('current')
malfunctionTrap = NotificationType((1, 3, 6, 1, 4, 1, 55532, 4, 1, 1)).setObjects(("DABING-MIB", "genericPayload"))
if mibBuilder.loadTexts: malfunctionTrap.setStatus('current')
testTrap = NotificationType((1, 3, 6, 1, 4, 1, 55532, 4, 1, 2)).setObjects(("DABING-MIB", "genericPayload"))
if mibBuilder.loadTexts: testTrap.setStatus('current')
mibBuilder.exportSymbols("DABING-MIB", Notifications=Notifications, channel=channel, PYSNMP_MODULE_ID=dabing, testTrap=testTrap, malfunctionTrap=malfunctionTrap, Parameters=Parameters, agentLabel=agentLabel, managerPort=managerPort, trapEnabled=trapEnabled, managerHostname=managerHostname, Manager=Manager, NotificationPrefix=NotificationPrefix, Agent=Agent, genericPayload=genericPayload, NotificationObjects=NotificationObjects, agentIdentifier=agentIdentifier, dabing=dabing, agentStatus=agentStatus, interval=interval)
| (octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_intersection, constraints_union, value_range_constraint, single_value_constraint, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'ConstraintsUnion', 'ValueRangeConstraint', 'SingleValueConstraint', 'ValueSizeConstraint')
(notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance')
(mib_scalar, mib_table, mib_table_row, mib_table_column, gauge32, module_identity, ip_address, object_identity, iso, counter32, unsigned32, bits, notification_type, time_ticks, counter64, enterprises, mib_identifier, integer32) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Gauge32', 'ModuleIdentity', 'IpAddress', 'ObjectIdentity', 'iso', 'Counter32', 'Unsigned32', 'Bits', 'NotificationType', 'TimeTicks', 'Counter64', 'enterprises', 'MibIdentifier', 'Integer32')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
dabing = module_identity((1, 3, 6, 1, 4, 1, 55532))
dabing.setRevisions(('2022-03-17 00:00',))
if mibBuilder.loadTexts:
dabing.setLastUpdated('202203170000Z')
if mibBuilder.loadTexts:
dabing.setOrganization('www.stuba.sk')
parameters = mib_identifier((1, 3, 6, 1, 4, 1, 55532, 1))
agent = mib_identifier((1, 3, 6, 1, 4, 1, 55532, 2))
manager = mib_identifier((1, 3, 6, 1, 4, 1, 55532, 3))
notifications = mib_identifier((1, 3, 6, 1, 4, 1, 55532, 4))
notification_prefix = mib_identifier((1, 3, 6, 1, 4, 1, 55532, 4, 1))
notification_objects = mib_identifier((1, 3, 6, 1, 4, 1, 55532, 4, 2))
channel = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 1, 1), octet_string().clone('12C')).setMaxAccess('readonly')
if mibBuilder.loadTexts:
channel.setStatus('current')
interval = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 1, 2), integer32().clone(960)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
interval.setStatus('current')
trap_enabled = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 1, 3), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    trap_enabled.setStatus('current')
agent_identifier = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 2, 1), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    agent_identifier.setStatus('current')
agent_label = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 2, 2), octet_string()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
    agent_label.setStatus('current')
agent_status = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 2, 3), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    agent_status.setStatus('current')
manager_hostname = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 3, 1), octet_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    manager_hostname.setStatus('current')
manager_port = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 3, 2), integer32().clone(162)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    manager_port.setStatus('current')
generic_payload = mib_scalar((1, 3, 6, 1, 4, 1, 55532, 4, 2, 1), octet_string()).setMaxAccess('accessiblefornotify')
if mibBuilder.loadTexts:
    generic_payload.setStatus('current')
malfunction_trap = notification_type((1, 3, 6, 1, 4, 1, 55532, 4, 1, 1)).setObjects(('DABING-MIB', 'genericPayload'))
if mibBuilder.loadTexts:
    malfunction_trap.setStatus('current')
test_trap = notification_type((1, 3, 6, 1, 4, 1, 55532, 4, 1, 2)).setObjects(('DABING-MIB', 'genericPayload'))
if mibBuilder.loadTexts:
    test_trap.setStatus('current')
mibBuilder.exportSymbols('DABING-MIB', Notifications=notifications, channel=channel, PYSNMP_MODULE_ID=dabing, testTrap=test_trap, malfunctionTrap=malfunction_trap, Parameters=parameters, agentLabel=agent_label, managerPort=manager_port, trapEnabled=trap_enabled, managerHostname=manager_hostname, Manager=manager, NotificationPrefix=notification_prefix, Agent=agent, genericPayload=generic_payload, NotificationObjects=notification_objects, agentIdentifier=agent_identifier, dabing=dabing, agentStatus=agent_status, interval=interval) |
def solve(polynomial):
"""
input is polynomial
if more than one variable, returns 'too many variables'
looks for formula to apply to coefficients
returns solution or 'I cannot solve yet...'
"""
if len(polynomial.term_matrix[0]) > 2:
return 'too many variables'
elif len(polynomial.term_matrix[0]) == 1:
return polynomial.term_matrix[1][0]
elif len(polynomial.term_matrix[0]) == 2:
degree = polynomial.term_matrix[1][1]
if degree == 1:
if len(polynomial.term_matrix) == 2:
return 0
else:
return -polynomial.term_matrix[2][0]/polynomial.term_matrix[1][0]
if degree == 2:
ans = quadratic_formula(polynomial)
return ans
if degree > 2:
return Durand_Kerner(polynomial)
def quadratic_formula(polynomial):
"""
input is single-variable polynomial of degree 2
returns zeros
"""
if len(polynomial.term_matrix) == 3:
if polynomial.term_matrix[2][1] == 1:
a, b = polynomial.term_matrix[1][0], polynomial.term_matrix[2][0]
return 0, -b/a
a, c = polynomial.term_matrix[1][0], polynomial.term_matrix[2][0]
return (-c/a)**.5, -(-c/a)**.5
if len(polynomial.term_matrix) == 2:
a, b, c, = polynomial.term_matrix[1][0], 0, 0
elif len(polynomial.term_matrix) == 3:
a, b, c = polynomial.term_matrix[1][0], polynomial.term_matrix[2][0], 0
else:
a, b, c = polynomial.term_matrix[1][0], polynomial.term_matrix[2][0], polynomial.term_matrix[3][0]
    ans1 = (-b + (b**2 - 4*a*c)**.5)/(2*a)
    ans2 = (-b - (b**2 - 4*a*c)**.5)/(2*a)
if ans1 == ans2:
return ans1
return ans1, ans2
def isclose(a, b, rel_tol=1e-09, abs_tol=0.0001):
"""
returns boolean whether abs(a-b) is less than abs_total or rel_total*max(a, b)
"""
return abs(a-b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
def Durand_Kerner(f):
"""
input polynomial
returns numerical approximation of all complex roots
"""
roots = []
for i in range(f.degree()):
roots.append((0.4 + 0.9j)**i)
diff = 1
diff_temp = 0
def iterate():
nonlocal roots
new_roots = roots[:]
for i in range(len(roots)):
q = 1
for j, root in enumerate(roots):
if j != i:
q *= roots[i] - root
new_roots[i] = roots[i] - f(roots[i])/q
nonlocal diff
nonlocal diff_temp
diff_temp = diff
diff = 0
for i in range(len(roots)):
diff += abs(roots[i] - new_roots[i])
roots = new_roots
while diff > .00000001 and not isclose(diff_temp, diff):
iterate()
for i in range(len(roots)):
if isclose(roots[i].real, round(roots[i].real)):
temp = round(roots[i].real)
roots[i] -= roots[i].real
roots[i] += temp
if isclose(roots[i].imag, round(roots[i].imag)):
temp = round(roots[i].imag)
roots[i] -= roots[i].imag*1j
roots[i] += temp*1j
return roots
if __name__ == '__main__':
pass
| def solve(polynomial):
"""
input is polynomial
if more than one variable, returns 'too many variables'
looks for formula to apply to coefficients
returns solution or 'I cannot solve yet...'
"""
if len(polynomial.term_matrix[0]) > 2:
return 'too many variables'
elif len(polynomial.term_matrix[0]) == 1:
return polynomial.term_matrix[1][0]
elif len(polynomial.term_matrix[0]) == 2:
degree = polynomial.term_matrix[1][1]
if degree == 1:
if len(polynomial.term_matrix) == 2:
return 0
else:
return -polynomial.term_matrix[2][0] / polynomial.term_matrix[1][0]
if degree == 2:
ans = quadratic_formula(polynomial)
return ans
if degree > 2:
return durand__kerner(polynomial)
def quadratic_formula(polynomial):
"""
input is single-variable polynomial of degree 2
returns zeros
"""
if len(polynomial.term_matrix) == 3:
if polynomial.term_matrix[2][1] == 1:
(a, b) = (polynomial.term_matrix[1][0], polynomial.term_matrix[2][0])
return (0, -b / a)
(a, c) = (polynomial.term_matrix[1][0], polynomial.term_matrix[2][0])
return ((-c / a) ** 0.5, -(-c / a) ** 0.5)
if len(polynomial.term_matrix) == 2:
(a, b, c) = (polynomial.term_matrix[1][0], 0, 0)
elif len(polynomial.term_matrix) == 3:
(a, b, c) = (polynomial.term_matrix[1][0], polynomial.term_matrix[2][0], 0)
else:
(a, b, c) = (polynomial.term_matrix[1][0], polynomial.term_matrix[2][0], polynomial.term_matrix[3][0])
    ans1 = (-b + (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)
    ans2 = (-b - (b ** 2 - 4 * a * c) ** 0.5) / (2 * a)
if ans1 == ans2:
return ans1
return (ans1, ans2)
def isclose(a, b, rel_tol=1e-09, abs_tol=0.0001):
"""
returns boolean whether abs(a-b) is less than abs_total or rel_total*max(a, b)
"""
return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)
def durand__kerner(f):
"""
input polynomial
returns numerical approximation of all complex roots
"""
roots = []
for i in range(f.degree()):
roots.append((0.4 + 0.9j) ** i)
diff = 1
diff_temp = 0
def iterate():
nonlocal roots
new_roots = roots[:]
for i in range(len(roots)):
q = 1
for (j, root) in enumerate(roots):
if j != i:
q *= roots[i] - root
new_roots[i] = roots[i] - f(roots[i]) / q
nonlocal diff
nonlocal diff_temp
diff_temp = diff
diff = 0
for i in range(len(roots)):
diff += abs(roots[i] - new_roots[i])
roots = new_roots
while diff > 1e-08 and (not isclose(diff_temp, diff)):
iterate()
for i in range(len(roots)):
if isclose(roots[i].real, round(roots[i].real)):
temp = round(roots[i].real)
roots[i] -= roots[i].real
roots[i] += temp
if isclose(roots[i].imag, round(roots[i].imag)):
temp = round(roots[i].imag)
roots[i] -= roots[i].imag * 1j
roots[i] += temp * 1j
return roots
if __name__ == '__main__':
pass |
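A worked check of `quadratic_formula` above on x**2 - 5x + 6, whose roots are 3 and 2. The `Poly` class below is a hypothetical stand-in: the real polynomial object only needs a `term_matrix` laid out as `[header, [coefficient, degree], ...]`, which is all these functions read.

```python
class Poly:                                   # hypothetical stand-in, not the real class
    def __init__(self, term_matrix):
        self.term_matrix = term_matrix

# term_matrix for x**2 - 5x + 6: header row, then one [coefficient, degree] pair per term
p = Poly([['constant', 'x'], [1.0, 2], [-5.0, 1], [6.0, 0]])
print(quadratic_formula(p))                   # (3.0, 2.0)
```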
class TreeNode:
def __init__(self, name, data, parent=None):
self.name = name
self.parent = parent
self.data = data
self.childs = {}
def add_child(self, name, data):
self.childs.update({name:(type(self))(name, data, self)})
def rm_branch(self, name, ansistors_n: list = None,):
focus = self.childs
while True:
if ansistors_n == None or ansistors_n == self.name:
del focus[name]
break
elif ansistors_n[0] in focus:
focus = (focus[ansistors_n[0]]).childs
del ansistors_n[0]
elif name in focus and ansistors_n is None:
del focus[name]
break
else:
print(focus)
raise NameError(f"couldn't find branch {ansistors_n[0]}")
def __getitem__(self, item):
return self.childs[item]
def __setitem__(self, key, value):
self.childs[key] = value
def __delitem__(self, key, ansistors_n: list = None):
self.rm_branch(key, ansistors_n)
| class Treenode:
def __init__(self, name, data, parent=None):
self.name = name
self.parent = parent
self.data = data
self.childs = {}
def add_child(self, name, data):
self.childs.update({name: type(self)(name, data, self)})
def rm_branch(self, name, ansistors_n: list=None):
focus = self.childs
while True:
if ansistors_n == None or ansistors_n == self.name:
del focus[name]
break
elif ansistors_n[0] in focus:
focus = focus[ansistors_n[0]].childs
del ansistors_n[0]
elif name in focus and ansistors_n is None:
del focus[name]
break
else:
print(focus)
raise name_error(f"couldn't find branch {ansistors_n[0]}")
def __getitem__(self, item):
return self.childs[item]
def __setitem__(self, key, value):
self.childs[key] = value
def __delitem__(self, key, ansistors_n: list=None):
self.rm_branch(key, ansistors_n) |
api_key = "9N7hvPP9yFrjBnELpBdthluBjiOWzJZw"
mongo_url = 'mongodb://localhost:27017'
mongo_db = 'CarPopularity'
mongo_collections = ['CarSalesByYear', 'PopularCarsByRegion']
years_data = ['2019', '2018', '2017', '2016', '2015']
test_mode = True | api_key = '9N7hvPP9yFrjBnELpBdthluBjiOWzJZw'
mongo_url = 'mongodb://localhost:27017'
mongo_db = 'CarPopularity'
mongo_collections = ['CarSalesByYear', 'PopularCarsByRegion']
years_data = ['2019', '2018', '2017', '2016', '2015']
test_mode = True |
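For context, a settings module like the one above is typically consumed with pymongo; a minimal hedged sketch (the URL, database, and collection names come straight from the constants above, the rest is assumed usage):

```python
from pymongo import MongoClient

client = MongoClient(mongo_url)              # 'mongodb://localhost:27017'
db = client[mongo_db]                        # 'CarPopularity'
sales = db[mongo_collections[0]]             # 'CarSalesByYear'
if test_mode:
    print(sales.estimated_document_count())  # read-only sanity check
```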
class Solution:
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
if not nums:
return [-1, -1]
low = 0
high = len(nums) - 1
f = 0
while low<=high:
mid = (low+high)//2
if nums[mid] == target:
f = 1
break
elif nums[mid] < target:
low = mid + 1
elif nums[mid] > target:
high = mid - 1
i, j = mid, mid
while i>=1 and nums[i-1] == target:
i = i-1
while j<len(nums)-1 and nums[j+1] == target:
j = j+1
if f == 1:
return [i, j]
else:
return [-1, -1]
| class Solution:
def search_range(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
if not nums:
return [-1, -1]
low = 0
high = len(nums) - 1
f = 0
while low <= high:
mid = (low + high) // 2
if nums[mid] == target:
f = 1
break
elif nums[mid] < target:
low = mid + 1
elif nums[mid] > target:
high = mid - 1
(i, j) = (mid, mid)
while i >= 1 and nums[i - 1] == target:
i = i - 1
while j < len(nums) - 1 and nums[j + 1] == target:
j = j + 1
if f == 1:
return [i, j]
else:
return [-1, -1] |
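A quick usage sketch of the method above; the expected outputs follow from the binary search plus the outward scan for the left and right bounds.

```python
sol = Solution()
print(sol.search_range([5, 7, 7, 8, 8, 10], 8))  # [3, 4]
print(sol.search_range([5, 7, 7, 8, 8, 10], 6))  # [-1, -1]
```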
#
# PySNMP MIB module MWORKS-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MWORKS-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:16:04 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, Unsigned32, ObjectIdentity, IpAddress, Bits, MibIdentifier, Integer32, enterprises, ModuleIdentity, TimeTicks, Counter32, NotificationType, iso, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Unsigned32", "ObjectIdentity", "IpAddress", "Bits", "MibIdentifier", "Integer32", "enterprises", "ModuleIdentity", "TimeTicks", "Counter32", "NotificationType", "iso", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
tecElite = MibIdentifier((1, 3, 6, 1, 4, 1, 217))
meterWorks = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16))
mw501 = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16, 1))
mwMem = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16, 1, 1))
mwHeap = MibIdentifier((1, 3, 6, 1, 4, 1, 217, 16, 1, 2))
mwMemCeiling = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwMemCeiling.setStatus('mandatory')
if mibBuilder.loadTexts: mwMemCeiling.setDescription('bytes of memory the agent memory manager will allow the agent to use.')
mwMemUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwMemUsed.setStatus('mandatory')
if mibBuilder.loadTexts: mwMemUsed.setDescription("bytes of memory that meterworks has malloc'ed. some of this may be in free pools.")
mwHeapTotal = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwHeapTotal.setStatus('mandatory')
if mibBuilder.loadTexts: mwHeapTotal.setDescription('bytes of memory given to the heap manager.')
mwHeapUsed = MibScalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mwHeapUsed.setStatus('mandatory')
if mibBuilder.loadTexts: mwHeapUsed.setDescription('bytes of available memory in the heap.')
mibBuilder.exportSymbols("MWORKS-MIB", mwHeap=mwHeap, mwHeapUsed=mwHeapUsed, mwMemCeiling=mwMemCeiling, meterWorks=meterWorks, tecElite=tecElite, mwMem=mwMem, mw501=mw501, mwHeapTotal=mwHeapTotal, mwMemUsed=mwMemUsed)
| (object_identifier, octet_string, integer) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'OctetString', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_range_constraint, single_value_constraint, constraints_union, value_size_constraint, constraints_intersection) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueRangeConstraint', 'SingleValueConstraint', 'ConstraintsUnion', 'ValueSizeConstraint', 'ConstraintsIntersection')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(gauge32, unsigned32, object_identity, ip_address, bits, mib_identifier, integer32, enterprises, module_identity, time_ticks, counter32, notification_type, iso, counter64, mib_scalar, mib_table, mib_table_row, mib_table_column) = mibBuilder.importSymbols('SNMPv2-SMI', 'Gauge32', 'Unsigned32', 'ObjectIdentity', 'IpAddress', 'Bits', 'MibIdentifier', 'Integer32', 'enterprises', 'ModuleIdentity', 'TimeTicks', 'Counter32', 'NotificationType', 'iso', 'Counter64', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn')
(textual_convention, display_string) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString')
tec_elite = mib_identifier((1, 3, 6, 1, 4, 1, 217))
meter_works = mib_identifier((1, 3, 6, 1, 4, 1, 217, 16))
mw501 = mib_identifier((1, 3, 6, 1, 4, 1, 217, 16, 1))
mw_mem = mib_identifier((1, 3, 6, 1, 4, 1, 217, 16, 1, 1))
mw_heap = mib_identifier((1, 3, 6, 1, 4, 1, 217, 16, 1, 2))
mw_mem_ceiling = mib_scalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 1, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mw_mem_ceiling.setStatus('mandatory')
if mibBuilder.loadTexts:
    mw_mem_ceiling.setDescription('bytes of memory the agent memory manager will allow the agent to use.')
mw_mem_used = mib_scalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 1, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mw_mem_used.setStatus('mandatory')
if mibBuilder.loadTexts:
    mw_mem_used.setDescription("bytes of memory that meterworks has malloc'ed. some of this may be in free pools.")
mw_heap_total = mib_scalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 2, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mw_heap_total.setStatus('mandatory')
if mibBuilder.loadTexts:
    mw_heap_total.setDescription('bytes of memory given to the heap manager.')
mw_heap_used = mib_scalar((1, 3, 6, 1, 4, 1, 217, 16, 1, 2, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
    mw_heap_used.setStatus('mandatory')
if mibBuilder.loadTexts:
    mw_heap_used.setDescription('bytes of available memory in the heap.')
mibBuilder.exportSymbols('MWORKS-MIB', mwHeap=mw_heap, mwHeapUsed=mw_heap_used, mwMemCeiling=mw_mem_ceiling, meterWorks=meter_works, tecElite=tec_elite, mwMem=mw_mem, mw501=mw501, mwHeapTotal=mw_heap_total, mwMemUsed=mw_mem_used) |
# == 1 ==
bar = [1, 2]
def foo(bar):
bar = sum(bar)
return bar
print(foo(bar))
# == 2 ==
bar = [1, 2]
def foo(bar):
bar[0] = 1
return sum(bar)
print(foo(bar))
# == 3 ==
bar = [1, 2]
def foo():
bar = sum(bar)
return bar
print(foo())
# == 4 ==
bar = [1, 2]
def foo(bar):
bar = [1, 2, 3, ]
return sum(bar)
print(foo(bar), bar)
# == 5 ==
bar = [1, 2]
def foo(bar):
bar[:] = [1, 2, 3, ]
return sum(bar)
print(foo(bar), bar)
# == 6 ==
try:
bar = 1 / 0
print(bar)
except ZeroDivisionError as bar:
print(bar)
print(bar)
# == 7 ==
bar = [1, 2]
print(list(bar for bar in bar))
print(bar)
# == 8 ==
bar = [1, 2]
f = lambda: sum(bar)
print(f())
bar = [1, 2, 3, ]
print(f())
# == 9 ==
bar = [1, 2]
def foo(bar):
return lambda: sum(bar)
f = foo(bar)
print(f())
bar = [1, 2, 3, ]
print(f())
# == 10 ==
bar = [1, 2]
foo = []
for i in bar:
foo.append(lambda: i)
print([f() for f in foo])
# == 11 ==
bar = [1, 2]
foo = [
lambda: i
for i in bar
]
print(list(f() for f in foo))
# == 12 ==
bar = [1, 2]
foo = [
lambda: i
for i in bar
]
print(list(f() for f in foo))
bar = [1, 2, 3, ]
print(list(f() for f in foo))
bar[:] = [1, 2, 3, ]
print(list(f() for f in foo))
# == 13 ==
bar = [1, 2]
foo = [
lambda i=i: i
for i in bar
]
print(list(f() for f in foo))
| bar = [1, 2]
def foo(bar):
bar = sum(bar)
return bar
print(foo(bar))
bar = [1, 2]
def foo(bar):
bar[0] = 1
return sum(bar)
print(foo(bar))
bar = [1, 2]
def foo():
bar = sum(bar)
return bar
print(foo())
bar = [1, 2]
def foo(bar):
bar = [1, 2, 3]
return sum(bar)
print(foo(bar), bar)
bar = [1, 2]
def foo(bar):
bar[:] = [1, 2, 3]
return sum(bar)
print(foo(bar), bar)
try:
bar = 1 / 0
print(bar)
except ZeroDivisionError as bar:
print(bar)
print(bar)
bar = [1, 2]
print(list((bar for bar in bar)))
print(bar)
bar = [1, 2]
f = lambda : sum(bar)
print(f())
bar = [1, 2, 3]
print(f())
bar = [1, 2]
def foo(bar):
return lambda : sum(bar)
f = foo(bar)
print(f())
bar = [1, 2, 3]
print(f())
bar = [1, 2]
foo = []
for i in bar:
foo.append(lambda : i)
print([f() for f in foo])
bar = [1, 2]
foo = [lambda : i for i in bar]
print(list((f() for f in foo)))
bar = [1, 2]
foo = [lambda : i for i in bar]
print(list((f() for f in foo)))
bar = [1, 2, 3]
print(list((f() for f in foo)))
bar[:] = [1, 2, 3]
print(list((f() for f in foo)))
bar = [1, 2]
foo = [lambda i=i: i for i in bar]
print(list((f() for f in foo))) |
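# Editor's note (hedged aside, not part of the original quiz): cases 10-13
# above illustrate late binding of the loop variable in closures. Besides the
# default-argument trick in case 13, functools.partial is another common way
# to freeze the value at definition time. Minimal self-contained sketch; the
# underscore-prefixed names are invented for illustration only.
from functools import partial
def _identity(x):
    return x
_bar = [1, 2]
_frozen = [partial(_identity, i) for i in _bar]
print([f() for f in _frozen])  # [1, 2], regardless of later changes to _bar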
# Copyright (C) Dnspython Contributors, see LICENSE for text of ISC license
# Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Class ANY (generic) rdata type classes."""
__all__ = [
'AFSDB',
'AMTRELAY',
'AVC',
'CAA',
'CDNSKEY',
'CDS',
'CERT',
'CNAME',
'CSYNC',
'DLV',
'DNAME',
'DNSKEY',
'DS',
'EUI48',
'EUI64',
'GPOS',
'HINFO',
'HIP',
'ISDN',
'LOC',
'MX',
'NINFO',
'NS',
'NSEC',
'NSEC3',
'NSEC3PARAM',
'OPENPGPKEY',
'OPT',
'PTR',
'RP',
'RRSIG',
'RT',
'SMIMEA',
'SOA',
'SPF',
'SSHFP',
'TKEY',
'TLSA',
'TSIG',
'TXT',
'URI',
'X25',
]
| """Class ANY (generic) rdata type classes."""
__all__ = ['AFSDB', 'AMTRELAY', 'AVC', 'CAA', 'CDNSKEY', 'CDS', 'CERT', 'CNAME', 'CSYNC', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48', 'EUI64', 'GPOS', 'HINFO', 'HIP', 'ISDN', 'LOC', 'MX', 'NINFO', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'OPT', 'PTR', 'RP', 'RRSIG', 'RT', 'SMIMEA', 'SOA', 'SPF', 'SSHFP', 'TKEY', 'TLSA', 'TSIG', 'TXT', 'URI', 'X25'] |
n = int(input())
row = 0
for i in range(100):
if 2 ** i <= n <= 2 ** (i + 1) - 1:
row = i
break
def seki(k, n):
for _ in range(n):
k = 4 * k + 2
return k
k = 0
if row % 2 != 0:
k = 2
cri = seki(k, row // 2)
if n < cri:
print("Aoki")
else:
print("Takahashi")
else:
k = 1
cri = seki(k, row // 2)
if n < cri:
print("Takahashi")
else:
print("Aoki")
| n = int(input())
row = 0
for i in range(100):
if 2 ** i <= n <= 2 ** (i + 1) - 1:
row = i
break
def seki(k, n):
for _ in range(n):
k = 4 * k + 2
return k
k = 0
if row % 2 != 0:
k = 2
cri = seki(k, row // 2)
if n < cri:
print('Aoki')
else:
print('Takahashi')
else:
k = 1
cri = seki(k, row // 2)
if n < cri:
print('Takahashi')
else:
print('Aoki') |
bino = int(input())
cino = int(input())
if (bino+cino)%2==0:
print("Bino")
else:
print("Cino")
| bino = int(input())
cino = int(input())
if (bino + cino) % 2 == 0:
print('Bino')
else:
print('Cino') |
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 2 11:06:59 2019
@author: Paul
"""
def read_data(filename):
"""
Reads csv file into a list, and converts to ints
"""
data = []
f = open(filename, 'r')
for line in f:
data += line.strip('\n').split(',')
int_data = [int(i) for i in data]
f.close()
return int_data
def run_intcode(program, input_int):
"""
Takes data, list of ints to run int_code on.
Returns list of ints after intcode program has been run.
Running Intcode program looks reads in the integers sequentially in sets of 4:
data[i] == Parameter Mode + Opcode (last two digits)
data[i+1] == Entry 1
data[i+2] == Entry 2
data[i+3] == Entry 3
If Opcode == 1, the value of the opcode at index location = entry 1 and 2
in the program are summed and stored at the index location of entry 3.
If Opcode == 2, the value of the opcode at index location = entry 1 and 2
in the program are multiplied and stored at the index location of entry 3.
If Opcode == 3, the the single integer (input) is saved to the position given
by index 1.
If Opcode == 4, the program outputs the value of its only parameter. E.g. 4,50
would output the value at address 50.
If Opcode == 5 and entry 1 is != 0, the intcode position moves to the index stored
at entry 2. Otherwise it does nothing.
If Opcode == 6 and entry 1 is 0, the intcode postion moves to the index stored
at entry 2. Otherwise it does nothing.
If Opcode == 7 and entry 1> entry 2, store 1 in position given by third param,
otherwise store 0 at position given by third param.
If Opcode == 7 and entry 1 = entry 2, store 1 in position given by third param,
otherwise store 0 at position given by third param.
If Opcode == 99, the program is completed and will stop running.
Parameters are digits to the left of the opcode, read left to right:
Parameter 0 -> Position mode - the entry is treated as an index location
Parameter 1 -> Immediate mode - the entry is treated as a value
"""
data = program[:]
answer = -1
params = [0, 0, 0]
param_modes = ['', '', '']
i = 0
while (i < len(program)):
#print("i = ", i)
# Determine Opcode and parameter codes:
opcode_str = "{:0>5d}".format(data[i])
opcode = int(opcode_str[3:])
param_modes[0] = opcode_str[2]
param_modes[1] = opcode_str[1]
param_modes[2] = opcode_str[0]
#print(opcode_str)
for j in range(2):
if param_modes[j] == '0':
try:
params[j] = data[data[i+j+1]]
except IndexError:
continue
else:
try:
params[j] = data[i+j+1]
except IndexError:
continue
#print(params, param_modes)
# If opcode is 1, add relevant entries:
if opcode == 1:
data[data[i+3]] = params[0] + params[1]
i += 4;
# If opcode is 2, multiply the relevant entries:
elif opcode == 2:
data[data[i+3]] = params[0] * params[1]
i += 4;
# If opcode is 3, store input value at required location.
elif opcode == 3:
data[data[i+1]] = input_int
i += 2;
# If opcode is 4, print out the input stored at specified location.
elif opcode == 4:
answer = data[data[i+1]]
print("Program output: ", data[data[i+1]])
i += 2;
# If the opcode is 5 and the next parameter !=0, jump forward
elif opcode == 5:
if params[0] != 0:
i = params[1]
else:
i += 3
# If the opcode is 6 and next parameter is 0, jump forward
elif opcode == 6:
if params[0] == 0:
i = params[1]
else:
i += 3
# If the opcode is 7, carry out less than comparison and store 1/0 at loc 3
elif opcode == 7:
if params[0] < params[1]:
data[data[i+3]] = 1
else:
data[data[i+3]] = 0
i += 4
# If the opcode is 8, carry out equality comparison and store 1/0 at loc 3
elif opcode == 8:
if params[0] == params[1]:
data[data[i+3]] = 1
else:
data[data[i+3]] = 0
i += 4
# If the opcode is 99, halt the intcode
elif opcode == 99:
print("Program ended by halt code")
break
# If opcode is anything else something has gone wrong!
else:
print("Problem with the Program")
break
return data, answer
program = read_data("day5input.txt")
#print(program)
result1, answer1 = run_intcode(program, 1)
#print(result1)
print("Part 1: Answer is: ", answer1)
result2, answer2 = run_intcode(program, 5)
#print(result2)
print("Part 2: Answer is: ", answer2)
#test_program = [1002,4,3,4,33]
#test_program2 = [3,0,4,0,99]
#test_program3 = [1101,100,-1,4,0]
#test_program4 = [3,9,8,9,10,9,4,9,99,-1,8] # 1 if input = 8, 0 otherwise
#test_program5 = [3,9,7,9,10,9,4,9,99,-1,8] # 1 if input < 8, 0 otherwise
#test_program6 = [3,3,1108,-1,8,3,4,3,99] # 1 if input = 8, 0 otherwise
#test_program7 = [3,3,1107,-1,8,3,4,3,99] # 1 if input < 8, 0 otherwise
#test_program8 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9] # 0 if input = 0, 1 otherwise
#test_program9 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1] # 0 if input = 0, 1 otherwise
#test_program10 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0,
#36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20,
#1105,1,46,98,99] # 999 if input < 8, 1000 if input = 8, 1001 if input > 8
| """
Created on Mon Dec 2 11:06:59 2019
@author: Paul
"""
def read_data(filename):
"""
Reads csv file into a list, and converts to ints
"""
data = []
f = open(filename, 'r')
for line in f:
data += line.strip('\n').split(',')
int_data = [int(i) for i in data]
f.close()
return int_data
def run_intcode(program, input_int):
"""
Takes data, list of ints to run int_code on.
Returns list of ints after intcode program has been run.
Running Intcode program looks reads in the integers sequentially in sets of 4:
data[i] == Parameter Mode + Opcode (last two digits)
data[i+1] == Entry 1
data[i+2] == Entry 2
data[i+3] == Entry 3
If Opcode == 1, the value of the opcode at index location = entry 1 and 2
in the program are summed and stored at the index location of entry 3.
If Opcode == 2, the value of the opcode at index location = entry 1 and 2
in the program are multiplied and stored at the index location of entry 3.
If Opcode == 3, the the single integer (input) is saved to the position given
by index 1.
If Opcode == 4, the program outputs the value of its only parameter. E.g. 4,50
would output the value at address 50.
If Opcode == 5 and entry 1 is != 0, the intcode position moves to the index stored
at entry 2. Otherwise it does nothing.
If Opcode == 6 and entry 1 is 0, the intcode postion moves to the index stored
at entry 2. Otherwise it does nothing.
If Opcode == 7 and entry 1> entry 2, store 1 in position given by third param,
otherwise store 0 at position given by third param.
If Opcode == 7 and entry 1 = entry 2, store 1 in position given by third param,
otherwise store 0 at position given by third param.
If Opcode == 99, the program is completed and will stop running.
Parameters are digits to the left of the opcode, read left to right:
Parameter 0 -> Position mode - the entry is treated as an index location
Parameter 1 -> Immediate mode - the entry is treated as a value
"""
data = program[:]
answer = -1
params = [0, 0, 0]
param_modes = ['', '', '']
i = 0
while i < len(program):
opcode_str = '{:0>5d}'.format(data[i])
opcode = int(opcode_str[3:])
param_modes[0] = opcode_str[2]
param_modes[1] = opcode_str[1]
param_modes[2] = opcode_str[0]
for j in range(2):
if param_modes[j] == '0':
try:
params[j] = data[data[i + j + 1]]
except IndexError:
continue
else:
try:
params[j] = data[i + j + 1]
except IndexError:
continue
if opcode == 1:
data[data[i + 3]] = params[0] + params[1]
i += 4
elif opcode == 2:
data[data[i + 3]] = params[0] * params[1]
i += 4
elif opcode == 3:
data[data[i + 1]] = input_int
i += 2
elif opcode == 4:
answer = data[data[i + 1]]
print('Program output: ', data[data[i + 1]])
i += 2
elif opcode == 5:
if params[0] != 0:
i = params[1]
else:
i += 3
elif opcode == 6:
if params[0] == 0:
i = params[1]
else:
i += 3
elif opcode == 7:
if params[0] < params[1]:
data[data[i + 3]] = 1
else:
data[data[i + 3]] = 0
i += 4
elif opcode == 8:
if params[0] == params[1]:
data[data[i + 3]] = 1
else:
data[data[i + 3]] = 0
i += 4
elif opcode == 99:
print('Program ended by halt code')
break
else:
print('Problem with the Program')
break
return (data, answer)
program = read_data('day5input.txt')
(result1, answer1) = run_intcode(program, 1)
print('Part 1: Answer is: ', answer1)
(result2, answer2) = run_intcode(program, 5)
print('Part 2: Answer is: ', answer2) |
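# Editor's note (illustrative check, not part of the original solution): how
# the opcode/parameter-mode split in run_intcode decodes a raw instruction
# such as 1002. Zero-padding to five digits gives '01002'; the last two digits
# are the opcode and the remaining digits, read right to left, are the modes
# of parameters 1..3.
_example = '{:0>5d}'.format(1002)
print(_example)                                # 01002
print(int(_example[3:]))                       # opcode 2 (multiply)
print(_example[2], _example[1], _example[0])   # modes 0, 1, 0 for params 1..3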
class Rectangle:
"""A rectangle shape that can be drawn on a Canvas object"""
def __init__(self, x, y, width, height, color):
self.x = x
self.y = y
self.width = width
self.height = height
self.color = color
def draw(self, canvas):
"""Draws itself into the Canvas object"""
# Changes a slice of the array with new values
canvas.data[self.x: self.x + self.height, self.y: self.y + self.width] = self.color
class Square:
"""A square shape that can be drawn on a Canvas object"""
def __init__(self, x, y, side, color):
self.x = x
self.y = y
self.side = side
self.color = color
def draw(self, canvas):
"""Draws itself into the Canvas object"""
# Changes a slice of the array with new values
canvas.data[self.x: self.x + self.side, self.y: self.y + self.side] = self.color
| class Rectangle:
"""A rectangle shape that can be drawn on a Canvas object"""
def __init__(self, x, y, width, height, color):
self.x = x
self.y = y
self.width = width
self.height = height
self.color = color
def draw(self, canvas):
"""Draws itself into the Canvas object"""
canvas.data[self.x:self.x + self.height, self.y:self.y + self.width] = self.color
class Square:
"""A square shape that can be drawn on a Canvas object"""
def __init__(self, x, y, side, color):
self.x = x
self.y = y
self.side = side
self.color = color
def draw(self, canvas):
"""Draws itself into the Canvas object"""
canvas.data[self.x:self.x + self.side, self.y:self.y + self.side] = self.color |
print(b)
print(c)
print(d)
print(e)
print(f)
print(g) | print(b)
print(c)
print(d)
print(e)
print(f)
print(g) |
print("hiiiiiiiiiiiiiiiix")
def sayhi():
print("2nd pkg said hi")
| print('hiiiiiiiiiiiiiiiix')
def sayhi():
print('2nd pkg said hi') |
base = int(input('Digite o valor da base: '))
expoente = 0
while expoente <= 0:
expoente = int(input('Digite o valor do expoente: '))
if expoente <= 0:
print('O expoente tem que ser positivo')
potencia = 1
for c in range(1, expoente + 1):
potencia *= base
print(f'{base}^ {expoente} = {potencia}')
| base = int(input('Digite o valor da base: '))
expoente = 0
while expoente <= 0:
expoente = int(input('Digite o valor do expoente: '))
if expoente <= 0:
print('O expoente tem que ser positivo')
potencia = 1
for c in range(1, expoente + 1):
potencia *= base
print(f'{base}^ {expoente} = {potencia}') |
# 084
# Ask the user to type in their postcode.Display the first two
# letters in uppercase.
# very simple
print(input('Enter your postcode: ')[0:2].upper()) | print(input('Enter your postcode: ')[0:2].upper()) |
test = """forward 5
down 5
forward 8
up 3
down 8
forward 2
"""
def part1(lines):
h = 0
d = 0
for line in lines:
direction, delta = line.split()
delta = int(delta)
if direction == 'forward':
h += delta
elif direction == 'down':
d += delta
elif direction == 'up':
d -= delta
print(h*d)
def part2(lines):
h = 0
d = 0
a = 0
for line in lines:
direction, delta = line.split()
delta = int(delta)
print(direction, delta)
if direction == 'forward':
h += delta
d += (delta * a)
elif direction == 'down':
a += delta
elif direction == 'up':
a -= delta
print(h*d)
if __name__ == '__main__':
part1(test.splitlines())
part1(open('in02.txt').readlines())
part2(test.splitlines())
part2(open('in02.txt').readlines())
| test = 'forward 5\ndown 5\nforward 8\nup 3\ndown 8\nforward 2\n'
def part1(lines):
h = 0
d = 0
for line in lines:
(direction, delta) = line.split()
delta = int(delta)
if direction == 'forward':
h += delta
elif direction == 'down':
d += delta
elif direction == 'up':
d -= delta
print(h * d)
def part2(lines):
h = 0
d = 0
a = 0
for line in lines:
(direction, delta) = line.split()
delta = int(delta)
print(direction, delta)
if direction == 'forward':
h += delta
d += delta * a
elif direction == 'down':
a += delta
elif direction == 'up':
a -= delta
print(h * d)
if __name__ == '__main__':
part1(test.splitlines())
part1(open('in02.txt').readlines())
part2(test.splitlines())
part2(open('in02.txt').readlines()) |
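# Editor's note (worked check of the sample course above, hedged): under the
# part 1 rules the sample ends at horizontal 15 and depth 10, and under the
# aim-based part 2 rules at horizontal 15 and depth 60, so the two calls on
# the test data should print 150 and 900 respectively.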
MANDRILL_API_KEY = 'MANDRILL_API_KEY'
UNSET_MANDRILL_API_KEY_MSG = f"Mandrill API key not set in environment variable {MANDRILL_API_KEY}"
CONTACT_LIST_QUERY = """
SELECT *
FROM `{{project}}.{{dataset}}.{{contact_table}}`
"""
EHR_OPERATIONS = 'EHR Ops'
EHR_OPS_ZENDESK = '[email protected]'
DATA_CURATION_LISTSERV = '[email protected]'
NO_REPLY_ADDRESS = '[email protected]'
NO_DATA_STEWARD = 'no data steward'
# HPO contact list table columns
SITE_NAME = 'site_name'
HPO_ID = 'hpo_id'
SITE_POINT_OF_CONTACT = 'site_point_of_contact'
# Mandrill API constants
MAIL_TO = 'mail_to'
EHR_OPS_SITE_URL = 'https://sites.google.com/view/ehrupload'
# Email content
EMAIL_BODY = """
<p style="font-size:115%;">Hi {{ site_name }},</p>
<p style="font-size:115%;">Your submission <b>{{ folder }}</b>
{% if submission_error %}was NOT successfully loaded on {{ timestamp }}.<br>
{% else %}was successfully loaded on {{ timestamp }}.<br>
{% endif %}
Please review the <code>results.html</code> submission report attached to this email{% if submission_error %}<br>
and resolve the errors before making a new submission{% endif %}.<br>
If any of your files have not been successfully uploaded, please run the
<a href="https://github.com/all-of-us/aou-ehr-file-check">local file check</a> before making your submission.<br>
To view the full set of curation reports, please visit the submission folder in your
GCS bucket <a href="{{ submission_folder_url }}">here</a>.<br>
For more information on the reports and how to download them, please refer to our
<a href="{{ ehr_ops_site_url }}">EHR Ops website</a>.</p>
<p style="font-size:115%;">You are receiving this email because you are listed as a point of contact
for HPO Site <em>{{ site_name }}</em>.<br>
If you have additional questions or wish to no longer receive these emails, please reply/send an
email to <a href="mailto:{{ eo_zendesk }}">{{ eo_zendesk }}</a>.</p>
<p style="font-size:115%;">EHR Ops team, DRC<br>
<em>All of Us</em> Research Program<br>
<img src="cid:{{ aou_logo }}"/></p>
"""
AOU_LOGO = 'aou_logo'
AOU_LOGO_PNG = 'all-of-us-logo.png'
| mandrill_api_key = 'MANDRILL_API_KEY'
unset_mandrill_api_key_msg = f'Mandrill API key not set in environment variable {mandrill_api_key}'
contact_list_query = '\nSELECT *\nFROM `{{project}}.{{dataset}}.{{contact_table}}`\n'
ehr_operations = 'EHR Ops'
ehr_ops_zendesk = '[email protected]'
data_curation_listserv = '[email protected]'
no_reply_address = '[email protected]'
no_data_steward = 'no data steward'
site_name = 'site_name'
hpo_id = 'hpo_id'
site_point_of_contact = 'site_point_of_contact'
mail_to = 'mail_to'
ehr_ops_site_url = 'https://sites.google.com/view/ehrupload'
email_body = '\n<p style="font-size:115%;">Hi {{ site_name }},</p>\n\n<p style="font-size:115%;">Your submission <b>{{ folder }}</b> \n{% if submission_error %}was NOT successfully loaded on {{ timestamp }}.<br>\n{% else %}was successfully loaded on {{ timestamp }}.<br>\n{% endif %}\nPlease review the <code>results.html</code> submission report attached to this email{% if submission_error %}<br>\nand resolve the errors before making a new submission{% endif %}.<br>\nIf any of your files have not been successfully uploaded, please run the\n <a href="https://github.com/all-of-us/aou-ehr-file-check">local file check</a> before making your submission.<br> \nTo view the full set of curation reports, please visit the submission folder in your\n GCS bucket <a href="{{ submission_folder_url }}">here</a>.<br>\nFor more information on the reports and how to download them, please refer to our\n <a href="{{ ehr_ops_site_url }}">EHR Ops website</a>.</p>\n\n<p style="font-size:115%;">You are receiving this email because you are listed as a point of contact\n for HPO Site <em>{{ site_name }}</em>.<br>\nIf you have additional questions or wish to no longer receive these emails, please reply/send an\n email to <a href="mailto:{{ eo_zendesk }}">{{ eo_zendesk }}</a>.</p>\n\n<p style="font-size:115%;">EHR Ops team, DRC<br>\n<em>All of Us</em> Research Program<br>\n<img src="cid:{{ aou_logo }}"/></p>\n'
aou_logo = 'aou_logo'
aou_logo_png = 'all-of-us-logo.png' |
MAP = 1
SPEED = 1.5
VELOCITYRESET = 6
WIDTH = 1280
HEIGHT = 720
X = WIDTH / 2 - 50
Y = HEIGHT / 2 - 50
MOUSER = 325
TICKRATES = 120
nfc = False
raspberry = False | map = 1
speed = 1.5
velocityreset = 6
width = 1280
height = 720
x = width / 2 - 50
y = height / 2 - 50
mouser = 325
tickrates = 120
nfc = False
raspberry = False |
def main():
n = 111
gen = (n * 7 for x in range(10))
if 777 in gen:
print("Yes!")
if __name__ == '__main__':
main()
| def main():
n = 111
gen = (n * 7 for x in range(10))
if 777 in gen:
print('Yes!')
if __name__ == '__main__':
main() |
class BaseStorageManager(object):
def __init__(self, adpter):
self.adapter = adpter
def put(self, options):
try:
return self.adapter.put(options)
except Exception:
raise Exception('Failed to write data to storage')
def get(self, options):
try:
data = self.adapter.get(options)
return data
except Exception as e:
raise Exception('Failed to read data from storage' + str(e))
def list(self, options):
try:
return self.adapter.list(options)
except Exception:
raise Exception('Failed to list storage data')
def listPrefix(self, options):
try:
return self.adapter.listPrefix(options)
except Exception:
raise Exception('Failed to listPrefix storage data')
def delete(self, options):
try:
self.adapter.delete(options)
except Exception:
raise Exception('Failed to delete storage data')
| class Basestoragemanager(object):
def __init__(self, adpter):
self.adapter = adpter
def put(self, options):
try:
return self.adapter.put(options)
except Exception:
            raise Exception('Failed to write data to storage')
def get(self, options):
try:
data = self.adapter.get(options)
return data
except Exception as e:
            raise Exception('Failed to read data from storage' + str(e))
def list(self, options):
try:
return self.adapter.list(options)
except Exception:
            raise Exception('Failed to list storage data')
def list_prefix(self, options):
try:
return self.adapter.listPrefix(options)
except Exception:
            raise Exception('Failed to listPrefix storage data')
def delete(self, options):
try:
self.adapter.delete(options)
except Exception:
raise exception('Failed to delete storage data') |
def keychain_value_iter(d, key_chain=None, allowed_values=None):
key_chain = [] if key_chain is None else list(key_chain).copy()
if not isinstance(d, dict):
if allowed_values is not None:
assert isinstance(d, allowed_values), 'Value needs to be of type {}!'.format(
allowed_values)
yield key_chain, d
else:
for k, v in d.items():
yield from keychain_value_iter(
v,
key_chain + [k],
allowed_values=allowed_values) | def keychain_value_iter(d, key_chain=None, allowed_values=None):
key_chain = [] if key_chain is None else list(key_chain).copy()
if not isinstance(d, dict):
if allowed_values is not None:
assert isinstance(d, allowed_values), 'Value needs to be of type {}!'.format(allowed_values)
yield (key_chain, d)
else:
for (k, v) in d.items():
yield from keychain_value_iter(v, key_chain + [k], allowed_values=allowed_values) |
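# Editor's note (hedged usage sketch): keychain_value_iter walks a nested dict
# and yields (key_chain, leaf_value) pairs. The sample data below is invented
# purely for illustration.
_sample = {'a': {'b': 1, 'c': {'d': 2}}, 'e': 3}
for _chain, _value in keychain_value_iter(_sample, allowed_values=int):
    print(_chain, _value)
# expected output:
# ['a', 'b'] 1
# ['a', 'c', 'd'] 2
# ['e'] 3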
#!/usr/bin/python
# -*- coding: utf-8 -*-
def getstatus(code):
if code == "1000":
value = "Success!"
elif code == "1001":
value = "Unknown Message Received"
elif code == "1002":
value = "Connection to Fishbowl Server was lost"
elif code == "1003":
value = "Some Requests had errors -- now isn't that helpful..."
elif code == "1004":
value = "There was an error with the database."
elif code == "1009":
value = "Fishbowl Server has been shut down."
elif code == "1010":
value = "You have been logged off the server by an administrator."
elif code == "1012":
value = "Unknown request function."
elif code == "1100":
value = "Unknown login error occurred."
elif code == "1110":
value = "A new Integrated Application has been added to Fishbowl Inventory. Please contact your Fishbowl Inventory Administrator to approve this Integrated Application."
elif code == "1111":
value = "This Integrated Application registration key does not match."
elif code == "1112":
value = "This Integrated Application has not been approved by the Fishbowl Inventory Administrator."
elif code == "1120":
value = "Invalid Username or Password."
elif code == "1130":
value = "Invalid Ticket passed to Fishbowl Inventory Server."
elif code == "1131":
value = "Invalid Key value."
elif code == "1140":
value = "Initialization token is not correct type."
elif code == "1150":
value = "Request was invalid"
elif code == "1160":
value = "Response was invalid."
elif code == "1162":
value = "The login limit has been reached for the server's key."
elif code == "1200":
value = "Custom Field is invalid."
elif code == "1500":
value = "The import was not properly formed."
elif code == "1501":
value = "That import type is not supported"
elif code == "1502":
value = "File not found."
elif code == "1503":
value = "That export type is not supported."
elif code == "1504":
value = "File could not be written to."
elif code == "1505":
value = "The import data was of the wrong type."
elif code == "2000":
value = "Was not able to find the Part {0}."
elif code == "2001":
value = "The part was invalid."
elif code == "2100":
value = "Was not able to find the Product {0}."
elif code == "2101":
value = "The product was invalid."
elif code == "2200":
value = "The yield failed."
elif code == "2201":
value = "Commit failed."
elif code == "2202":
value = "Add initial inventory failed."
elif code == "2203":
value = "Can not adjust committed inventory."
elif code == "2300":
value = "Was not able to find the Tag number {0}."
elif code == "2301":
value = "The tag is invalid."
elif code == "2302":
value = "The tag move failed."
elif code == "2303":
value = "Was not able to save Tag number {0}."
elif code == "2304":
value = "Not enough available inventory in Tagnumber {0}."
elif code == "2305":
value = "Tag number {0} is a location."
elif code == "2400":
value = "Invalid UOM."
elif code == "2401":
value = "UOM {0} not found."
elif code == "2402":
value = "Integer UOM {0} cannot have non-integer quantity."
elif code == "2500":
value = "The Tracking is not valid."
elif code == "2510":
value = "Serial number is missing."
elif code == "2511":
value = "Serial number is null."
elif code == "2512":
value = "Serial number is duplicate."
elif code == "2513":
value = "Serial number is not valid."
elif code == "2600":
value = "Location not found."
elif code == "2601":
value = "Invalid location."
elif code == "2602":
value = "Location Group {0} not found."
elif code == "3000":
value = "Customer {0} not found."
elif code == "3001":
value = "Customer is invalid."
elif code == "3100":
value = "Vendor {0} not found."
elif code == "3101":
value = "Vendor is invalid."
elif code == "4000":
value = "There was an error load PO {0}."
elif code == "4001":
value = "Unknow status {0}."
elif code == "4002":
value = "Unknown carrier {0}."
elif code == "4003":
value = "Unknown QuickBooks class {0}."
elif code == "4004":
value = "PO does not have a PO number. Please turn on the auto-assign PO number option in the purchase order module options."
else:
value = 'Unknown status'
return value
| def getstatus(code):
if code == '1000':
value = 'Success!'
elif code == '1001':
value = 'Unknown Message Received'
elif code == '1002':
value = 'Connection to Fishbowl Server was lost'
elif code == '1003':
value = "Some Requests had errors -- now isn't that helpful..."
elif code == '1004':
value = 'There was an error with the database.'
elif code == '1009':
value = 'Fishbowl Server has been shut down.'
elif code == '1010':
value = 'You have been logged off the server by an administrator.'
elif code == '1012':
value = 'Unknown request function.'
elif code == '1100':
value = 'Unknown login error occurred.'
elif code == '1110':
value = 'A new Integrated Application has been added to Fishbowl Inventory. Please contact your Fishbowl Inventory Administrator to approve this Integrated Application.'
elif code == '1111':
value = 'This Integrated Application registration key does not match.'
elif code == '1112':
value = 'This Integrated Application has not been approved by the Fishbowl Inventory Administrator.'
elif code == '1120':
value = 'Invalid Username or Password.'
elif code == '1130':
value = 'Invalid Ticket passed to Fishbowl Inventory Server.'
elif code == '1131':
value = 'Invalid Key value.'
elif code == '1140':
value = 'Initialization token is not correct type.'
elif code == '1150':
value = 'Request was invalid'
elif code == '1160':
value = 'Response was invalid.'
elif code == '1162':
value = "The login limit has been reached for the server's key."
elif code == '1200':
value = 'Custom Field is invalid.'
elif code == '1500':
value = 'The import was not properly formed.'
elif code == '1501':
value = 'That import type is not supported'
elif code == '1502':
value = 'File not found.'
elif code == '1503':
value = 'That export type is not supported.'
elif code == '1504':
value = 'File could not be written to.'
elif code == '1505':
value = 'The import data was of the wrong type.'
elif code == '2000':
value = 'Was not able to find the Part {0}.'
elif code == '2001':
value = 'The part was invalid.'
elif code == '2100':
value = 'Was not able to find the Product {0}.'
elif code == '2101':
value = 'The product was invalid.'
elif code == '2200':
value = 'The yield failed.'
elif code == '2201':
value = 'Commit failed.'
elif code == '2202':
value = 'Add initial inventory failed.'
elif code == '2203':
value = 'Can not adjust committed inventory.'
elif code == '2300':
value = 'Was not able to find the Tag number {0}.'
elif code == '2301':
value = 'The tag is invalid.'
elif code == '2302':
value = 'The tag move failed.'
elif code == '2303':
value = 'Was not able to save Tag number {0}.'
elif code == '2304':
value = 'Not enough available inventory in Tagnumber {0}.'
elif code == '2305':
value = 'Tag number {0} is a location.'
elif code == '2400':
value = 'Invalid UOM.'
elif code == '2401':
value = 'UOM {0} not found.'
elif code == '2402':
value = 'Integer UOM {0} cannot have non-integer quantity.'
elif code == '2500':
value = 'The Tracking is not valid.'
elif code == '2510':
value = 'Serial number is missing.'
elif code == '2511':
value = 'Serial number is null.'
elif code == '2512':
value = 'Serial number is duplicate.'
elif code == '2513':
value = 'Serial number is not valid.'
elif code == '2600':
value = 'Location not found.'
elif code == '2601':
value = 'Invalid location.'
elif code == '2602':
value = 'Location Group {0} not found.'
elif code == '3000':
value = 'Customer {0} not found.'
elif code == '3001':
value = 'Customer is invalid.'
elif code == '3100':
value = 'Vendor {0} not found.'
elif code == '3101':
value = 'Vendor is invalid.'
elif code == '4000':
value = 'There was an error load PO {0}.'
elif code == '4001':
        value = 'Unknown status {0}.'
elif code == '4002':
value = 'Unknown carrier {0}.'
elif code == '4003':
value = 'Unknown QuickBooks class {0}.'
elif code == '4004':
value = 'PO does not have a PO number. Please turn on the auto-assign PO number option in the purchase order module options.'
else:
value = 'Unknown status'
return value |
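# Editor's note (hedged usage sketch): getstatus maps Fishbowl status codes,
# passed as strings, to their human-readable messages.
print(getstatus('1000'))  # Success!
print(getstatus('9999'))  # Unknown status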
# -*- coding: utf-8 -*-
"""This sub module provides a global variable to check for checking if the non-interactive argument was set
Exported variable:
interactive -- False, if the main the non-interactive argument was set, True, if it was not set
"""
global interactive
interactive = True
Exported variable:
interactive -- False, if the main the non-interactive argument was set, True, if it was not set
"""
global interactive
interactive = True |
ans = dict()
pairs = dict()
def create_tree(p):
if p in ans:
return ans[p]
else:
try:
res = 0
if p in pairs:
for ch in pairs[p]:
res += create_tree(ch) + 1
ans[p] = res
return res
except:
pass
n = int(input())
for i in range(0, n-1):
child, parent = input().split()
if parent in pairs:
pairs[parent].append(child)
else:
pairs[parent] = [child]
if n > 0:
for k in pairs:
create_tree(k)
for key in sorted(ans.keys()):
print(key, ans[key]) | ans = dict()
pairs = dict()
def create_tree(p):
if p in ans:
return ans[p]
else:
try:
res = 0
if p in pairs:
for ch in pairs[p]:
res += create_tree(ch) + 1
ans[p] = res
return res
except:
pass
n = int(input())
for i in range(0, n - 1):
(child, parent) = input().split()
if parent in pairs:
pairs[parent].append(child)
else:
pairs[parent] = [child]
if n > 0:
for k in pairs:
create_tree(k)
for key in sorted(ans.keys()):
print(key, ans[key]) |
def italicize(s):
b = False
res = ''
for e in s:
if e == '"':
if b:
res += '{\\i}' + e
else:
res += e + '{i}'
b=not b
else:
res += e
return res
def main():
F=open('test_in.txt','r')
X=F.read()
F.close()
print(italicize(X))
return
if __name__ == "__main__":
main()
| def italicize(s):
b = False
res = ''
for e in s:
if e == '"':
if b:
res += '{\\i}' + e
else:
res += e + '{i}'
b = not b
else:
res += e
return res
def main():
f = open('test_in.txt', 'r')
    x = f.read()
    f.close()
    print(italicize(x))
return
if __name__ == '__main__':
main() |
#!venv/bin/python3
cs = [int(c) for c in open("inputs/23.in", "r").readline().strip()]
def f(cs, ts):
p,cc = {n: cs[(i+1)%len(cs)] for i,n in enumerate(cs)},cs[-1]
for _ in range(ts):
cc,dc = p[cc],p[cc]-1 if p[cc]-1 > 0 else max(p.keys())
hc,p[cc] = [p[cc], p[p[cc]], p[p[p[cc]]]],p[p[p[p[cc]]]]
while dc in hc:
dc -= 1
if dc < 1:
dc = max(p.keys())
p[dc],p[hc[-1]] = hc[0],p[dc]
a,n = [],1
for _ in range(8):
n = p[n]
a.append(str(n))
return "".join(a), p[1] * p[p[1]]
print("Part 1:", f(cs.copy(), 100)[0])
print("Part 2:", f(cs.copy() + [i for i in range(10, 1000001)], 10000000)[1]) | cs = [int(c) for c in open('inputs/23.in', 'r').readline().strip()]
def f(cs, ts):
(p, cc) = ({n: cs[(i + 1) % len(cs)] for (i, n) in enumerate(cs)}, cs[-1])
for _ in range(ts):
(cc, dc) = (p[cc], p[cc] - 1 if p[cc] - 1 > 0 else max(p.keys()))
(hc, p[cc]) = ([p[cc], p[p[cc]], p[p[p[cc]]]], p[p[p[p[cc]]]])
while dc in hc:
dc -= 1
if dc < 1:
dc = max(p.keys())
(p[dc], p[hc[-1]]) = (hc[0], p[dc])
(a, n) = ([], 1)
for _ in range(8):
n = p[n]
a.append(str(n))
return (''.join(a), p[1] * p[p[1]])
print('Part 1:', f(cs.copy(), 100)[0])
print('Part 2:', f(cs.copy() + [i for i in range(10, 1000001)], 10000000)[1]) |
entrada = input("palabra")
listaDeLetras = []
for i in entrada:
listaDeLetras.append(i)
| entrada = input('palabra')
lista_de_letras = []
for i in entrada:
    lista_de_letras.append(i)
class Machine():
def __init__(self):
self.pointer = 0
self.accum = 0
self.visited = []
def run(self,program):
salir = False
while (salir == False):
if (self.pointer in self.visited):
return False
if (self.pointer >= len(program)):
return True
self.visited.append(self.pointer)
incremento = 1
if (program[self.pointer][0] == "acc"):
self.accum += program[self.pointer][1]
if (program[self.pointer][0] == "jmp"):
incremento = program[self.pointer][1]
self.pointer += incremento
return True
def getVisited(self):
return self.visited
def getAccum(self):
return self.accum
| class Machine:
def __init__(self):
self.pointer = 0
self.accum = 0
self.visited = []
def run(self, program):
salir = False
while salir == False:
if self.pointer in self.visited:
return False
if self.pointer >= len(program):
return True
self.visited.append(self.pointer)
incremento = 1
if program[self.pointer][0] == 'acc':
self.accum += program[self.pointer][1]
if program[self.pointer][0] == 'jmp':
incremento = program[self.pointer][1]
self.pointer += incremento
return True
def get_visited(self):
return self.visited
def get_accum(self):
return self.accum |
"""
Round a number
--------------
Input (float) A floating point number
(int) Number of decimals
Default value is: 0
Output (float) Rounded number
(int) Whether using the default decimals value, the return number
will be the nearest integer
"""
number = 103.14159
# Rounding with 2 decimals
number_rounded = round(number, 2)
print('Rounding with 2 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
# Rounding with -2 decimals
number_rounded = round(number, -2)
print('\nRounding with -2 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
# Rounding with 0 decimals
number_rounded = round(number, 0)
print('\nRounding with 0 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
# Rounding with default
# Result will be integer (!)
number_rounded = round(number)
print('\nRounding with default')
print('original number: {}, rounded: {}, type of rounded: {}'
.format(number, number_rounded, type(number_rounded)))
| """
Round a number
--------------
Input (float) A floating point number
(int) Number of decimals
Default value is: 0
Output (float) Rounded number
(int) Whether using the default decimals value, the return number
will be the nearest integer
"""
number = 103.14159
number_rounded = round(number, 2)
print('Rounding with 2 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'.format(number, number_rounded, type(number_rounded)))
number_rounded = round(number, -2)
print('\nRounding with -2 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'.format(number, number_rounded, type(number_rounded)))
number_rounded = round(number, 0)
print('\nRounding with 0 decimals')
print('original number: {}, rounded: {}, type of rounded: {}'.format(number, number_rounded, type(number_rounded)))
number_rounded = round(number)
print('\nRounding with default')
print('original number: {}, rounded: {}, type of rounded: {}'.format(number, number_rounded, type(number_rounded))) |
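# Editor's note (aside, not part of the original example): Python 3's round()
# uses "round half to even" (banker's rounding), so halfway cases do not
# always round up.
print(round(2.5), round(3.5))  # 2 4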
class PayabbhiError(Exception):
def __init__(self, description=None, http_status=None,
field=None):
self.description = description
self.http_status = http_status
self.field = field
self._message = self.error_message()
super(PayabbhiError, self).__init__(self._message)
def error_message(self):
msg = "message: " + self.description
msg = (msg + ", http_code: " + str(self.http_status)) if self.http_status else msg
msg = (msg + ", field: " + self.field) if self.field else msg
return msg + "\n"
class APIError(PayabbhiError):
pass
class APIConnectionError(PayabbhiError):
pass
class AuthenticationError(PayabbhiError):
pass
class InvalidRequestError(PayabbhiError):
pass
class GatewayError(PayabbhiError):
pass
class SignatureVerificationError(PayabbhiError):
pass
| class Payabbhierror(Exception):
def __init__(self, description=None, http_status=None, field=None):
self.description = description
self.http_status = http_status
self.field = field
self._message = self.error_message()
        super(Payabbhierror, self).__init__(self._message)
def error_message(self):
msg = 'message: ' + self.description
msg = msg + ', http_code: ' + str(self.http_status) if self.http_status else msg
msg = msg + ', field: ' + self.field if self.field else msg
return msg + '\n'
class Apierror(Payabbhierror):
pass
class Apiconnectionerror(Payabbhierror):
pass
class Authenticationerror(Payabbhierror):
pass
class Invalidrequesterror(Payabbhierror):
pass
class Gatewayerror(Payabbhierror):
pass
class Signatureverificationerror(Payabbhierror):
pass |
from typing import List
class OrderedStream:
def __init__(self, n: int):
self.data = [None]*n
self.ptr = 0
def insert(self, id: int, value: str) -> List[str]:
id -= 1
self.data[id] = value
if id > self.ptr: return []
while self.ptr < len(self.data) and self.data[self.ptr]: self.ptr += 1
return self.data[id:self.ptr]
# Your OrderedStream object will be instantiated and called as such:
# obj = OrderedStream(n)
# param_1 = obj.insert(id,value)
| from typing import List
class Orderedstream:
def __init__(self, n: int):
self.data = [None] * n
self.ptr = 0
def insert(self, id: int, value: str) -> List[str]:
id -= 1
self.data[id] = value
if id > self.ptr:
return []
while self.ptr < len(self.data) and self.data[self.ptr]:
self.ptr += 1
return self.data[id:self.ptr] |
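# Editor's note (hedged usage sketch, mirroring the driver comments in the
# original snippet; assumes List has been imported from typing so the class
# definition above runs): chunks are returned only once the internal pointer
# reaches a contiguous, fully filled prefix.
_stream = Orderedstream(5)
print(_stream.insert(3, 'ccccc'))  # []
print(_stream.insert(1, 'aaaaa'))  # ['aaaaa']
print(_stream.insert(2, 'bbbbb'))  # ['bbbbb', 'ccccc']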
"""
The constants used in FLV files and their meanings.
"""
# Tag type
(TAG_TYPE_AUDIO, TAG_TYPE_VIDEO, TAG_TYPE_SCRIPT) = (8, 9, 18)
# Sound format
(SOUND_FORMAT_PCM_PLATFORM_ENDIAN,
SOUND_FORMAT_ADPCM,
SOUND_FORMAT_MP3,
SOUND_FORMAT_PCM_LITTLE_ENDIAN,
SOUND_FORMAT_NELLYMOSER_16KHZ,
SOUND_FORMAT_NELLYMOSER_8KHZ,
SOUND_FORMAT_NELLYMOSER,
SOUND_FORMAT_G711_A_LAW,
SOUND_FORMAT_G711_MU_LAW) = range(9)
(SOUND_FORMAT_AAC,
SOUND_FORMAT_SPEEX) = range(10, 12)
(SOUND_FORMAT_MP3_8KHZ,
SOUND_FORMAT_DEVICE_SPECIFIC) = range(14, 16)
sound_format_to_string = {
SOUND_FORMAT_PCM_PLATFORM_ENDIAN: "Linear PCM, platform endian",
SOUND_FORMAT_ADPCM: "ADPCM",
SOUND_FORMAT_MP3: "MP3",
SOUND_FORMAT_PCM_LITTLE_ENDIAN: "Linear PCM, little endian",
SOUND_FORMAT_NELLYMOSER_16KHZ: "Nellymoser 16-kHz mono",
SOUND_FORMAT_NELLYMOSER_8KHZ: "Nellymoser 8-kHz mono",
SOUND_FORMAT_NELLYMOSER: "Nellymoser",
SOUND_FORMAT_G711_A_LAW: "G.711 A-law logarithmic PCM",
SOUND_FORMAT_G711_MU_LAW: "G.711 mu-law logarithmic PCM",
SOUND_FORMAT_AAC: "AAC",
SOUND_FORMAT_SPEEX: "Speex",
SOUND_FORMAT_MP3_8KHZ: "MP3 8-kHz",
SOUND_FORMAT_DEVICE_SPECIFIC: "Device-specific sound"
}
# Sound rate
(SOUND_RATE_5_5_KHZ,
SOUND_RATE_11_KHZ,
SOUND_RATE_22_KHZ,
SOUND_RATE_44_KHZ) = range(4)
sound_rate_to_string = {
SOUND_RATE_5_5_KHZ: "5.5-kHz",
SOUND_RATE_11_KHZ: "11-kHz",
SOUND_RATE_22_KHZ: "22-kHz",
SOUND_RATE_44_KHZ: "44-kHz"
}
# Sound size
(SOUND_SIZE_8_BIT, SOUND_SIZE_16_BIT) = range(2)
sound_size_to_string = {
SOUND_SIZE_8_BIT: "snd8Bit",
SOUND_SIZE_16_BIT: "snd16Bit"
}
# Sound type
(SOUND_TYPE_MONO, SOUND_TYPE_STEREO) = range(2)
sound_type_to_string = {
SOUND_TYPE_MONO: "sndMono",
SOUND_TYPE_STEREO: "sndStereo"
}
# AAC packet type
(AAC_PACKET_TYPE_SEQUENCE_HEADER,
AAC_PACKET_TYPE_RAW) = range(2)
aac_packet_type_to_string = {
AAC_PACKET_TYPE_SEQUENCE_HEADER: "sequence header",
AAC_PACKET_TYPE_RAW: "raw"
}
# Codec ID
(CODEC_ID_JPEG,
CODEC_ID_H263,
CODEC_ID_SCREEN_VIDEO,
CODEC_ID_VP6,
CODEC_ID_VP6_WITH_ALPHA,
CODEC_ID_SCREEN_VIDEO_V2,
CODEC_ID_H264) = range(1, 8)
codec_id_to_string = {
CODEC_ID_JPEG: "JPEG",
CODEC_ID_H263: "Sorenson H.263",
CODEC_ID_SCREEN_VIDEO: "Screen video",
CODEC_ID_VP6: "On2 VP6",
CODEC_ID_VP6_WITH_ALPHA: "On2 VP6 with alpha channel",
CODEC_ID_SCREEN_VIDEO_V2: "Screen video version 2",
CODEC_ID_H264: "H.264"
}
# Frame type
(FRAME_TYPE_KEYFRAME,
FRAME_TYPE_INTERFRAME,
FRAME_TYPE_DISPOSABLE_INTERFRAME,
FRAME_TYPE_GENERATED_KEYFRAME,
FRAME_TYPE_INFO_FRAME) = range(1, 6)
frame_type_to_string = {
FRAME_TYPE_KEYFRAME: "keyframe",
FRAME_TYPE_INTERFRAME: "interframe",
FRAME_TYPE_DISPOSABLE_INTERFRAME: "disposable interframe",
FRAME_TYPE_GENERATED_KEYFRAME: "generated keyframe",
FRAME_TYPE_INFO_FRAME: "video info/command frame"
}
# H.264 packet type
(H264_PACKET_TYPE_SEQUENCE_HEADER,
H264_PACKET_TYPE_NALU,
H264_PACKET_TYPE_END_OF_SEQUENCE) = range(3)
h264_packet_type_to_string = {
H264_PACKET_TYPE_SEQUENCE_HEADER: "sequence header",
H264_PACKET_TYPE_NALU: "NAL unit",
H264_PACKET_TYPE_END_OF_SEQUENCE: "sequence end"
}
# Value type
(VALUE_TYPE_NUMBER,
VALUE_TYPE_BOOLEAN,
VALUE_TYPE_STRING,
VALUE_TYPE_OBJECT,
VALUE_TYPE_MOVIECLIP,
VALUE_TYPE_NULL,
VALUE_TYPE_UNDEFINED,
VALUE_TYPE_REFERENCE,
VALUE_TYPE_ECMA_ARRAY) = range(9)
(VALUE_TYPE_STRICT_ARRAY,
VALUE_TYPE_DATE,
VALUE_TYPE_LONGSTRING) = range(10, 13)
value_type_to_string = {
VALUE_TYPE_NUMBER: 'Number',
VALUE_TYPE_BOOLEAN: 'Boolean',
VALUE_TYPE_STRING: 'String',
VALUE_TYPE_OBJECT: 'Object',
VALUE_TYPE_MOVIECLIP: 'MovieClip',
VALUE_TYPE_NULL: 'Null',
VALUE_TYPE_UNDEFINED: 'Undefined',
VALUE_TYPE_REFERENCE: 'Reference',
VALUE_TYPE_ECMA_ARRAY: 'ECMA Array',
VALUE_TYPE_STRICT_ARRAY: 'Strict Array',
VALUE_TYPE_DATE: 'Date',
VALUE_TYPE_LONGSTRING: 'Longstring'
}
| """
The constants used in FLV files and their meanings.
"""
(tag_type_audio, tag_type_video, tag_type_script) = (8, 9, 18)
(sound_format_pcm_platform_endian, sound_format_adpcm, sound_format_mp3, sound_format_pcm_little_endian, sound_format_nellymoser_16_khz, sound_format_nellymoser_8_khz, sound_format_nellymoser, sound_format_g711_a_law, sound_format_g711_mu_law) = range(9)
(sound_format_aac, sound_format_speex) = range(10, 12)
(sound_format_mp3_8_khz, sound_format_device_specific) = range(14, 16)
sound_format_to_string = {SOUND_FORMAT_PCM_PLATFORM_ENDIAN: 'Linear PCM, platform endian', SOUND_FORMAT_ADPCM: 'ADPCM', SOUND_FORMAT_MP3: 'MP3', SOUND_FORMAT_PCM_LITTLE_ENDIAN: 'Linear PCM, little endian', SOUND_FORMAT_NELLYMOSER_16KHZ: 'Nellymoser 16-kHz mono', SOUND_FORMAT_NELLYMOSER_8KHZ: 'Nellymoser 8-kHz mono', SOUND_FORMAT_NELLYMOSER: 'Nellymoser', SOUND_FORMAT_G711_A_LAW: 'G.711 A-law logarithmic PCM', SOUND_FORMAT_G711_MU_LAW: 'G.711 mu-law logarithmic PCM', SOUND_FORMAT_AAC: 'AAC', SOUND_FORMAT_SPEEX: 'Speex', SOUND_FORMAT_MP3_8KHZ: 'MP3 8-kHz', SOUND_FORMAT_DEVICE_SPECIFIC: 'Device-specific sound'}
(sound_rate_5_5_khz, sound_rate_11_khz, sound_rate_22_khz, sound_rate_44_khz) = range(4)
sound_rate_to_string = {SOUND_RATE_5_5_KHZ: '5.5-kHz', SOUND_RATE_11_KHZ: '11-kHz', SOUND_RATE_22_KHZ: '22-kHz', SOUND_RATE_44_KHZ: '44-kHz'}
(sound_size_8_bit, sound_size_16_bit) = range(2)
sound_size_to_string = {SOUND_SIZE_8_BIT: 'snd8Bit', SOUND_SIZE_16_BIT: 'snd16Bit'}
(sound_type_mono, sound_type_stereo) = range(2)
sound_type_to_string = {SOUND_TYPE_MONO: 'sndMono', SOUND_TYPE_STEREO: 'sndStereo'}
(aac_packet_type_sequence_header, aac_packet_type_raw) = range(2)
aac_packet_type_to_string = {AAC_PACKET_TYPE_SEQUENCE_HEADER: 'sequence header', AAC_PACKET_TYPE_RAW: 'raw'}
(codec_id_jpeg, codec_id_h263, codec_id_screen_video, codec_id_vp6, codec_id_vp6_with_alpha, codec_id_screen_video_v2, codec_id_h264) = range(1, 8)
codec_id_to_string = {CODEC_ID_JPEG: 'JPEG', CODEC_ID_H263: 'Sorenson H.263', CODEC_ID_SCREEN_VIDEO: 'Screen video', CODEC_ID_VP6: 'On2 VP6', CODEC_ID_VP6_WITH_ALPHA: 'On2 VP6 with alpha channel', CODEC_ID_SCREEN_VIDEO_V2: 'Screen video version 2', CODEC_ID_H264: 'H.264'}
(frame_type_keyframe, frame_type_interframe, frame_type_disposable_interframe, frame_type_generated_keyframe, frame_type_info_frame) = range(1, 6)
frame_type_to_string = {FRAME_TYPE_KEYFRAME: 'keyframe', FRAME_TYPE_INTERFRAME: 'interframe', FRAME_TYPE_DISPOSABLE_INTERFRAME: 'disposable interframe', FRAME_TYPE_GENERATED_KEYFRAME: 'generated keyframe', FRAME_TYPE_INFO_FRAME: 'video info/command frame'}
(h264_packet_type_sequence_header, h264_packet_type_nalu, h264_packet_type_end_of_sequence) = range(3)
h264_packet_type_to_string = {H264_PACKET_TYPE_SEQUENCE_HEADER: 'sequence header', H264_PACKET_TYPE_NALU: 'NAL unit', H264_PACKET_TYPE_END_OF_SEQUENCE: 'sequence end'}
(value_type_number, value_type_boolean, value_type_string, value_type_object, value_type_movieclip, value_type_null, value_type_undefined, value_type_reference, value_type_ecma_array) = range(9)
(value_type_strict_array, value_type_date, value_type_longstring) = range(10, 13)
value_type_to_string = {VALUE_TYPE_NUMBER: 'Number', VALUE_TYPE_BOOLEAN: 'Boolean', VALUE_TYPE_STRING: 'String', VALUE_TYPE_OBJECT: 'Object', VALUE_TYPE_MOVIECLIP: 'MovieClip', VALUE_TYPE_NULL: 'Null', VALUE_TYPE_UNDEFINED: 'Undefined', VALUE_TYPE_REFERENCE: 'Reference', VALUE_TYPE_ECMA_ARRAY: 'ECMA Array', VALUE_TYPE_STRICT_ARRAY: 'Strict Array', VALUE_TYPE_DATE: 'Date', VALUE_TYPE_LONGSTRING: 'Longstring'} |
# convert2.py
# A program to convert Celsius temps to Fahrenheit.
# This version issues heat and cold warnings.
def main():
celsius = float(input("What is the Celsius temperature? "))
fahrenheit = 9 / 5 * celsius + 32
print("The temperature is", fahrenheit, "degrees fahrenheit.")
if fahrenheit >= 90:
print("It's really hot out there, be careful!")
if fahrenheit <= 30:
print("Brrrrr. Be sure to dress warmly")
main() | def main():
celsius = float(input('What is the Celsius temperature? '))
fahrenheit = 9 / 5 * celsius + 32
print('The temperature is', fahrenheit, 'degrees fahrenheit.')
if fahrenheit >= 90:
print("It's really hot out there, be careful!")
if fahrenheit <= 30:
print('Brrrrr. Be sure to dress warmly')
main() |
class LevenshteinDistance:
def solve(self, str_a, str_b):
        a, b = str_a, str_b
        self.a, self.b = a, b
dist = {(x,y):0 for x in range(len(a)) for y in range(len(b))}
for x in range(len(a)): dist[(x,-1)] = x+1
for y in range(len(b)): dist[(-1,y)] = y+1
dist[(-1,-1)] = 0
for i in range(len(a)):
for j in range(len(b)):
need_edit = a[i]!=b[j]
last_edits = min(dist[(i,j-1)], dist[(i-1,j)], dist[(i-1,j-1)])
dist[(i,j)] = last_edits + int(need_edit)
self.distance = dist
return dist[(i,j)]
def show(self):
if hasattr(self, 'distance'):
dist = self.distance
            for x in range(-1, len(self.a)):
row = []
                for y in range(-1, len(self.b)):
row.append(dist[(x,y)])
print(row)
# test
ld = LevenshteinDistance()
ld.solve('kitten','sitting')
ld.show() | class Levenshteindistance:
def solve(self, str_a, str_b):
        (a, b) = (str_a, str_b)
        (self.a, self.b) = (a, b)
dist = {(x, y): 0 for x in range(len(a)) for y in range(len(b))}
for x in range(len(a)):
dist[x, -1] = x + 1
for y in range(len(b)):
dist[-1, y] = y + 1
dist[-1, -1] = 0
for i in range(len(a)):
for j in range(len(b)):
need_edit = a[i] != b[j]
last_edits = min(dist[i, j - 1], dist[i - 1, j], dist[i - 1, j - 1])
dist[i, j] = last_edits + int(need_edit)
self.distance = dist
return dist[i, j]
def show(self):
if hasattr(self, 'distance'):
dist = self.distance
            for x in range(-1, len(self.a)):
row = []
                for y in range(-1, len(self.b)):
row.append(dist[x, y])
print(row)
ld = Levenshteindistance()
ld.solve('kitten', 'sitting')
ld.show() |
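# Editor's note (hedged check, not part of the original test): for the classic
# pair above the edit distance is 3, and this implementation does return 3.
# Note, however, that the recurrence adds the substitution cost to the minimum
# of all three neighbours instead of charging insertions/deletions a flat 1,
# so some inputs are undercounted: 'a' -> 'aa' yields 0 here although the true
# distance is 1.
_check = Levenshteindistance()
print(_check.solve('kitten', 'sitting'))  # 3
print(_check.solve('a', 'aa'))            # 0 (true Levenshtein distance: 1)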
# -*- coding: utf-8 -*-
BROKER_URL = 'amqp://guest@localhost//'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_RESULT_BACKEND = 'amqp://guest@localhost//'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERY_ENABLE_UTC = False
| broker_url = 'amqp://guest@localhost//'
celery_accept_content = ['json']
celery_result_backend = 'amqp://guest@localhost//'
celery_result_serializer = 'json'
celery_task_serializer = 'json'
celery_timezone = 'Asia/Shanghai'
celery_enable_utc = False |
def structural_loss_dst68j3d(p_pred, v_pred):
v_pred = K.stop_gradient(v_pred)
def getlength(v):
return K.sqrt(K.sum(K.square(v), axis=-1))
"""Arms segments"""
joints_arms = p_pred[:, :, 16:37+1, :]
conf_arms = v_pred[:, :, 16:37+1]
diff_arms_r = joints_arms[:, :, 2:-1:2, :] - joints_arms[:, :, 0:-3:2, :]
diff_arms_l = joints_arms[:, :, 3::2, :] - joints_arms[:, :, 1:-2:2, :]
c2_arms_r = conf_arms[:, :, 2:-1:2] * conf_arms[:, :, 0:-3:2]
c2_arms_l = conf_arms[:, :, 3::2] * conf_arms[:, :, 1:-2:2]
"""Legs segments"""
joints_legs = p_pred[:, :, 48:67+1, :]
conf_legs = v_pred[:, :, 48:67+1]
diff_legs_r = joints_legs[:, :, 2:-1:2, :] - joints_legs[:, :, 0:-3:2, :]
diff_legs_l = joints_legs[:, :, 3::2, :] - joints_legs[:, :, 1:-2:2, :]
c2_legs_r = conf_legs[:, :, 2:-1:2] * conf_legs[:, :, 0:-3:2]
c2_legs_l = conf_legs[:, :, 3::2] * conf_legs[:, :, 1:-2:2]
"""Limbs segments"""
segs_limbs_r = getlength(K.concatenate([diff_arms_r, diff_legs_r], axis=-2))
segs_limbs_l = getlength(K.concatenate([diff_arms_l, diff_legs_l], axis=-2))
c2_limbs_r = K.concatenate([c2_arms_r, c2_legs_r], axis=-1)
c2_limbs_l = K.concatenate([c2_arms_l, c2_legs_l], axis=-1)
len_upperarm_r = K.sum(segs_limbs_r[:, :, 2:5], axis=-1, keepdims=True)
len_upperarm_l = K.sum(segs_limbs_l[:, :, 2:5], axis=-1, keepdims=True)
len_forearm_r = K.sum(segs_limbs_r[:, :, 5:8], axis=-1, keepdims=True)
len_forearm_l = K.sum(segs_limbs_l[:, :, 5:8], axis=-1, keepdims=True)
len_hand_r = K.sum(segs_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
len_hand_l = K.sum(segs_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
c2_upperarm_r = K.sum(c2_limbs_r[:, :, 2:5], axis=-1, keepdims=True)
c2_upperarm_l = K.sum(c2_limbs_l[:, :, 2:5], axis=-1, keepdims=True)
c2_forearm_r = K.sum(c2_limbs_r[:, :, 5:8], axis=-1, keepdims=True)
c2_forearm_l = K.sum(c2_limbs_l[:, :, 5:8], axis=-1, keepdims=True)
c2_hand_r = K.sum(c2_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
c2_hand_l = K.sum(c2_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
len_femur_r = K.sum(K.concatenate([
segs_limbs_r[:, :, 10:11],
segs_limbs_r[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
len_femur_l = K.sum(K.concatenate([
segs_limbs_l[:, :, 10:11],
segs_limbs_l[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
c2_femur_r = K.sum(K.concatenate([
c2_limbs_r[:, :, 10:11],
c2_limbs_r[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
c2_femur_l = K.sum(K.concatenate([
c2_limbs_l[:, :, 10:11],
c2_limbs_l[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
len_shin_r = K.sum(segs_limbs_r[:, :, 14:17], axis=-1, keepdims=True)
len_shin_l = K.sum(segs_limbs_l[:, :, 14:17], axis=-1, keepdims=True)
len_feet_r = K.sum(segs_limbs_r[:, :, 17:19], axis=-1, keepdims=True)
len_feet_l = K.sum(segs_limbs_l[:, :, 17:19], axis=-1, keepdims=True)
c2_shin_r = K.sum(c2_limbs_r[:, :, 14:17], axis=-1, keepdims=True)
c2_shin_l = K.sum(c2_limbs_l[:, :, 14:17], axis=-1, keepdims=True)
c2_feet_r = K.sum(c2_limbs_r[:, :, 17:19], axis=-1, keepdims=True)
c2_feet_l = K.sum(c2_limbs_l[:, :, 17:19], axis=-1, keepdims=True)
joints_head = K.concatenate([
p_pred[:, :, 11:11+1, :], p_pred[:, :, 11:11+1, :],
p_pred[:, :, 12:15+1, :],
p_pred[:, :, 8:8+1, :], p_pred[:, :, 8:8+1, :],
p_pred[:, :, 14:15+1, :],
], axis=-2)
conf_head = K.concatenate([
v_pred[:, :, 11:11+1], v_pred[:, :, 11:11+1],
v_pred[:, :, 12:15+1],
v_pred[:, :, 8:8+1], v_pred[:, :, 8:8+1],
v_pred[:, :, 14:15+1],
], axis=-1)
diff_head_r = joints_head[:, :, 2:-1:2, :] - joints_head[:, :, 0:-3:2, :]
diff_head_l = joints_head[:, :, 3::2, :] - joints_head[:, :, 1:-2:2, :]
c2_head_r = conf_head[:, :, 2:-1:2] * conf_head[:, :, 0:-3:2]
c2_head_l = conf_head[:, :, 3::2] * conf_head[:, :, 1:-2:2]
diff_cross_r = K.concatenate([
p_pred[:, :, 3:3+1, :] - p_pred[:, :, 20:20+1, :],
p_pred[:, :, 49:49+1, :] - p_pred[:, :, 3:3+1, :],
], axis=-2)
diff_cross_l = K.concatenate([
p_pred[:, :, 3:3+1, :] - p_pred[:, :, 21:21+1, :],
p_pred[:, :, 48:48+1, :] - p_pred[:, :, 3:3+1, :],
], axis=-2)
diff_spine = K.concatenate([
p_pred[:, :, 0:0+1, :] - p_pred[:, :, 7:7+1, :], # euclidean
p_pred[:, :, 1:7+1, :] - p_pred[:, :, 0:6+1, :], # geodesic
], axis=-2)
segs_spine = getlength(diff_spine)
spine_euclidian = K.stop_gradient(segs_spine[:, :, :1])
len_spine = K.sum(segs_spine[:, :, 1:], axis=-1, keepdims=True)
segs_midhead = getlength(p_pred[:, :, 9:11+1, :] - p_pred[:, :, 8:10+1, :])
len_midhead = K.sum(segs_midhead, axis=-1, keepdims=True)
segs_ears = getlength(K.concatenate([
p_pred[:, :, 12:12+1, :] - p_pred[:, :, 14:14+1, :],
p_pred[:, :, 9:9+1, :] - p_pred[:, :, 12:12+1, :],
p_pred[:, :, 13:13+1, :] - p_pred[:, :, 9:9+1, :],
p_pred[:, :, 15:15+1, :] - p_pred[:, :, 13:13+1, :]
], axis=-2))
len_ears = K.sum(segs_ears, axis=-1, keepdims=True)
len_cross_r = K.sum(getlength(diff_cross_r), axis=-1, keepdims=True)
len_cross_l = K.sum(getlength(diff_cross_l), axis=-1, keepdims=True)
ref_length = K.stop_gradient(
K.clip((len_cross_r + len_cross_l) / 2., 0.1, 1.))
"""Reference lengths based on ground truth poses from Human3.6M:
Spine wrt. ref: 0.715 (0.032 std.)
Spine wrt. euclidean: 1.430 (maximum) (0.046 std.)
MidHead wrt. ref: 0.266 (0.019 std.)
Shoulder wrt. ref: 0.150 (?? std.)
Upper arms wrt. ref: 0.364 (0.019 std.)
Fore arms wrt. ref: 0.326 (0.025 std.)
Hands wrt. ref: 0.155 (0.014 std.)
Femur wrt. ref: 0.721 (0.040 std.)
Shin wrt. ref: 0.549 (0.063 std.)
Feet wrt. ref: 0.294 (0.060 std.)
"""
rules_loss = K.concatenate([
c2_limbs_r * c2_limbs_l * (segs_limbs_r - segs_limbs_l),
len_spine - 0.715 * ref_length,
len_midhead - 0.266 * ref_length,
c2_upperarm_r * (len_upperarm_r - 0.364 * ref_length),
c2_upperarm_l * (len_upperarm_l - 0.364 * ref_length),
c2_forearm_r * (len_forearm_r - 0.326 * ref_length),
c2_forearm_l * (len_forearm_l - 0.326 * ref_length),
c2_hand_r * (len_hand_r - 0.155 * ref_length),
c2_hand_l * (len_hand_l - 0.155 * ref_length),
c2_femur_r * (len_femur_r - 0.721 * ref_length),
c2_femur_l * (len_femur_l - 0.721 * ref_length),
c2_shin_r * (len_shin_r - 0.549 * ref_length),
c2_shin_l * (len_shin_l - 0.549 * ref_length),
c2_feet_r * (len_feet_r - 0.294 * ref_length),
c2_feet_l * (len_feet_l - 0.294 * ref_length),
len_ears - 0.213 * ref_length,
], axis=-1)
rules = K.sum(K.square(rules_loss), axis=-1)
spine_bent = K.squeeze(K.maximum(0., len_spine - 1.430 * spine_euclidian),
axis=-1)
return K.mean(spine_bent + rules, axis=-1)
| def structural_loss_dst68j3d(p_pred, v_pred):
v_pred = K.stop_gradient(v_pred)
def getlength(v):
return K.sqrt(K.sum(K.square(v), axis=-1))
'Arms segments'
joints_arms = p_pred[:, :, 16:37 + 1, :]
conf_arms = v_pred[:, :, 16:37 + 1]
diff_arms_r = joints_arms[:, :, 2:-1:2, :] - joints_arms[:, :, 0:-3:2, :]
diff_arms_l = joints_arms[:, :, 3::2, :] - joints_arms[:, :, 1:-2:2, :]
c2_arms_r = conf_arms[:, :, 2:-1:2] * conf_arms[:, :, 0:-3:2]
c2_arms_l = conf_arms[:, :, 3::2] * conf_arms[:, :, 1:-2:2]
'Legs segments'
joints_legs = p_pred[:, :, 48:67 + 1, :]
conf_legs = v_pred[:, :, 48:67 + 1]
diff_legs_r = joints_legs[:, :, 2:-1:2, :] - joints_legs[:, :, 0:-3:2, :]
diff_legs_l = joints_legs[:, :, 3::2, :] - joints_legs[:, :, 1:-2:2, :]
c2_legs_r = conf_legs[:, :, 2:-1:2] * conf_legs[:, :, 0:-3:2]
c2_legs_l = conf_legs[:, :, 3::2] * conf_legs[:, :, 1:-2:2]
'Limbs segments'
segs_limbs_r = getlength(K.concatenate([diff_arms_r, diff_legs_r], axis=-2))
segs_limbs_l = getlength(K.concatenate([diff_arms_l, diff_legs_l], axis=-2))
c2_limbs_r = K.concatenate([c2_arms_r, c2_legs_r], axis=-1)
c2_limbs_l = K.concatenate([c2_arms_l, c2_legs_l], axis=-1)
len_upperarm_r = K.sum(segs_limbs_r[:, :, 2:5], axis=-1, keepdims=True)
len_upperarm_l = K.sum(segs_limbs_l[:, :, 2:5], axis=-1, keepdims=True)
len_forearm_r = K.sum(segs_limbs_r[:, :, 5:8], axis=-1, keepdims=True)
len_forearm_l = K.sum(segs_limbs_l[:, :, 5:8], axis=-1, keepdims=True)
len_hand_r = K.sum(segs_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
len_hand_l = K.sum(segs_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
c2_upperarm_r = K.sum(c2_limbs_r[:, :, 2:5], axis=-1, keepdims=True)
c2_upperarm_l = K.sum(c2_limbs_l[:, :, 2:5], axis=-1, keepdims=True)
c2_forearm_r = K.sum(c2_limbs_r[:, :, 5:8], axis=-1, keepdims=True)
c2_forearm_l = K.sum(c2_limbs_l[:, :, 5:8], axis=-1, keepdims=True)
c2_hand_r = K.sum(c2_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
c2_hand_l = K.sum(c2_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
len_femur_r = K.sum(K.concatenate([segs_limbs_r[:, :, 10:11], segs_limbs_r[:, :, 12:14]], axis=-1), axis=-1, keepdims=True)
len_femur_l = K.sum(K.concatenate([segs_limbs_l[:, :, 10:11], segs_limbs_l[:, :, 12:14]], axis=-1), axis=-1, keepdims=True)
c2_femur_r = K.sum(K.concatenate([c2_limbs_r[:, :, 10:11], c2_limbs_r[:, :, 12:14]], axis=-1), axis=-1, keepdims=True)
c2_femur_l = K.sum(K.concatenate([c2_limbs_l[:, :, 10:11], c2_limbs_l[:, :, 12:14]], axis=-1), axis=-1, keepdims=True)
len_shin_r = K.sum(segs_limbs_r[:, :, 14:17], axis=-1, keepdims=True)
len_shin_l = K.sum(segs_limbs_l[:, :, 14:17], axis=-1, keepdims=True)
len_feet_r = K.sum(segs_limbs_r[:, :, 17:19], axis=-1, keepdims=True)
len_feet_l = K.sum(segs_limbs_l[:, :, 17:19], axis=-1, keepdims=True)
c2_shin_r = K.sum(c2_limbs_r[:, :, 14:17], axis=-1, keepdims=True)
c2_shin_l = K.sum(c2_limbs_l[:, :, 14:17], axis=-1, keepdims=True)
c2_feet_r = K.sum(c2_limbs_r[:, :, 17:19], axis=-1, keepdims=True)
c2_feet_l = K.sum(c2_limbs_l[:, :, 17:19], axis=-1, keepdims=True)
joints_head = K.concatenate([p_pred[:, :, 11:11 + 1, :], p_pred[:, :, 11:11 + 1, :], p_pred[:, :, 12:15 + 1, :], p_pred[:, :, 8:8 + 1, :], p_pred[:, :, 8:8 + 1, :], p_pred[:, :, 14:15 + 1, :]], axis=-2)
conf_head = K.concatenate([v_pred[:, :, 11:11 + 1], v_pred[:, :, 11:11 + 1], v_pred[:, :, 12:15 + 1], v_pred[:, :, 8:8 + 1], v_pred[:, :, 8:8 + 1], v_pred[:, :, 14:15 + 1]], axis=-1)
diff_head_r = joints_head[:, :, 2:-1:2, :] - joints_head[:, :, 0:-3:2, :]
diff_head_l = joints_head[:, :, 3::2, :] - joints_head[:, :, 1:-2:2, :]
c2_head_r = conf_head[:, :, 2:-1:2] * conf_head[:, :, 0:-3:2]
c2_head_l = conf_head[:, :, 3::2] * conf_head[:, :, 1:-2:2]
diff_cross_r = K.concatenate([p_pred[:, :, 3:3 + 1, :] - p_pred[:, :, 20:20 + 1, :], p_pred[:, :, 49:49 + 1, :] - p_pred[:, :, 3:3 + 1, :]], axis=-2)
diff_cross_l = K.concatenate([p_pred[:, :, 3:3 + 1, :] - p_pred[:, :, 21:21 + 1, :], p_pred[:, :, 48:48 + 1, :] - p_pred[:, :, 3:3 + 1, :]], axis=-2)
diff_spine = K.concatenate([p_pred[:, :, 0:0 + 1, :] - p_pred[:, :, 7:7 + 1, :], p_pred[:, :, 1:7 + 1, :] - p_pred[:, :, 0:6 + 1, :]], axis=-2)
segs_spine = getlength(diff_spine)
spine_euclidian = K.stop_gradient(segs_spine[:, :, :1])
len_spine = K.sum(segs_spine[:, :, 1:], axis=-1, keepdims=True)
segs_midhead = getlength(p_pred[:, :, 9:11 + 1, :] - p_pred[:, :, 8:10 + 1, :])
len_midhead = K.sum(segs_midhead, axis=-1, keepdims=True)
segs_ears = getlength(K.concatenate([p_pred[:, :, 12:12 + 1, :] - p_pred[:, :, 14:14 + 1, :], p_pred[:, :, 9:9 + 1, :] - p_pred[:, :, 12:12 + 1, :], p_pred[:, :, 13:13 + 1, :] - p_pred[:, :, 9:9 + 1, :], p_pred[:, :, 15:15 + 1, :] - p_pred[:, :, 13:13 + 1, :]], axis=-2))
len_ears = K.sum(segs_ears, axis=-1, keepdims=True)
len_cross_r = K.sum(getlength(diff_cross_r), axis=-1, keepdims=True)
len_cross_l = K.sum(getlength(diff_cross_l), axis=-1, keepdims=True)
ref_length = K.stop_gradient(K.clip((len_cross_r + len_cross_l) / 2.0, 0.1, 1.0))
'Reference lengths based on ground truth poses from Human3.6M:\n Spine wrt. ref: 0.715 (0.032 std.)\n Spine wrt. euclidean: 1.430 (maximum) (0.046 std.)\n MidHead wrt. ref: 0.266 (0.019 std.)\n Shoulder wrt. ref: 0.150 (?? std.)\n Upper arms wrt. ref: 0.364 (0.019 std.)\n Fore arms wrt. ref: 0.326 (0.025 std.)\n Hands wrt. ref: 0.155 (0.014 std.)\n Femur wrt. ref: 0.721 (0.040 std.)\n Shin wrt. ref: 0.549 (0.063 std.)\n Feet wrt. ref: 0.294 (0.060 std.)\n '
rules_loss = K.concatenate([c2_limbs_r * c2_limbs_l * (segs_limbs_r - segs_limbs_l), len_spine - 0.715 * ref_length, len_midhead - 0.266 * ref_length, c2_upperarm_r * (len_upperarm_r - 0.364 * ref_length), c2_upperarm_l * (len_upperarm_l - 0.364 * ref_length), c2_forearm_r * (len_forearm_r - 0.326 * ref_length), c2_forearm_l * (len_forearm_l - 0.326 * ref_length), c2_hand_r * (len_hand_r - 0.155 * ref_length), c2_hand_l * (len_hand_l - 0.155 * ref_length), c2_femur_r * (len_femur_r - 0.721 * ref_length), c2_femur_l * (len_femur_l - 0.721 * ref_length), c2_shin_r * (len_shin_r - 0.549 * ref_length), c2_shin_l * (len_shin_l - 0.549 * ref_length), c2_feet_r * (len_feet_r - 0.294 * ref_length), c2_feet_l * (len_feet_l - 0.294 * ref_length), len_ears - 0.213 * ref_length], axis=-1)
rules = K.sum(K.square(rules_loss), axis=-1)
spine_bent = K.squeeze(K.maximum(0.0, len_spine - 1.43 * spine_euclidian), axis=-1)
return K.mean(spine_bent + rules, axis=-1) |
'''
Created on 2011-6-22
@author: dholer
'''
| """
Created on 2011-6-22
@author: dholer
""" |
"""Tests for the `sendoff` library."""
"""
The `sendoff` library tests validate the expected function of the library.
"""
| """Tests for the `sendoff` library."""
'\nThe `sendoff` library tests validate the expected function of the library.\n' |
total = 0
for n in range(1000, 1000000):
suma = 0
for i in str(n):
suma += int(i)**5
if (n == suma):
total += n
print(total) | total = 0
for n in range(1000, 1000000):
suma = 0
for i in str(n):
suma += int(i) ** 5
if n == suma:
total += n
print(total) |
class Solution:
@staticmethod
def naive(board,word):
rows,cols,n = len(board),len(board[0]),len(word)
visited = set()
def dfs(i,j,k):
idf = str(i)+','+str(j)
if i<0 or j<0 or i>cols-1 or j>rows-1 or \
board[j][i]!=word[k] or idf in visited:
return False
if k==n-1 and word[k]==board[j][i]:
return True
visited.add(idf)
if word[k]==board[j][i]:
return dfs(i+1,j,k+1) or dfs(i-1,j,k+1) or\
dfs(i,j+1,k+1) or dfs(i,j-1,k+1)
for j in range(rows):
for i in range(cols):
if board[j][i]==word[0]:
if dfs(i,j,0): return True
return False
@staticmethod
def quick(board,word):
''' Improve by,
1. Exclude set which stores visited coordinates, and use #.
2. No indicing in original word.
3. Quick exit for 4 directions.
'''
rows,cols,n = len(board),len(board[0]),len(word)
def dfs(i,j,remain):
if len(remain)==0: return True
if i<0 or j<0 or i>cols-1 or j>rows-1 or \
board[j][i]!=remain[0]: return False
board[j][i]="#"
ret = False
for rowOff,colOff in [(1,0),(-1,0),(0,1),(0,-1)]:
ret = dfs(i+colOff,j+rowOff,remain[1:])
if ret: break
board[j][i]=remain[0]
return ret
for j in range(rows):
for i in range(cols):
if board[j][i]==word[0]:
if dfs(i,j,word): return True
return False | class Solution:
@staticmethod
def naive(board, word):
(rows, cols, n) = (len(board), len(board[0]), len(word))
visited = set()
def dfs(i, j, k):
idf = str(i) + ',' + str(j)
if i < 0 or j < 0 or i > cols - 1 or (j > rows - 1) or (board[j][i] != word[k]) or (idf in visited):
return False
if k == n - 1 and word[k] == board[j][i]:
return True
visited.add(idf)
if word[k] == board[j][i]:
return dfs(i + 1, j, k + 1) or dfs(i - 1, j, k + 1) or dfs(i, j + 1, k + 1) or dfs(i, j - 1, k + 1)
for j in range(rows):
for i in range(cols):
if board[j][i] == word[0]:
if dfs(i, j, 0):
return True
return False
@staticmethod
def quick(board, word):
""" Improve by,
1. Exclude set which stores visited coordinates, and use #.
2. No indicing in original word.
3. Quick exit for 4 directions.
"""
(rows, cols, n) = (len(board), len(board[0]), len(word))
def dfs(i, j, remain):
if len(remain) == 0:
return True
if i < 0 or j < 0 or i > cols - 1 or (j > rows - 1) or (board[j][i] != remain[0]):
return False
board[j][i] = '#'
ret = False
for (row_off, col_off) in [(1, 0), (-1, 0), (0, 1), (0, -1)]:
                ret = dfs(i + col_off, j + row_off, remain[1:])
if ret:
break
board[j][i] = remain[0]
return ret
for j in range(rows):
for i in range(cols):
if board[j][i] == word[0]:
if dfs(i, j, word):
return True
return False |
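# Usage sketch for the solvers above (the board and words are assumed sample
# inputs in the classic word-search style, not taken from the snippet itself):
sample_board = [['A', 'B', 'C', 'E'], ['S', 'F', 'C', 'S'], ['A', 'D', 'E', 'E']]
print(Solution.quick(sample_board, 'ABCCED'))  # expected True: the path A->B->C->C->E->D exists
print(Solution.quick(sample_board, 'ABCB'))    # expected False: the second 'B' would reuse a cell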
# ----------------------------------------------------------------------
# CISCO-VLAN-MEMBERSHIP-MIB
# Compiled MIB
# Do not modify this file directly
# Run ./noc mib make-cmib instead
# ----------------------------------------------------------------------
# Copyright (C) 2007-2020 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# MIB Name
NAME = "CISCO-VLAN-MEMBERSHIP-MIB"
# Metadata
LAST_UPDATED = "2007-12-14"
COMPILED = "2020-01-19"
# MIB Data: name -> oid
MIB = {
"CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIB": "1.3.6.1.4.1.9.9.68",
"CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIBObjects": "1.3.6.1.4.1.9.9.68.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmps": "1.3.6.1.4.1.9.9.68.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsVQPVersion": "1.3.6.1.4.1.9.9.68.1.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRetries": "1.3.6.1.4.1.9.9.68.1.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmInterval": "1.3.6.1.4.1.9.9.68.1.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirm": "1.3.6.1.4.1.9.9.68.1.1.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmResult": "1.3.6.1.4.1.9.9.68.1.1.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsCurrent": "1.3.6.1.4.1.9.9.68.1.1.6",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsTable": "1.3.6.1.4.1.9.9.68.1.1.7",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsEntry": "1.3.6.1.4.1.9.9.68.1.1.7.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsIpAddress": "1.3.6.1.4.1.9.9.68.1.1.7.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsPrimary": "1.3.6.1.4.1.9.9.68.1.1.7.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRowStatus": "1.3.6.1.4.1.9.9.68.1.1.7.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembership": "1.3.6.1.4.1.9.9.68.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryTable": "1.3.6.1.4.1.9.9.68.1.2.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryEntry": "1.3.6.1.4.1.9.9.68.1.2.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryVlanIndex": "1.3.6.1.4.1.9.9.68.1.2.1.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMemberPorts": "1.3.6.1.4.1.9.9.68.1.2.1.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMember2kPorts": "1.3.6.1.4.1.9.9.68.1.2.1.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipTable": "1.3.6.1.4.1.9.9.68.1.2.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipEntry": "1.3.6.1.4.1.9.9.68.1.2.2.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlanType": "1.3.6.1.4.1.9.9.68.1.2.2.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlan": "1.3.6.1.4.1.9.9.68.1.2.2.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmPortStatus": "1.3.6.1.4.1.9.9.68.1.2.2.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans": "1.3.6.1.4.1.9.9.68.1.2.2.1.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans2k": "1.3.6.1.4.1.9.9.68.1.2.2.1.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans3k": "1.3.6.1.4.1.9.9.68.1.2.2.1.6",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlans4k": "1.3.6.1.4.1.9.9.68.1.2.2.1.7",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtTable": "1.3.6.1.4.1.9.9.68.1.2.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtEntry": "1.3.6.1.4.1.9.9.68.1.2.3.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipPortRangeIndex": "1.3.6.1.4.1.9.9.68.1.2.3.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtPorts": "1.3.6.1.4.1.9.9.68.1.2.3.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVlanCreationMode": "1.3.6.1.4.1.9.9.68.1.2.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmStatistics": "1.3.6.1.4.1.9.9.68.1.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPQueries": "1.3.6.1.4.1.9.9.68.1.3.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPResponses": "1.3.6.1.4.1.9.9.68.1.3.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChanges": "1.3.6.1.4.1.9.9.68.1.3.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPShutdown": "1.3.6.1.4.1.9.9.68.1.3.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPDenied": "1.3.6.1.4.1.9.9.68.1.3.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongDomain": "1.3.6.1.4.1.9.9.68.1.3.6",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongVersion": "1.3.6.1.4.1.9.9.68.1.3.7",
"CISCO-VLAN-MEMBERSHIP-MIB::vmInsufficientResources": "1.3.6.1.4.1.9.9.68.1.3.8",
"CISCO-VLAN-MEMBERSHIP-MIB::vmStatus": "1.3.6.1.4.1.9.9.68.1.4",
"CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsEnabled": "1.3.6.1.4.1.9.9.68.1.4.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlan": "1.3.6.1.4.1.9.9.68.1.5",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanTable": "1.3.6.1.4.1.9.9.68.1.5.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanEntry": "1.3.6.1.4.1.9.9.68.1.5.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanId": "1.3.6.1.4.1.9.9.68.1.5.1.1.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanCdpVerifyEnable": "1.3.6.1.4.1.9.9.68.1.5.1.1.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmNotifications": "1.3.6.1.4.1.9.9.68.2",
"CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsPrefix": "1.3.6.1.4.1.9.9.68.2.0",
"CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChange": "1.3.6.1.4.1.9.9.68.2.0.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMIBConformance": "1.3.6.1.4.1.9.9.68.3",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMIBCompliances": "1.3.6.1.4.1.9.9.68.3.1",
"CISCO-VLAN-MEMBERSHIP-MIB::vmMIBGroups": "1.3.6.1.4.1.9.9.68.3.2",
}
DISPLAY_HINTS = {}
| name = 'CISCO-VLAN-MEMBERSHIP-MIB'
last_updated = '2007-12-14'
compiled = '2020-01-19'
mib = {'CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIB': '1.3.6.1.4.1.9.9.68', 'CISCO-VLAN-MEMBERSHIP-MIB::ciscoVlanMembershipMIBObjects': '1.3.6.1.4.1.9.9.68.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmps': '1.3.6.1.4.1.9.9.68.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsVQPVersion': '1.3.6.1.4.1.9.9.68.1.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRetries': '1.3.6.1.4.1.9.9.68.1.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmInterval': '1.3.6.1.4.1.9.9.68.1.1.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirm': '1.3.6.1.4.1.9.9.68.1.1.4', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsReconfirmResult': '1.3.6.1.4.1.9.9.68.1.1.5', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsCurrent': '1.3.6.1.4.1.9.9.68.1.1.6', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsTable': '1.3.6.1.4.1.9.9.68.1.1.7', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsEntry': '1.3.6.1.4.1.9.9.68.1.1.7.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsIpAddress': '1.3.6.1.4.1.9.9.68.1.1.7.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsPrimary': '1.3.6.1.4.1.9.9.68.1.1.7.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsRowStatus': '1.3.6.1.4.1.9.9.68.1.1.7.1.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembership': '1.3.6.1.4.1.9.9.68.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryTable': '1.3.6.1.4.1.9.9.68.1.2.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryEntry': '1.3.6.1.4.1.9.9.68.1.2.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryVlanIndex': '1.3.6.1.4.1.9.9.68.1.2.1.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMemberPorts': '1.3.6.1.4.1.9.9.68.1.2.1.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryMember2kPorts': '1.3.6.1.4.1.9.9.68.1.2.1.1.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipTable': '1.3.6.1.4.1.9.9.68.1.2.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipEntry': '1.3.6.1.4.1.9.9.68.1.2.2.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlanType': '1.3.6.1.4.1.9.9.68.1.2.2.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlan': '1.3.6.1.4.1.9.9.68.1.2.2.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmPortStatus': '1.3.6.1.4.1.9.9.68.1.2.2.1.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlans': '1.3.6.1.4.1.9.9.68.1.2.2.1.4', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlans2k': '1.3.6.1.4.1.9.9.68.1.2.2.1.5', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlans3k': '1.3.6.1.4.1.9.9.68.1.2.2.1.6', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlans4k': '1.3.6.1.4.1.9.9.68.1.2.2.1.7', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtTable': '1.3.6.1.4.1.9.9.68.1.2.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtEntry': '1.3.6.1.4.1.9.9.68.1.2.3.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipPortRangeIndex': '1.3.6.1.4.1.9.9.68.1.2.3.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMembershipSummaryExtPorts': '1.3.6.1.4.1.9.9.68.1.2.3.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVlanCreationMode': '1.3.6.1.4.1.9.9.68.1.2.4', 'CISCO-VLAN-MEMBERSHIP-MIB::vmStatistics': '1.3.6.1.4.1.9.9.68.1.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVQPQueries': '1.3.6.1.4.1.9.9.68.1.3.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVQPResponses': '1.3.6.1.4.1.9.9.68.1.3.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChanges': '1.3.6.1.4.1.9.9.68.1.3.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVQPShutdown': '1.3.6.1.4.1.9.9.68.1.3.4', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVQPDenied': '1.3.6.1.4.1.9.9.68.1.3.5', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongDomain': '1.3.6.1.4.1.9.9.68.1.3.6', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVQPWrongVersion': '1.3.6.1.4.1.9.9.68.1.3.7', 'CISCO-VLAN-MEMBERSHIP-MIB::vmInsufficientResources': '1.3.6.1.4.1.9.9.68.1.3.8', 'CISCO-VLAN-MEMBERSHIP-MIB::vmStatus': '1.3.6.1.4.1.9.9.68.1.4', 'CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsEnabled': '1.3.6.1.4.1.9.9.68.1.4.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlan': '1.3.6.1.4.1.9.9.68.1.5', 
'CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanTable': '1.3.6.1.4.1.9.9.68.1.5.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanEntry': '1.3.6.1.4.1.9.9.68.1.5.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanId': '1.3.6.1.4.1.9.9.68.1.5.1.1.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVoiceVlanCdpVerifyEnable': '1.3.6.1.4.1.9.9.68.1.5.1.1.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmNotifications': '1.3.6.1.4.1.9.9.68.2', 'CISCO-VLAN-MEMBERSHIP-MIB::vmNotificationsPrefix': '1.3.6.1.4.1.9.9.68.2.0', 'CISCO-VLAN-MEMBERSHIP-MIB::vmVmpsChange': '1.3.6.1.4.1.9.9.68.2.0.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMIBConformance': '1.3.6.1.4.1.9.9.68.3', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMIBCompliances': '1.3.6.1.4.1.9.9.68.3.1', 'CISCO-VLAN-MEMBERSHIP-MIB::vmMIBGroups': '1.3.6.1.4.1.9.9.68.3.2'}
display_hints = {} |
# module for adapting templates on the fly if components are reused
# check that all reused components are defined consistently -> else: exception
def check_consistency(components):
for j1 in components:
for j2 in components: # compare all components
if j1 == j2 and j1.__dict__ != j2.__dict__: # same name and reuseID but different other attributes
raise ValueError("Inconsistent definition of reused component {}.".format(j1))
# check and return number of reuses
def reuses(component, arcs):
# count number of reuses for each port
times = set() # set => no duplicates
for k in range(component.inputs):
times.add(len([a for a in arcs if a.ends_in(k, component)]))
for k in range(component.outputs):
times.add(len([a for a in arcs if a.starts_at(k, component)]))
# check if each port was reused the same number of times (requirement/assumption)
if len(times) != 1:
raise ValueError("Not all ports of {} are (re-)used the same number of times (required).".format(component))
return times.pop()
# return adapted templates with adapted reused components and exactly one arc per port (allows proportional output)
def adapt_for_reuse(templates):
# create set of components and arcs
arcs = []
for t in templates:
arcs += t.arcs
# find reused components and adapt them
component_reuses = {} # dictionary with components-#reuses
reused_components = [] # list of all reused components (contains duplicates) for consistency check
for t in templates:
for j in t.components:
uses = reuses(j, arcs)
if uses > 1: # used by >1 => reuse
if j.source:
raise ValueError("Source component {} cannot be reused".format(j))
j.adapt(uses) # add ports and functions on the fly
component_reuses[j] = uses
reused_components.append(j)
check_consistency(reused_components) # check consistent def of reused components
# adjust arcs to use new ports
for j in component_reuses:
uses = component_reuses[j]
port_offset = 0
for t in templates:
# adjust/shift ingoing arcs by offset to correct port
arc_shifted = False
for a in t.arcs:
if a.dest == j:
a.dest_in += port_offset
arc_shifted = True
if a.source == j:
a.src_out += port_offset
arc_shifted = True
# increase the offset for the next template if an arc was shifted
if arc_shifted:
if port_offset >= uses: # arc was shifted too often: something went wrong
raise ValueError("Port offset {} too high. Should be < {} (#reuses).".format(port_offset, uses))
port_offset += 1
return templates
| def check_consistency(components):
for j1 in components:
for j2 in components:
if j1 == j2 and j1.__dict__ != j2.__dict__:
                raise ValueError('Inconsistent definition of reused component {}.'.format(j1))
def reuses(component, arcs):
times = set()
for k in range(component.inputs):
times.add(len([a for a in arcs if a.ends_in(k, component)]))
for k in range(component.outputs):
times.add(len([a for a in arcs if a.starts_at(k, component)]))
if len(times) != 1:
        raise ValueError('Not all ports of {} are (re-)used the same number of times (required).'.format(component))
return times.pop()
def adapt_for_reuse(templates):
arcs = []
for t in templates:
arcs += t.arcs
component_reuses = {}
reused_components = []
for t in templates:
for j in t.components:
uses = reuses(j, arcs)
if uses > 1:
if j.source:
                    raise ValueError('Source component {} cannot be reused'.format(j))
j.adapt(uses)
component_reuses[j] = uses
reused_components.append(j)
check_consistency(reused_components)
for j in component_reuses:
uses = component_reuses[j]
port_offset = 0
for t in templates:
arc_shifted = False
for a in t.arcs:
if a.dest == j:
a.dest_in += port_offset
arc_shifted = True
if a.source == j:
a.src_out += port_offset
arc_shifted = True
if arc_shifted:
if port_offset >= uses:
                    raise ValueError('Port offset {} too high. Should be < {} (#reuses).'.format(port_offset, uses))
port_offset += 1
return templates |
"""Declare runtime dependencies
These are needed for local dev, and users must install them as well.
See https://docs.bazel.build/versions/main/skylark/deploying.html#dependencies
"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
# WARNING: any changes in this function may be BREAKING CHANGES for users
# because we'll fetch a dependency which may be different from one that
# they were previously fetching later in their WORKSPACE setup, and now
# ours took precedence. Such breakages are challenging for users, so any
# changes in this function should be marked as BREAKING in the commit message
# and released only in semver majors.
def rules_vue_dependencies():
# The minimal version of bazel_skylib we require
maybe(
http_archive,
name = "bazel_skylib",
sha256 = "c6966ec828da198c5d9adbaa94c05e3a1c7f21bd012a0b29ba8ddbccb2c93b0d",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz",
],
)
maybe(
http_archive,
name = "build_bazel_rules_nodejs",
sha256 = "4913ea835810c195df24d3a929315c29a64566cc48e409d8b0f35008b4e02e59",
urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/4.4.4/rules_nodejs-4.4.4.tar.gz"],
)
| """Declare runtime dependencies
These are needed for local dev, and users must install them as well.
See https://docs.bazel.build/versions/main/skylark/deploying.html#dependencies
"""
load('@bazel_tools//tools/build_defs/repo:http.bzl', 'http_archive')
load('@bazel_tools//tools/build_defs/repo:utils.bzl', 'maybe')
def rules_vue_dependencies():
maybe(http_archive, name='bazel_skylib', sha256='c6966ec828da198c5d9adbaa94c05e3a1c7f21bd012a0b29ba8ddbccb2c93b0d', urls=['https://github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz', 'https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.1.1/bazel-skylib-1.1.1.tar.gz'])
maybe(http_archive, name='build_bazel_rules_nodejs', sha256='4913ea835810c195df24d3a929315c29a64566cc48e409d8b0f35008b4e02e59', urls=['https://github.com/bazelbuild/rules_nodejs/releases/download/4.4.4/rules_nodejs-4.4.4.tar.gz']) |
def findDecision(obj): #obj[0]: Passanger, obj[1]: Time, obj[2]: Coupon, obj[3]: Gender, obj[4]: Age, obj[5]: Education, obj[6]: Occupation, obj[7]: Bar, obj[8]: Coffeehouse, obj[9]: Restaurant20to50, obj[10]: Direction_same, obj[11]: Distance
# {"feature": "Age", "instances": 51, "metric_value": 0.9662, "depth": 1}
if obj[4]>0:
# {"feature": "Occupation", "instances": 44, "metric_value": 0.9024, "depth": 2}
if obj[6]>1:
# {"feature": "Bar", "instances": 33, "metric_value": 0.9834, "depth": 3}
if obj[7]<=1.0:
# {"feature": "Education", "instances": 22, "metric_value": 0.994, "depth": 4}
if obj[5]>0:
# {"feature": "Passanger", "instances": 17, "metric_value": 0.9774, "depth": 5}
if obj[0]<=2:
# {"feature": "Time", "instances": 11, "metric_value": 0.994, "depth": 6}
if obj[1]<=2:
# {"feature": "Restaurant20to50", "instances": 8, "metric_value": 0.9544, "depth": 7}
if obj[9]>0.0:
# {"feature": "Coffeehouse", "instances": 6, "metric_value": 0.65, "depth": 8}
if obj[8]<=2.0:
return 'True'
elif obj[8]>2.0:
return 'False'
else: return 'False'
elif obj[9]<=0.0:
return 'False'
else: return 'False'
elif obj[1]>2:
return 'False'
else: return 'False'
elif obj[0]>2:
# {"feature": "Gender", "instances": 6, "metric_value": 0.65, "depth": 6}
if obj[3]>0:
return 'True'
elif obj[3]<=0:
# {"feature": "Time", "instances": 2, "metric_value": 1.0, "depth": 7}
if obj[1]<=2:
return 'True'
elif obj[1]>2:
return 'False'
else: return 'False'
else: return 'True'
else: return 'True'
elif obj[5]<=0:
return 'False'
else: return 'False'
elif obj[7]>1.0:
# {"feature": "Coupon", "instances": 11, "metric_value": 0.684, "depth": 4}
if obj[2]>2:
return 'True'
elif obj[2]<=2:
# {"feature": "Direction_same", "instances": 4, "metric_value": 1.0, "depth": 5}
if obj[10]>0:
return 'True'
elif obj[10]<=0:
return 'False'
else: return 'False'
else: return 'True'
else: return 'True'
elif obj[6]<=1:
return 'True'
else: return 'True'
elif obj[4]<=0:
# {"feature": "Passanger", "instances": 7, "metric_value": 0.5917, "depth": 2}
if obj[0]>0:
return 'False'
elif obj[0]<=0:
return 'True'
else: return 'True'
else: return 'False'
| def find_decision(obj):
if obj[4] > 0:
if obj[6] > 1:
if obj[7] <= 1.0:
if obj[5] > 0:
if obj[0] <= 2:
if obj[1] <= 2:
if obj[9] > 0.0:
if obj[8] <= 2.0:
return 'True'
elif obj[8] > 2.0:
return 'False'
else:
return 'False'
elif obj[9] <= 0.0:
return 'False'
else:
return 'False'
elif obj[1] > 2:
return 'False'
else:
return 'False'
elif obj[0] > 2:
if obj[3] > 0:
return 'True'
elif obj[3] <= 0:
if obj[1] <= 2:
return 'True'
elif obj[1] > 2:
return 'False'
else:
return 'False'
else:
return 'True'
else:
return 'True'
elif obj[5] <= 0:
return 'False'
else:
return 'False'
elif obj[7] > 1.0:
if obj[2] > 2:
return 'True'
elif obj[2] <= 2:
if obj[10] > 0:
return 'True'
elif obj[10] <= 0:
return 'False'
else:
return 'False'
else:
return 'True'
else:
return 'True'
elif obj[6] <= 1:
return 'True'
else:
return 'True'
elif obj[4] <= 0:
if obj[0] > 0:
return 'False'
elif obj[0] <= 0:
return 'True'
else:
return 'True'
else:
return 'False' |
"""
A simple python module for converting kilometers to miles or vice versa.
So simple that it doesn't even have any dependencies.
"""
def kilometers_to_miles(dist_in_km):
"""
Actually does the conversion of distance from km to mi.
PARAMETERS
--------
dist_in_km: float
A distance in kilometers.
RETURNS
-------
dist_in_mi: float
The same distance converted to miles.
"""
return (dist_in_km)/1.609344
def miles_to_kilometers(dist_in_mi):
"""
Actually does the conversion of distance from mi to km.
PARAMETERS
----------
dist_in_mi: float
A distance to miles.
RETURNS
-------
dist_in_km: float
The same distance converted to kilometers.
"""
return (dist_in_mi)*1.609344
| """
A simple python module for converting kilometers to miles or vice versa.
So simple that it doesn't even have any dependencies.
"""
def kilometers_to_miles(dist_in_km):
"""
Actually does the conversion of distance from km to mi.
PARAMETERS
--------
dist_in_km: float
A distance in kilometers.
RETURNS
-------
dist_in_mi: float
The same distance converted to miles.
"""
return dist_in_km / 1.609344
def miles_to_kilometers(dist_in_mi):
"""
Actually does the conversion of distance from mi to km.
PARAMETERS
----------
dist_in_mi: float
A distance to miles.
RETURNS
-------
dist_in_km: float
The same distance converted to kilometers.
"""
return dist_in_mi * 1.609344 |
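# Quick sanity checks for the converters above (sample values assumed):
print(kilometers_to_miles(1.609344))          # -> 1.0 by definition of the factor
print(miles_to_kilometers(1.0))               # -> 1.609344
print(round(kilometers_to_miles(42.195), 3))  # marathon distance, roughly 26.219 miles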
# -*- coding: utf-8 -*-
PIXIVUTIL_VERSION = '20191220-beta1'
PIXIVUTIL_LINK = 'https://github.com/Nandaka/PixivUtil2/releases'
PIXIVUTIL_DONATE = 'https://bit.ly/PixivUtilDonation'
# Log Settings
PIXIVUTIL_LOG_FILE = 'pixivutil.log'
PIXIVUTIL_LOG_SIZE = 10485760
PIXIVUTIL_LOG_COUNT = 10
PIXIVUTIL_LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
# Download Results
PIXIVUTIL_NOT_OK = -1
PIXIVUTIL_OK = 0
PIXIVUTIL_SKIP_OLDER = 1
PIXIVUTIL_SKIP_BLACKLIST = 2
PIXIVUTIL_KEYBOARD_INTERRUPT = 3
PIXIVUTIL_SKIP_DUPLICATE = 4
PIXIVUTIL_SKIP_LOCAL_LARGER = 5
PIXIVUTIL_CHECK_DOWNLOAD = 6
PIXIVUTIL_ABORTED = 9999
BUFFER_SIZE = 8192
| pixivutil_version = '20191220-beta1'
pixivutil_link = 'https://github.com/Nandaka/PixivUtil2/releases'
pixivutil_donate = 'https://bit.ly/PixivUtilDonation'
pixivutil_log_file = 'pixivutil.log'
pixivutil_log_size = 10485760
pixivutil_log_count = 10
pixivutil_log_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
pixivutil_not_ok = -1
pixivutil_ok = 0
pixivutil_skip_older = 1
pixivutil_skip_blacklist = 2
pixivutil_keyboard_interrupt = 3
pixivutil_skip_duplicate = 4
pixivutil_skip_local_larger = 5
pixivutil_check_download = 6
pixivutil_aborted = 9999
buffer_size = 8192 |
'''
Created on 10/11/2017
@author: [email protected]
Pledge: I pledge my honor that I have abided by the Stevens Honor System -Joshua Schmidt
CS115 - Lab 6
'''
def isOdd(n):
'''Returns whether or not the integer argument is odd.'''
#question 1: base_2 of 42: 101010
if n == 0:
return False
if n % 2 != 0:
return True
return False
#question 2: if given an odd base-10 number, the least-significant bit of its base-2 representation will be a 1.
#question 3: if given an even base-10 number, the least-significant bit of its base-2 representation will be a 0.
#This is because 2^0 = 1, and that is the only way to make an odd number, by having a 1 in the least significant bit.
#question 4: By eliminating the least significant bit, the original number decreases by a factor of 2, if the bit is a 0.
#if the least significant bit is a 1, the original number is decreased by a factor of 2, - 1.
#question 5: If N is odd, the base-2 of N is Y + "1". If N is even, the base-2 of N is Y + "0".
#This is because to get from N base-10 to N base-2 you do successive division by 2, keeping the remainder, so given
#the base-2 of all of the division except for the first, one must put that remainder in front, hence the answer given.
def numToBinary(n):
'''Precondition: integer argument is non-negative.
Returns the string with the binary representation of non-negative integer n.
If n is 0, the empty string is returned.'''
if n == 0:
return ""
elif isOdd(n):
return numToBinary(n // 2) + "1"
else: return numToBinary(n // 2) + "0"
#print(numToBinary(15))
def binaryToNum(s):
'''Precondition: s is a string of 0s and 1s.
Returns the integer corresponding to the binary representation in s.
Note: the empty string represents 0.'''
if s == "":
return 0
return int(s[0])*(2**(len(s)-1)) + binaryToNum(s[1:])
#print(binaryToNum("1111"))
def addBin(s, numAdd, carry = 0):
"""adds 2 binary numbers"""
if s == "" or numAdd == "":
if carry == 0:
return s + numAdd
place = carry
carry = 0
if s != "" and s[-1] == "1":
carry = place
place = 1 - place
if numAdd != "" and numAdd[-1] == "1":
carry += place
place = 1 - place
return addBin(s[:-1], numAdd[:-1], carry) + str(place)
#print(addBin("100", "001", 0))
def makeEightBit(a):
"""makes a binary number 8 bit"""
if len(a) == 8:
print(str(a))
return str(a)
elif len(a) > 8:
#print(a[(len(a)-8):])
makeEightBit(a[(len(a)-8):])
else:
makeEightBit("0" + a)
return ""
def increment(s):
'''Precondition: s is a string of 8 bits.
Returns the binary representation of binaryToNum(s) + 1.'''
#numAdd = "00000001"
dec = binaryToNum(s)
dec += 1
answer = numToBinary(dec)
#print(answer)
if len(answer) > 8:
return answer[(len(answer)-8):]
answer = (8-len(answer))*"0" + answer
return answer
#print(increment("1110100000"))
def count(s, n):
'''Precondition: s is an 8-bit string and n >= 0.
Prints s and its n successors.'''
if n == 0:
print(s)
return ""
print(s)
return count(increment(s), n-1)
#print(count("11111110", 5))
#print("a")
def numToTernary(n):
'''Precondition: integer argument is non-negative.
Returns the string with the ternary representation of non-negative integer
n. If n is 0, the empty string is returned.'''
if n == 0:
return ""
return numToTernary(n // 3) + str(n % 3)
#print(numToTernary(42))
def ternaryToNum(s):
'''Precondition: s is a string of 0s, 1s, and 2s.
Returns the integer corresponding to the ternary representation in s.
Note: the empty string represents 0.'''
if s == "":
return 0
return int(s[0])*(3**(len(s)-1)) + ternaryToNum(s[1:])
#print(ternaryToNum('12211010'))
| """
Created on 10/11/2017
@author: [email protected]
Pledge: I pledge my honor that I have abided by the Stevens Honor System -Joshua Schmidt
CS115 - Lab 6
"""
def is_odd(n):
"""Returns whether or not the integer argument is odd."""
if n == 0:
return False
if n % 2 != 0:
return True
return False
def num_to_binary(n):
"""Precondition: integer argument is non-negative.
Returns the string with the binary representation of non-negative integer n.
If n is 0, the empty string is returned."""
if n == 0:
return ''
elif is_odd(n):
return num_to_binary(n // 2) + '1'
else:
return num_to_binary(n // 2) + '0'
def binary_to_num(s):
"""Precondition: s is a string of 0s and 1s.
Returns the integer corresponding to the binary representation in s.
Note: the empty string represents 0."""
if s == '':
return 0
return int(s[0]) * 2 ** (len(s) - 1) + binary_to_num(s[1:])
def add_bin(s, numAdd, carry=0):
"""adds 2 binary numbers"""
if s == '' or numAdd == '':
if carry == 0:
return s + numAdd
place = carry
carry = 0
if s != '' and s[-1] == '1':
carry = place
place = 1 - place
if numAdd != '' and numAdd[-1] == '1':
carry += place
place = 1 - place
return add_bin(s[:-1], numAdd[:-1], carry) + str(place)
def make_eight_bit(a):
"""makes a binary number 8 bit"""
if len(a) == 8:
print(str(a))
return str(a)
elif len(a) > 8:
make_eight_bit(a[len(a) - 8:])
else:
make_eight_bit('0' + a)
return ''
def increment(s):
"""Precondition: s is a string of 8 bits.
Returns the binary representation of binaryToNum(s) + 1."""
dec = binary_to_num(s)
dec += 1
answer = num_to_binary(dec)
if len(answer) > 8:
return answer[len(answer) - 8:]
answer = (8 - len(answer)) * '0' + answer
return answer
def count(s, n):
"""Precondition: s is an 8-bit string and n >= 0.
Prints s and its n successors."""
if n == 0:
print(s)
return ''
print(s)
return count(increment(s), n - 1)
def num_to_ternary(n):
"""Precondition: integer argument is non-negative.
Returns the string with the ternary representation of non-negative integer
n. If n is 0, the empty string is returned."""
if n == 0:
return ''
return num_to_ternary(n // 3) + str(n % 3)
def ternary_to_num(s):
"""Precondition: s is a string of 0s, 1s, and 2s.
Returns the integer corresponding to the ternary representation in s.
Note: the empty string represents 0."""
if s == '':
return 0
return int(s[0]) * 3 ** (len(s) - 1) + ternary_to_num(s[1:]) |
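# Illustrative round trips for the converters above (sample values assumed):
print(num_to_binary(42))        # -> '101010'
print(binary_to_num('101010'))  # -> 42
print(increment('00101001'))    # -> '00101010' (41 + 1 = 42, padded back to 8 bits)
print(num_to_ternary(42))       # -> '1120' (1*27 + 1*9 + 2*3 + 0)
print(ternary_to_num('1120'))   # -> 42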
from typing import List
class Solution:
def destCity(self, paths: List[List[str]]) -> str:
bads = set()
cities = set()
for u, v in paths:
cities.add(u)
cities.add(v)
bads.add(u)
ans = cities - bads
return list(ans)[0]
| from typing import List
class Solution:
def dest_city(self, paths: List[List[str]]) -> str:
bads = set()
cities = set()
for (u, v) in paths:
cities.add(u)
cities.add(v)
bads.add(u)
ans = cities - bads
return list(ans)[0] |
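# Usage sketch (the paths list is an assumed sample): the destination is the only
# city that never appears as the starting point of any path.
print(Solution().dest_city([['London', 'New York'], ['New York', 'Lima'], ['Lima', 'Sao Paulo']]))  # -> 'Sao Paulo'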
#!/usr/bin/env python3
# The format of your own localizable method.
# This is an example of '"string".localized'
SUFFIX = '.localized'
KEY = r'"(?:\\.|[^"\\])*"'
LOCALIZABLE_RE = r'%s%s' % (KEY, SUFFIX)
# Specify the path of localizable files in project.
LOCALIZABLE_FILE_PATH = ''
LOCALIZABLE_FILE_NAMES = ['Localizable']
LOCALIZABLE_FILE_TYPES = ['strings']
# File types of source file.
SEARCH_TYPES = ['swift', 'm', 'json']
SOURCE_FILE_EXCLUSIVE_PATHS = [
'Assets.xcassets', 'Carthage', 'ThirdParty',
'Pods', 'Media.xcassets', 'Framework', 'bin']
LOCALIZABLE_FILE_EXCLUSIVE_PATHS = ['Carthage', 'ThirdParty',
'Pods', 'Framework', 'bin']
LOCALIZABLE_FORMAT_RE = r'"(?:\\.|[^"\\])*"\s*=\s*"(?:\\.|[^"\\])*";\n'
DEFAULT_TARGET_PATH = 'generated.strings'
| suffix = '.localized'
key = '"(?:\\\\.|[^"\\\\])*"'
localizable_re = '%s%s' % (key, suffix)
localizable_file_path = ''
localizable_file_names = ['Localizable']
localizable_file_types = ['strings']
search_types = ['swift', 'm', 'json']
source_file_exclusive_paths = ['Assets.xcassets', 'Carthage', 'ThirdParty', 'Pods', 'Media.xcassets', 'Framework', 'bin']
localizable_file_exclusive_paths = ['Carthage', 'ThirdParty', 'Pods', 'Framework', 'bin']
localizable_format_re = '"(?:\\\\.|[^"\\\\])*"\\s*=\\s*"(?:\\\\.|[^"\\\\])*";\\n'
default_target_path = 'generated.strings' |
def longest_common_prefix(s1: str, s2: str) -> str:
"""
Finds the longest common prefix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common prefix between s1 and s2
>>> longest_common_prefix("ACTA", "GCCT")
''
>>> longest_common_prefix("ACTA", "ACT")
'ACT'
>>> longest_common_prefix("ACT", "ACTA")
'ACT'
>>> longest_common_prefix("GATA", "GAAT")
'GA'
>>> longest_common_prefix("ATGA", "")
''
>>> longest_common_prefix("", "GCCT")
''
>>> longest_common_prefix("GCCT", "GCCT")
'GCCT'
"""
i = 0
while i < min(len(s1), len(s2)):
if s1[i] != s2[i]:
break
i += 1
return s1[:i]
def longest_common_suffix(s1: str, s2: str) -> str:
"""
Finds the longest common suffix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common suffix between s1 and s2
>>> longest_common_suffix("ACTA", "GCCT")
''
>>> longest_common_suffix("ACTA", "CTA")
'CTA'
>>> longest_common_suffix("CTA", "ACTA")
'CTA'
>>> longest_common_suffix("GATAT", "GAATAT")
'ATAT'
>>> longest_common_suffix("ACTA", "")
''
>>> longest_common_suffix("", "GCCT")
''
>>> longest_common_suffix("GCCT", "GCCT")
'GCCT'
"""
return longest_common_prefix(s1[::-1], s2[::-1])[::-1]
def find_hamming_distance(s1: str, s2: str) -> int:
"""Compute the Hamming distance between two strings of equal length
>>> find_hamming_distance("ATG", "ATC")
1
>>> find_hamming_distance("ATG", "TGA")
3
>>> find_hamming_distance("A", "A")
0
>>> find_hamming_distance("ATG", "ATG")
0
>>> find_hamming_distance("", "")
0
>>> find_hamming_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
"""
assert len(s1) == len(s2)
return sum(1 for i in range(len(s1)) if s1[i] != s2[i])
def find_levenshtein_distance(s1: str, s2: str) -> int:
"""Compute the Levenshtein distance between two strings (i.e., minimum number
of edits including substitution, insertion and deletion needed in a string to
turn it into another)
>>> find_levenshtein_distance("AT", "")
2
>>> find_levenshtein_distance("AT", "ATC")
1
>>> find_levenshtein_distance("ATG", "ATC")
1
>>> find_levenshtein_distance("ATG", "TGA")
2
>>> find_levenshtein_distance("ATG", "ATG")
0
>>> find_levenshtein_distance("", "")
0
>>> find_levenshtein_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
>>> find_levenshtein_distance("TGGCCGCGCAAAAACAGC", "TGACCGCGCAAAACAGC")
2
>>> find_levenshtein_distance("GCGTATGCGGCTAACGC", "GCTATGCGGCTATACGC")
2
"""
# initializing a matrix for with `len(s1) + 1` rows and `len(s2) + 1` columns
D = [[0 for x in range(len(s2) + 1)] for y in range(len(s1) + 1)]
# fill first column
for i in range(len(s1) + 1):
D[i][0] = i
# fill first row
for j in range(len(s2) + 1):
D[0][j] = j
# fill rest of the matrix
for i in range(1, len(s1) + 1):
for j in range(1, len(s2) + 1):
distance_left = D[i][j - 1] + 1 # deletion in pattern
distance_above = D[i - 1][j] + 1 # insertion in pattern
distance_diagonal = D[i - 1][j - 1] + (
s1[i - 1] != s2[j - 1]
) # substitution
D[i][j] = min(distance_left, distance_above, distance_diagonal)
# return the last value (i.e., right most bottom value)
return D[-1][-1]
| def longest_common_prefix(s1: str, s2: str) -> str:
"""
Finds the longest common prefix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common prefix between s1 and s2
>>> longest_common_prefix("ACTA", "GCCT")
''
>>> longest_common_prefix("ACTA", "ACT")
'ACT'
>>> longest_common_prefix("ACT", "ACTA")
'ACT'
>>> longest_common_prefix("GATA", "GAAT")
'GA'
>>> longest_common_prefix("ATGA", "")
''
>>> longest_common_prefix("", "GCCT")
''
>>> longest_common_prefix("GCCT", "GCCT")
'GCCT'
"""
i = 0
while i < min(len(s1), len(s2)):
if s1[i] != s2[i]:
break
i += 1
return s1[:i]
def longest_common_suffix(s1: str, s2: str) -> str:
"""
Finds the longest common suffix (substring) given two strings
s1: First string to compare
s2: Second string to compare
Returns:
Longest common suffix between s1 and s2
>>> longest_common_suffix("ACTA", "GCCT")
''
>>> longest_common_suffix("ACTA", "CTA")
'CTA'
>>> longest_common_suffix("CTA", "ACTA")
'CTA'
>>> longest_common_suffix("GATAT", "GAATAT")
'ATAT'
>>> longest_common_suffix("ACTA", "")
''
>>> longest_common_suffix("", "GCCT")
''
>>> longest_common_suffix("GCCT", "GCCT")
'GCCT'
"""
return longest_common_prefix(s1[::-1], s2[::-1])[::-1]
def find_hamming_distance(s1: str, s2: str) -> int:
"""Compute the Hamming distance between two strings of equal length
>>> find_hamming_distance("ATG", "ATC")
1
>>> find_hamming_distance("ATG", "TGA")
3
>>> find_hamming_distance("A", "A")
0
>>> find_hamming_distance("ATG", "ATG")
0
>>> find_hamming_distance("", "")
0
>>> find_hamming_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
"""
assert len(s1) == len(s2)
return sum((1 for i in range(len(s1)) if s1[i] != s2[i]))
def find_levenshtein_distance(s1: str, s2: str) -> int:
"""Compute the Levenshtein distance between two strings (i.e., minimum number
of edits including substitution, insertion and deletion needed in a string to
turn it into another)
>>> find_levenshtein_distance("AT", "")
2
>>> find_levenshtein_distance("AT", "ATC")
1
>>> find_levenshtein_distance("ATG", "ATC")
1
>>> find_levenshtein_distance("ATG", "TGA")
2
>>> find_levenshtein_distance("ATG", "ATG")
0
>>> find_levenshtein_distance("", "")
0
>>> find_levenshtein_distance("GAGGTAGCGGCGTTTAAC", "GTGGTAACGGGGTTTAAC")
3
>>> find_levenshtein_distance("TGGCCGCGCAAAAACAGC", "TGACCGCGCAAAACAGC")
2
>>> find_levenshtein_distance("GCGTATGCGGCTAACGC", "GCTATGCGGCTATACGC")
2
"""
    d = [[0 for x in range(len(s2) + 1)] for y in range(len(s1) + 1)]
    for i in range(len(s1) + 1):
        d[i][0] = i
    for j in range(len(s2) + 1):
        d[0][j] = j
    for i in range(1, len(s1) + 1):
        for j in range(1, len(s2) + 1):
            distance_left = d[i][j - 1] + 1
            distance_above = d[i - 1][j] + 1
            distance_diagonal = d[i - 1][j - 1] + (s1[i - 1] != s2[j - 1])
            d[i][j] = min(distance_left, distance_above, distance_diagonal)
    return d[-1][-1]
def task_pos_args():
def show_params(param1, pos):
print('param1 is: {0}'.format(param1))
for index, pos_arg in enumerate(pos):
print('positional-{0}: {1}'.format(index, pos_arg))
return {'actions':[(show_params,)],
'params':[{'name':'param1',
'short':'p',
'default':'default value'},
],
'pos_arg': 'pos',
'verbosity': 2,
}
| def task_pos_args():
def show_params(param1, pos):
print('param1 is: {0}'.format(param1))
for (index, pos_arg) in enumerate(pos):
print('positional-{0}: {1}'.format(index, pos_arg))
return {'actions': [(show_params,)], 'params': [{'name': 'param1', 'short': 'p', 'default': 'default value'}], 'pos_arg': 'pos', 'verbosity': 2} |
# Lets create a linked list that has the following elements
'''
1. FE
2. SE
3. TE
4. BE
'''
# Creating a Node class to create individual Nodes
class Node:
def __init__(self,data):
self.__data = data
self.__next = None
def get_data(self):
return self.__data
def set_data(self, data):
self.__data = data
def get_next(self):
return self.__next
def set_next(self,next_node):
self.__next = next_node
class LinkedList:
def __init__(self):
self.__head = None
self.__tail = None
def get_head(self):
return self.__head
def get_tail(self):
return self.__tail
# ADDING ELEMENT IN THE LINKED LIST
def add(self,data):
new_node = Node(data)
if(self.__head==None):
self.__head=self.__tail=new_node
else:
self.__tail.set_next(new_node)
self.__tail=new_node
number_list= LinkedList()
number_list.add("FE")
number_list.add("SE")
number_list.add("TE")
number_list.add("BE")
| """
1. FE
2. SE
3. TE
4. BE
"""
class Node:
def __init__(self, data):
self.__data = data
self.__next = None
def get_data(self):
return self.__data
def set_data(self, data):
self.__data = data
def get_next(self):
return self.__next
def set_next(self, next_node):
self.__next = next_node
class Linkedlist:
def __init__(self):
self.__head = None
self.__tail = None
def get_head(self):
return self.__head
def get_tail(self):
return self.__tail
def add(self, data):
        new_node = Node(data)
if self.__head == None:
self.__head = self.__tail = new_node
else:
self.__tail.set_next(new_node)
self.__tail = new_node
number_list = Linkedlist()
number_list.add('FE')
number_list.add('SE')
number_list.add('TE')
number_list.add('BE') |
ten_things = "Apples Oranges cows Telephone Light Sugar"
print ("Wait there are not 10 things in that list. Let's fix")
stuff = ten_things.split(' ')
more_stuff = {"Day", "Night", "Song", "Firebee",
"Corn", "Banana", "Girl", "Boy"}
while len(stuff) !=10:
next_one = more_stuff.pop()
print("Adding: ", next_one)
stuff.append(next_one)
print (f"There are {len(stuff)} items n ow.")
print ("There we go : ", stuff)
print ("Let's do some things with stuff.")
print (stuff[1])
print (stuff[-1]) # whoa! cool!
print (stuff.pop())
print (' '.join(stuff)) # what? cool !
print ('#'.join(stuff[3:5])) #super stealler!
| ten_things = 'Apples Oranges cows Telephone Light Sugar'
print("Wait there are not 10 things in that list. Let's fix")
stuff = ten_things.split(' ')
more_stuff = {'Day', 'Night', 'Song', 'Firebee', 'Corn', 'Banana', 'Girl', 'Boy'}
while len(stuff) != 10:
next_one = more_stuff.pop()
print('Adding: ', next_one)
stuff.append(next_one)
print(f'There are {len(stuff)} items n ow.')
print('There we go : ', stuff)
print("Let's do some things with stuff.")
print(stuff[1])
print(stuff[-1])
print(stuff.pop())
print(' '.join(stuff))
print('#'.join(stuff[3:5])) |
"""
Given an input string, reverse the string word by word.
For example,
Given s = "the sky is blue",
return "blue is sky the".
For C programmers: Try to solve it in-place in O(1) space.
Clarification:
* What constitutes a word?
A sequence of non-space characters constitutes a word.
* Could the input string contain leading or trailing spaces?
Yes. However, your reversed string should not contain leading or trailing spaces.
* How about multiple spaces between two words?
Reduce them to a single space in the reversed string.
https://leetcode.com/problems/reverse-words-in-a-string/
"""
class Solution:
# @param s, a string
# @return a string
def reverseWords(self, s):
elements = s.split(" ")
elements = [x for x in elements if x != ""]
elements = elements[::-1]
return " ".join(elements) | """
Given an input string, reverse the string word by word.
For example,
Given s = "the sky is blue",
return "blue is sky the".
For C programmers: Try to solve it in-place in O(1) space.
Clarification:
* What constitutes a word?
A sequence of non-space characters constitutes a word.
* Could the input string contain leading or trailing spaces?
Yes. However, your reversed string should not contain leading or trailing spaces.
* How about multiple spaces between two words?
Reduce them to a single space in the reversed string.
https://leetcode.com/problems/reverse-words-in-a-string/
"""
class Solution:
def reverse_words(self, s):
elements = s.split(' ')
elements = [x for x in elements if x != '']
elements = elements[::-1]
return ' '.join(elements) |
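# Usage sketch (example sentence assumed from the problem statement above):
print(Solution().reverse_words('  the sky   is  blue '))  # -> 'blue is sky the'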
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python equivalent of jdbc_type.h.
Python definition of the JDBC type constant values defined in Java class
java.sql.Types. Since the values don't fall into the range allowed by
a protocol buffer enum, we use Python constants instead.
If you update this, update jdbc_type.py also.
"""
BIT = -7
TINYINT = -6
SMALLINT = 5
INTEGER = 4
BIGINT = -5
FLOAT = 6
REAL = 7
DOUBLE = 8
NUMERIC = 2
DECIMAL = 3
CHAR = 1
VARCHAR = 12
LONGVARCHAR = -1
DATE = 91
TIME = 92
TIMESTAMP = 93
BINARY = -2
VARBINARY = -3
LONGVARBINARY = -4
NULL = 0
OTHER = 1111
JAVA_OBJECT = 2000
DISTINCT = 2001
STRUCT = 2002
ARRAY = 2003
BLOB = 2004
CLOB = 2005
REF = 2006
DATALINK = 70
BOOLEAN = 16
ROWID = -8
NCHAR = -15
NVARCHAR = -9
LONGNVARCHAR = -16
NCLOB = 2011
SQLXML = 2009
| """Python equivalent of jdbc_type.h.
Python definition of the JDBC type constant values defined in Java class
java.sql.Types. Since the values don't fall into the range allowed by
a protocol buffer enum, we use Python constants instead.
If you update this, update jdbc_type.py also.
"""
bit = -7
tinyint = -6
smallint = 5
integer = 4
bigint = -5
float = 6
real = 7
double = 8
numeric = 2
decimal = 3
char = 1
varchar = 12
longvarchar = -1
date = 91
time = 92
timestamp = 93
binary = -2
varbinary = -3
longvarbinary = -4
null = 0
other = 1111
java_object = 2000
distinct = 2001
struct = 2002
array = 2003
blob = 2004
clob = 2005
ref = 2006
datalink = 70
boolean = 16
rowid = -8
nchar = -15
nvarchar = -9
longnvarchar = -16
nclob = 2011
sqlxml = 2009 |
_base_ = './pspnet_r50-d8_512x512_80k_loveda.py'
model = dict(
backbone=dict(
depth=18,
init_cfg=dict(
type='Pretrained', checkpoint='open-mmlab://resnet18_v1c')),
decode_head=dict(
in_channels=512,
channels=128,
),
auxiliary_head=dict(in_channels=256, channels=64))
| _base_ = './pspnet_r50-d8_512x512_80k_loveda.py'
model = dict(backbone=dict(depth=18, init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnet18_v1c')), decode_head=dict(in_channels=512, channels=128), auxiliary_head=dict(in_channels=256, channels=64)) |
# Easy
# https://leetcode.com/problems/palindrome-number/
# Time Complexity: O(log(x) to base 10)
# Space Complexity: O(1)
class Solution:
def isPalindrome(self, x: int) -> bool:
temp = x
rev = 0
while temp > 0:
rev = rev * 10 + temp % 10
temp //= 10
return rev == x | class Solution:
def is_palindrome(self, x: int) -> bool:
temp = x
rev = 0
while temp > 0:
rev = rev * 10 + temp % 10
temp //= 10
return rev == x |
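# Usage sketch (sample inputs assumed):
print(Solution().is_palindrome(121))   # -> True
print(Solution().is_palindrome(123))   # -> False
print(Solution().is_palindrome(-121))  # -> False; the loop never runs for negatives, so rev stays 0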
"""Constants about the Gro ontology that can be imported and re-used anywhere."""
REGION_LEVELS = {
'world': 1,
'continent': 2,
'country': 3,
'province': 4, # Equivalent to state in the United States
'district': 5, # Equivalent to county in the United States
'city': 6,
'market': 7,
'other': 8,
'coordinate': 9
}
ENTITY_TYPES_PLURAL = ['metrics', 'items', 'regions', 'frequencies', 'sources', 'units']
DATA_SERIES_UNIQUE_TYPES_ID = [
'metric_id',
'item_id',
'region_id',
'partner_region_id',
'frequency_id',
'source_id'
]
ENTITY_KEY_TO_TYPE = {
'metric_id': 'metrics',
'item_id': 'items',
'region_id': 'regions',
'partner_region_id': 'regions',
'source_id': 'sources',
'frequency_id': 'frequencies',
'unit_id': 'units'
}
DATA_POINTS_UNIQUE_COLS = DATA_SERIES_UNIQUE_TYPES_ID + [
'reporting_date',
'start_date',
'end_date'
]
| """Constants about the Gro ontology that can be imported and re-used anywhere."""
region_levels = {'world': 1, 'continent': 2, 'country': 3, 'province': 4, 'district': 5, 'city': 6, 'market': 7, 'other': 8, 'coordinate': 9}
entity_types_plural = ['metrics', 'items', 'regions', 'frequencies', 'sources', 'units']
data_series_unique_types_id = ['metric_id', 'item_id', 'region_id', 'partner_region_id', 'frequency_id', 'source_id']
entity_key_to_type = {'metric_id': 'metrics', 'item_id': 'items', 'region_id': 'regions', 'partner_region_id': 'regions', 'source_id': 'sources', 'frequency_id': 'frequencies', 'unit_id': 'units'}
data_points_unique_cols = data_series_unique_types_id + ['reporting_date', 'start_date', 'end_date']
# Use zip() to create a new variable called names_and_dogs_names that combines owners and dogs_names lists into a zip object.
# Then, create a new variable named list_of_names_and_dogs_names by calling the list() function on names_and_dogs_names.
# Print list_of_names_and_dogs_names.
owners = ["Jenny", "Alexus", "Sam", "Grace"]
dogs_names = ["Elphonse", "Dr. Doggy DDS", "Carter", "Ralph"]
names_and_dogs_names = zip(owners, dogs_names)
list_of_names_and_dogs_names = list(names_and_dogs_names)
print(list_of_names_and_dogs_names) | owners = ['Jenny', 'Alexus', 'Sam', 'Grace']
dogs_names = ['Elphonse', 'Dr. Doggy DDS', 'Carter', 'Ralph']
names_and_dogs_names = zip(owners, dogs_names)
list_of_names_and_dogs_names = list(names_and_dogs_names)
print(list_of_names_and_dogs_names) |
# -*- coding: utf-8 -*-
def ordered_set(iter):
"""Creates an ordered set
@param iter: list or tuple
@return: list with unique values
"""
final = []
for i in iter:
if i not in final:
final.append(i)
return final
def class_slots(ob):
"""Get object attributes from child class attributes
@param ob: Defaults object
@type ob: Defaults
@return: Tuple of slots
"""
current_class = type(ob).__mro__[0]
if not getattr(current_class, 'allslots', None) \
and current_class != object:
_allslots = [list(getattr(cls, '__slots__', []))
for cls in type(ob).__mro__]
_fslots = []
for slot in _allslots:
_fslots = _fslots + slot
current_class.allslots = tuple(ordered_set(_fslots))
return current_class.allslots
def use_if_none_cls(alternative_attr):
def use_if_none(original_attr, ob, kwargs):
"""
Try and get a value from kwargs for original_attr. If there
is no original_attr in kwargs use the alternative_attr value
in the object ob
@param alternative_attr: the alternative attribute
@param original_attr: the original attribute
@param ob: the object with the attributes
@param kwargs: key values
@return: final value
"""
return kwargs.get(original_attr, getattr(ob, alternative_attr, None))
return use_if_none
def usef(attr):
"""Use another value as default
@param attr: the name of the attribute to
use as alternative value
@return: value of alternative attribute
"""
return use_if_none_cls(attr)
use_name_if_none = usef('Name')
def choose_alt(attr, ob, kwargs):
"""If the declared class attribute of ob is callable
then use that callable to get a default ob
instance value if a value is not available in kwargs.
@param attr: ob class attribute name
@param ob: the object instance whose default value needs to be set
@param kwargs: the kwargs values passed to the ob __init__ method
@return: value to be used to set ob instance
"""
result = ob.__class__.__dict__.get(attr, None)
if type(result).__name__ == "member_descriptor":
result = None
elif callable(result):
result = result(attr, ob, kwargs)
return result
class Defaults(object):
"""A base class which allows using slots to define
attributes and the ability to set object
instance defaults at the child class level"""
def __init__(self, **kwargs):
"""Assign kwargs to attributes and defaults to attributes"""
allslots = class_slots(self)
for attr in allslots:
setattr(self, attr, kwargs.get(
attr, choose_alt(attr, self, kwargs)))
def to_dict(self):
"""Returns attributes with values as dict
@return: dictionary of attributes with values
"""
allslots = class_slots(self)
return {
item: getattr(self, item, None)
for item in allslots
}
def to_dict_clean(self):
"""Return a dict where there values of None
are not included
@return: dict of the object properties with values
"""
attribs = self.to_dict()
return {
k: v
for k, v in attribs.items() if v
}
| def ordered_set(iter):
"""Creates an ordered set
@param iter: list or tuple
@return: list with unique values
"""
final = []
for i in iter:
if i not in final:
final.append(i)
return final
def class_slots(ob):
"""Get object attributes from child class attributes
@param ob: Defaults object
@type ob: Defaults
@return: Tuple of slots
"""
current_class = type(ob).__mro__[0]
if not getattr(current_class, 'allslots', None) and current_class != object:
_allslots = [list(getattr(cls, '__slots__', [])) for cls in type(ob).__mro__]
_fslots = []
for slot in _allslots:
_fslots = _fslots + slot
current_class.allslots = tuple(ordered_set(_fslots))
return current_class.allslots
def use_if_none_cls(alternative_attr):
def use_if_none(original_attr, ob, kwargs):
"""
Try and get a value from kwargs for original_attr. If there
is no original_attr in kwargs use the alternative_attr value
in the object ob
@param alternative_attr: the alternative attribute
@param original_attr: the original attribute
@param ob: the object with the attributes
@param kwargs: key values
@return: final value
"""
return kwargs.get(original_attr, getattr(ob, alternative_attr, None))
return use_if_none
def usef(attr):
"""Use another value as default
@param attr: the name of the attribute to
use as alternative value
@return: value of alternative attribute
"""
return use_if_none_cls(attr)
use_name_if_none = usef('Name')
def choose_alt(attr, ob, kwargs):
"""If the declared class attribute of ob is callable
then use that callable to get a default ob
instance value if a value is not available in kwargs.
@param attr: ob class attribute name
@param ob: the object instance whose default value needs to be set
@param kwargs: the kwargs values passed to the ob __init__ method
@return: value to be used to set ob instance
"""
result = ob.__class__.__dict__.get(attr, None)
if type(result).__name__ == 'member_descriptor':
result = None
elif callable(result):
result = result(attr, ob, kwargs)
return result
class Defaults(object):
"""A base class which allows using slots to define
attributes and the ability to set object
instance defaults at the child class level"""
def __init__(self, **kwargs):
"""Assign kwargs to attributes and defaults to attributes"""
allslots = class_slots(self)
for attr in allslots:
setattr(self, attr, kwargs.get(attr, choose_alt(attr, self, kwargs)))
def to_dict(self):
"""Returns attributes with values as dict
@return: dictionary of attributes with values
"""
allslots = class_slots(self)
return {item: getattr(self, item, None) for item in allslots}
def to_dict_clean(self):
"""Return a dict where there values of None
are not included
@return: dict of the object properties with values
"""
attribs = self.to_dict()
return {k: v for (k, v) in attribs.items() if v} |
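# A minimal usage sketch for the Defaults base class above. The Pet classes and their
# fields are hypothetical, made up only to show the intended pattern: __slots__ are
# declared on one class and per-attribute defaults (plain values or callables such as
# use_name_if_none) on the subclass.
class PetSlots(Defaults):
    __slots__ = ('Name', 'nickname', 'age')

class Pet(PetSlots):
    nickname = use_name_if_none  # fall back to the Name value when no nickname is passed
    age = 0

p = Pet(Name='Rex')
print(p.to_dict())        # {'Name': 'Rex', 'nickname': 'Rex', 'age': 0}
print(p.to_dict_clean())  # {'Name': 'Rex', 'nickname': 'Rex'}  (the falsy age is dropped)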
class RegipyException(Exception):
"""
This is the parent exception for all regipy exceptions
"""
pass
class RegipyGeneralException(RegipyException):
"""
General exception
"""
pass
class RegistryValueNotFoundException(RegipyException):
pass
class NoRegistrySubkeysException(RegipyException):
pass
class NoRegistryValuesException(RegipyException):
pass
class RegistryKeyNotFoundException(RegipyException):
pass
class UnidentifiedHiveException(RegipyException):
pass
class RegistryRecoveryException(RegipyException):
pass
class RegistryParsingException(RegipyException):
"""
Raised when there is a parsing error, most probably a corrupted hive
"""
pass
class NtSidDecodingException(RegipyException):
"""
Raised when the binary Windows NT SID representation can not be decoded
"""
| class Regipyexception(Exception):
"""
This is the parent exception for all regipy exceptions
"""
pass
class Regipygeneralexception(Regipyexception):
"""
General exception
"""
pass
class Registryvaluenotfoundexception(Regipyexception):
pass
class Noregistrysubkeysexception(Regipyexception):
pass
class Noregistryvaluesexception(Regipyexception):
pass
class Registrykeynotfoundexception(Regipyexception):
pass
class Unidentifiedhiveexception(Regipyexception):
pass
class Registryrecoveryexception(Regipyexception):
pass
class Registryparsingexception(Regipyexception):
"""
Raised when there is a parsing error, most probably a corrupted hive
"""
pass
class Ntsiddecodingexception(Regipyexception):
"""
Raised when the binary Windows NT SID representation can not be decoded
""" |
def _replace_formatted(ctx, manifest, files):
out = ctx.actions.declare_file(ctx.label.name)
# this makes it easier to add variables
file_lines = [
"""#!/bin/bash -e
WORKSPACE_ROOT="${1:-$BUILD_WORKSPACE_DIRECTORY}" """,
"""RUNPATH="${TEST_SRCDIR-$0.runfiles}"/""" + ctx.workspace_name,
"""RUNPATH=(${RUNPATH//bin/ })
RUNPATH="${RUNPATH[0]}"bin
echo $WORKSPACE_ROOT
echo $RUNPATH
while read original formatted; do
if [[ ! -z "$original" ]] && [[ ! -z "$formatted" ]]; then
if ! cmp -s "$WORKSPACE_ROOT/$original" "$RUNPATH/$formatted"; then
echo "Formatting $original"
cp "$RUNPATH/$formatted" "$WORKSPACE_ROOT/$original"
fi
fi
done < "$RUNPATH"/""" + manifest.short_path,
]
file_content = "\n".join(file_lines)
ctx.actions.write(
output = out,
content = file_content,
)
files.append(manifest)
return [DefaultInfo(files = depset(files), executable = out)]
def _build_format_py(ctx):
files = []
manifest_content = []
for src in ctx.files.srcs:
if src.is_source:
file = ctx.actions.declare_file("{}.format.output".format(src.short_path))
files.append(file)
ctx.actions.run(
arguments = [src.path, file.path],
executable = ctx.executable._fmt,
outputs = [file],
inputs = [src, ctx.file._style],
)
manifest_content.append("{} {}".format(src.short_path, file.short_path))
manifest = ctx.actions.declare_file("format/{}/manifest.txt".format(ctx.label.name))
ctx.actions.write(manifest, "\n".join(manifest_content) + "\n")
return manifest, files
def _format_py_impl(ctx):
manifest, files = _build_format_py(ctx)
return _replace_formatted(ctx, manifest, files)
format_py = rule(
implementation = _format_py_impl,
executable = True,
attrs = {
"srcs": attr.label_list(
allow_files = [".py"],
mandatory = True,
),
"_fmt": attr.label(
cfg = "host",
default = "//format:format_py",
executable = True,
),
"_style": attr.label(
allow_single_file = True,
default = ":setup.cfg",
),
},
)
| def _replace_formatted(ctx, manifest, files):
out = ctx.actions.declare_file(ctx.label.name)
file_lines = ['#!/bin/bash -e\nWORKSPACE_ROOT="${1:-$BUILD_WORKSPACE_DIRECTORY}" ', 'RUNPATH="${TEST_SRCDIR-$0.runfiles}"/' + ctx.workspace_name, 'RUNPATH=(${RUNPATH//bin/ })\nRUNPATH="${RUNPATH[0]}"bin\necho $WORKSPACE_ROOT\necho $RUNPATH\nwhile read original formatted; do\n if [[ ! -z "$original" ]] && [[ ! -z "$formatted" ]]; then\n if ! cmp -s "$WORKSPACE_ROOT/$original" "$RUNPATH/$formatted"; then\n echo "Formatting $original"\n cp "$RUNPATH/$formatted" "$WORKSPACE_ROOT/$original"\n fi\n fi\ndone < "$RUNPATH"/' + manifest.short_path]
file_content = '\n'.join(file_lines)
ctx.actions.write(output=out, content=file_content)
files.append(manifest)
    return [DefaultInfo(files=depset(files), executable=out)]
def _build_format_py(ctx):
files = []
manifest_content = []
for src in ctx.files.srcs:
if src.is_source:
file = ctx.actions.declare_file('{}.format.output'.format(src.short_path))
files.append(file)
ctx.actions.run(arguments=[src.path, file.path], executable=ctx.executable._fmt, outputs=[file], inputs=[src, ctx.file._style])
manifest_content.append('{} {}'.format(src.short_path, file.short_path))
manifest = ctx.actions.declare_file('format/{}/manifest.txt'.format(ctx.label.name))
ctx.actions.write(manifest, '\n'.join(manifest_content) + '\n')
return (manifest, files)
def _format_py_impl(ctx):
(manifest, files) = _build_format_py(ctx)
return _replace_formatted(ctx, manifest, files)
format_py = rule(implementation=_format_py_impl, executable=True, attrs={'srcs': attr.label_list(allow_files=['.py'], mandatory=True), '_fmt': attr.label(cfg='host', default='//format:format_py', executable=True), '_style': attr.label(allow_single_file=True, default=':setup.cfg')}) |
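# A hypothetical BUILD-file usage of the format_py rule defined above; the load label,
# target name and glob pattern are made up for illustration. `bazel run` on the target
# would copy the formatted outputs back over the workspace sources, as scripted above.
load("//tools/format:format.bzl", "format_py")

format_py(
    name = "format",
    srcs = glob(["**/*.py"]),
)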
r, c, m = map(int, input().split())
n = int(input())
op = [list(map(lambda x: int(x) - 1, input().split())) for _ in range(n)]
board = [[0 for _ in range(c)] for _ in range(r)]
for ra, rb, ca, cb in op:
for j in range(ra, rb + 1):
for k in range(ca, cb + 1):
board[j][k] += 1
cnt = 0
for i in range(r):
for j in range(c):
board[i][j] %= 4
if board[i][j] == 0:
cnt += 1
for i in range(n):
ra, rb, ca, cb = op[i]
cnti = cnt
for j in range(ra, rb + 1):
for k in range(ca, cb + 1):
if board[j][k] == 0:
cnti -= 1
elif board[j][k] == 1:
cnti += 1
if cnti == m:
print(i + 1)
| (r, c, m) = map(int, input().split())
n = int(input())
op = [list(map(lambda x: int(x) - 1, input().split())) for _ in range(n)]
board = [[0 for _ in range(c)] for _ in range(r)]
for (ra, rb, ca, cb) in op:
for j in range(ra, rb + 1):
for k in range(ca, cb + 1):
board[j][k] += 1
cnt = 0
for i in range(r):
for j in range(c):
board[i][j] %= 4
if board[i][j] == 0:
cnt += 1
for i in range(n):
(ra, rb, ca, cb) = op[i]
cnti = cnt
for j in range(ra, rb + 1):
for k in range(ca, cb + 1):
if board[j][k] == 0:
cnti -= 1
elif board[j][k] == 1:
cnti += 1
if cnti == m:
print(i + 1) |
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
http://code.activestate.com/recipes/576693/
"""
from UserDict import DictMixin  # the Python 2 mixin this recipe builds on
try:
all
except NameError:
def all(seq):
for elem in seq:
if not elem:
return False
return True
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError("expected at most 1 arguments, got %d" % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end]
self.__map = {}
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError("dictionary is empty")
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return "%s()" % (self.__class__.__name__,)
return "%s(%r)" % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
return len(self) == len(other) and all(p == q for p, q in zip(self.items(), other.items()))
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other | """Drop-in replacement for collections.OrderedDict by Raymond Hettinger
http://code.activestate.com/recipes/576693/
"""
from UserDict import DictMixin
try:
all
except NameError:
def all(seq):
for elem in seq:
if not elem:
return False
return True
class Ordereddict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end]
self.__map = {}
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
(key, prev, next) = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
            raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return (key, value)
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = (self.__map, self.__end)
del self.__map, self.__end
inst_dict = vars(self).copy()
(self.__map, self.__end) = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return (self.__class__, (items,))
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
        if isinstance(other, Ordereddict):
return len(self) == len(other) and all((p == q for (p, q) in zip(self.items(), other.items())))
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other |
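# A quick sanity check of the recipe above (Python 2 era, hence DictMixin), using the
# original OrderedDict name; the snake-cased copy calls the same class Ordereddict.
d = OrderedDict()
d['one'] = 1
d['two'] = 2
d['three'] = 3
print(d.keys())     # ['one', 'two', 'three'] -- insertion order is preserved
print(d.popitem())  # ('three', 3) -- pops from the most recently added end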
# Convert a Number to a String!
# We need a function that can transform a number into a string.
# What ways of achieving this do you know?
def number_to_string(num: int) -> str:
str_num = str(num)
return str_num
print(number_to_string(123))
print(type(number_to_string(123))) | def number_to_string(num: int) -> str:
str_num = str(num)
return str_num
print(number_to_string(123))
print(type(number_to_string(123))) |
floatVar = 1.0
listVar = [3, "hello"]
dictVar = {
"myField": "value"
}
aotVar = [dictVar, dictVar]
intVar = 1 | float_var = 1.0
list_var = [3, 'hello']
dict_var = {'myField': 'value'}
aot_var = [dict_var, dict_var]
int_var = 1 |
# Python3 program to print
# given matrix in spiral form
def spiralPrint(m, n, a):
start_row_index = 0
start_col_index = 0
l = 0
''' start_row_index - starting row index
m - ending row index
start_col_index - starting column index
n - ending column index
i - iterator '''
while (start_row_index < m and start_col_index < n):
# Print the first row from
# the remaining rows
for i in range(start_col_index, n):
print(a[start_row_index][i], end=" ")
start_row_index += 1
# Print the last column from
# the remaining columns
for i in range(start_row_index, m):
print(a[i][n - 1], end=" ")
n -= 1
# Print the last row from
# the remaining rows
if (start_row_index < m):
for i in range(n - 1, (start_col_index - 1), -1):
print(a[m - 1][i], end=" ")
m -= 1
# Print the first column from
# the remaining columns
if (start_col_index < n):
for i in range(m - 1, start_row_index - 1, -1):
print(a[i][start_col_index], end=" ")
start_col_index += 1
# Driver Code
a = [[1, 2, 3, 4, 5, 6],
[7, 8, 9, 10, 11, 12],
[13, 14, 15, 16, 17, 18]]
R = 3
C = 6
spiralPrint(R, C, a)
| def spiral_print(m, n, a):
start_row_index = 0
start_col_index = 0
l = 0
' start_row_index - starting row index \n\t\tm - ending row index \n\t\tstart_col_index - starting column index \n\t\tn - ending column index \n\t\ti - iterator '
while start_row_index < m and start_col_index < n:
for i in range(start_col_index, n):
print(a[start_row_index][i], end=' ')
start_row_index += 1
for i in range(start_row_index, m):
print(a[i][n - 1], end=' ')
n -= 1
if start_row_index < m:
for i in range(n - 1, start_col_index - 1, -1):
print(a[m - 1][i], end=' ')
m -= 1
if start_col_index < n:
for i in range(m - 1, start_row_index - 1, -1):
print(a[i][start_col_index], end=' ')
start_col_index += 1
a = [[1, 2, 3, 4, 5, 6], [7, 8, 9, 10, 11, 12], [13, 14, 15, 16, 17, 18]]
r = 3
c = 6
spiral_print(r, c, a)
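# For the 3 x 6 driver matrix above, both versions print the spiral traversal:
# 1 2 3 4 5 6 12 18 17 16 15 14 13 7 8 9 10 11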
"""
********************************************************************************
compas_blender.forms
********************************************************************************
.. currentmodule:: compas_blender.forms
"""
__all__ = []
| """
********************************************************************************
compas_blender.forms
********************************************************************************
.. currentmodule:: compas_blender.forms
"""
__all__ = [] |
# configs for the model training
class model_training_configs:
VALIDATION_ERRORS_DIRECTORY = 'results/validation_errors/'
INFO_FREQ = 1
# configs for the model testing
class model_testing_configs:
RNN_FORECASTS_DIRECTORY = 'results/rnn_forecasts/'
RNN_ERRORS_DIRECTORY = 'results/errors'
PROCESSED_RNN_FORECASTS_DIRECTORY = '/results/processed_rnn_forecasts/'
# configs for hyperparameter tuning(SMAC3)
class hyperparameter_tuning_configs:
SMAC_RUNCOUNT_LIMIT = 50
class gpu_configs:
log_device_placement = False
| class Model_Training_Configs:
validation_errors_directory = 'results/validation_errors/'
info_freq = 1
class Model_Testing_Configs:
rnn_forecasts_directory = 'results/rnn_forecasts/'
rnn_errors_directory = 'results/errors'
processed_rnn_forecasts_directory = '/results/processed_rnn_forecasts/'
class Hyperparameter_Tuning_Configs:
smac_runcount_limit = 50
class Gpu_Configs:
log_device_placement = False |
name = "cluster"
num_cores = 1000
GENERAL_PARTITIONS = ["regular"]
GPU_PARTITIONS = ["gpu"]
PARTITIONS = GENERAL_PARTITIONS + GPU_PARTITIONS
ACTIVE_JOB_STATES = ["RUNNING", "COMPLETING"]
FINISHED_JOB_STATES = ["COMPLETED", "NODE_FAIL", "TIMEOUT", "FAILED", "CANCELLED"]
JOB_STATES = ACTIVE_JOB_STATES + FINISHED_JOB_STATES
def node2int(node):
"""custom function to convert nodename to int
this one removes all chars from names like node1-001-01"""
return int(''.join(filter(lambda x: x.isdigit(), node)))
| name = 'cluster'
num_cores = 1000
general_partitions = ['regular']
gpu_partitions = ['gpu']
partitions = general_partitions + gpu_partitions
active_job_states = ['RUNNING', 'COMPLETING']
finished_job_states = ['COMPLETED', 'NODE_FAIL', 'TIMEOUT', 'FAILED', 'CANCELLED']
job_states = active_job_states + finished_job_states
def node2int(node):
"""custom function to convert nodename to int
this one removes all chars from names like node1-001-01"""
return int(''.join(filter(lambda x: x.isdigit(), node))) |
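# Example for the helper above: every non-digit character is stripped before the
# int() conversion, so a node name like 'node1-001-01' collapses to 100101.
print(node2int('node1-001-01'))  # 100101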
workdays = float(input())
daily_tips = float(input())
exchange_rate = float(input())
salary = workdays * daily_tips
annual_income = salary * 12 + salary * 2.5
net_income = annual_income - annual_income * 25 / 100
result = net_income / 365 * exchange_rate
print('%.2f' % result)
| workdays = float(input())
daily_tips = float(input())
exchange_rate = float(input())
salary = workdays * daily_tips
annual_income = salary * 12 + salary * 2.5
net_income = annual_income - annual_income * 25 / 100
result = net_income / 365 * exchange_rate
print('%.2f' % result) |
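# A worked example with made-up inputs: 21 workdays, 10.0 in daily tips and an exchange
# rate of 1.95 give a salary of 210.0, an annual income of 3045.0 (12 salaries plus 2.5
# bonus salaries), 2283.75 net after the 25% deduction, and the script prints
# 2283.75 / 365 * 1.95 ~= 12.20.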
def main():
# Pass a string to show_mammal_info...
show_mammal_info('I am a string')
# The show_mammal_info function accepts an object
# as an argument, and calls its show_species
# and make_sound methods.
def show_mammal_info(creature):
creature.show_species()
creature.make_sound()
# Call the main function.
main()
| def main():
show_mammal_info('I am a string')
def show_mammal_info(creature):
creature.show_species()
creature.make_sound()
main() |
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 21 08:09:31 2020
@author: Shivadhar SIngh
"""
def count_capitals(string):
count = 0
for ch in string:
if ord(ch) >= 65 and ord(ch) <= 90:
count += 1
return count
def remove_substring_everywhere(string, substring):
'''
Remove all occurrences of substring from string, and return
the resulting string. Both arguments must be strings.
'''
    newstr = ''
    i = 0
    lsub = len(substring)  # length of the substring
    p = string.find(substring, i)
    while p != -1:
        newstr += string[i:p]          # keep the text before this occurrence
        i = p + lsub                   # skip over the occurrence itself
        p = string.find(substring, i)  # find the next occurrence
    newstr += string[i:]               # keep the tail after the last occurrence
return newstr | """
Created on Tue Apr 21 08:09:31 2020
@author: Shivadhar SIngh
"""
def count_capitals(string):
count = 0
for ch in string:
if ord(ch) >= 65 and ord(ch) <= 90:
count += 1
return count
def remove_substring_everywhere(string, substring):
"""
Remove all occurrences of substring from string, and return
the resulting string. Both arguments must be strings.
"""
    newstr = ''
    i = 0
    lsub = len(substring)
    p = string.find(substring, i)
    while p != -1:
        newstr += string[i:p]
        i = p + lsub
        p = string.find(substring, i)
    newstr += string[i:]
return newstr |
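# Small self-checks for the two helpers above (the inputs are arbitrary examples):
print(count_capitals('Hello World'))                # 2
print(remove_substring_everywhere('banana', 'an'))  # ba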
COLOR_BLUE = '\033[0;34m'
COLOR_GREEN = '\033[0;32m'
COLOR_CYAN = '\033[0;36m'
COLOR_RED = '\033[0;31m'
COLOR_PURPLE = '\033[0;35m'
COLOR_BROWN = '\033[0;33m'
COLOR_YELLOW = '\033[1;33m'
COLOR_GRAY = '\033[1;30m'
COLOR_RESET = '\033[0m'
FG_COLORS = [
# COLOR_BLUE,
COLOR_GREEN,
# COLOR_CYAN,
# COLOR_RED,
# COLOR_PURPLE,
# COLOR_BROWN,
# COLOR_YELLOW,
]
def next_color(color):
assert color in FG_COLORS
index = FG_COLORS.index(color)
index += 1
try:
return FG_COLORS[index]
except IndexError:
index = 0
return FG_COLORS[index]
def c(string, color):
global COLOR_RESET
return f"{color}{string}{COLOR_RESET}"
| color_blue = '\x1b[0;34m'
color_green = '\x1b[0;32m'
color_cyan = '\x1b[0;36m'
color_red = '\x1b[0;31m'
color_purple = '\x1b[0;35m'
color_brown = '\x1b[0;33m'
color_yellow = '\x1b[1;33m'
color_gray = '\x1b[1;30m'
color_reset = '\x1b[0m'
fg_colors = [color_green]
def next_color(color):
    assert color in fg_colors
    index = fg_colors.index(color)
    index += 1
    try:
        return fg_colors[index]
    except IndexError:
        index = 0
        return fg_colors[index]
def c(string, color):
    global color_reset
    return f'{color}{string}{color_reset}'
class Hey:
def __init__(jose, name="mours"):
jose.name = name
def get_name(jose):
return jose.name
class Person(object):
def __init__(self, name, phone):
self.name = name
self.phone = phone
class Teenager(Person):
def __init__(self, *args, **kwargs):
self.website = kwargs.pop("website")
super(Teenager, self).__init__(*args, **kwargs)
if __name__ == "__main__":
#print(Hey().get_name())
    teen = Teenager("Joseph Njeri", 924, website="www.fowr.gd")
print(teen.website) | class Hey:
def __init__(jose, name='mours'):
jose.name = name
def get_name(jose):
return jose.name
class Person(object):
def __init__(self, name, phone):
self.name = name
self.phone = phone
class Teenager(Person):
def __init__(self, *args, **kwargs):
self.website = kwargs.pop('website')
super(Teenager, self).__init__(*args, **kwargs)
if __name__ == '__main__':
    teen = Teenager('Joseph Njeri', 924, website='www.fowr.gd')
print(teen.website) |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""IIIF API for Invenio."""
IIIF_API_PREFIX = '/iiif/'
"""URL prefix to IIIF API."""
IIIF_UI_URL = '/api{}'.format(IIIF_API_PREFIX)
"""URL to IIIF API endpoint (allow hostname)."""
IIIF_PREVIEWER_PARAMS = {
'size': '750,'
}
"""Parameters for IIIF image previewer extension."""
IIIF_PREVIEW_TEMPLATE = 'invenio_iiif/preview.html'
"""Template for IIIF image preview."""
IIIF_API_DECORATOR_HANDLER = 'invenio_iiif.handlers:protect_api'
"""Image opener handler decorator."""
IIIF_IMAGE_OPENER_HANDLER = 'invenio_iiif.handlers:image_opener'
"""Image opener handler function."""
| """IIIF API for Invenio."""
iiif_api_prefix = '/iiif/'
'URL prefix to IIIF API.'
iiif_ui_url = '/api{}'.format(iiif_api_prefix)
'URL to IIIF API endpoint (allow hostname).'
iiif_previewer_params = {'size': '750,'}
'Parameters for IIIF image previewer extension.'
iiif_preview_template = 'invenio_iiif/preview.html'
'Template for IIIF image preview.'
iiif_api_decorator_handler = 'invenio_iiif.handlers:protect_api'
'Image opener handler decorator.'
iiif_image_opener_handler = 'invenio_iiif.handlers:image_opener'
'Image opener handler function.' |
def sysrc(value):
"""Call sysrc.
CLI Example:
.. code-block:: bash
salt '*' freebsd_common.sysrc sshd_enable=YES
salt '*' freebsd_common.sysrc static_routes
"""
return __salt__['cmd.run_all']("sysrc %s" % value)
| def sysrc(value):
"""Call sysrc.
CLI Example:
.. code-block:: bash
salt '*' freebsd_common.sysrc sshd_enable=YES
salt '*' freebsd_common.sysrc static_routes
"""
return __salt__['cmd.run_all']('sysrc %s' % value) |
# Author: Anuj Sharma (@optider)
# Github Profile: https://github.com/Optider/
# Problem Link: https://leetcode.com/problems/contains-duplicate/
class Solution:
def containsDuplicate(self, nums: List[int]) -> bool:
count = {}
for n in nums :
if count.get(n) != None :
return True
count[n] = 1
return False
| class Solution:
def contains_duplicate(self, nums: List[int]) -> bool:
count = {}
for n in nums:
if count.get(n) != None:
return True
count[n] = 1
return False |
{
'target_defaults': {
'win_delay_load_hook': 'false',
'conditions': [
['OS=="win"', {
'msvs_disabled_warnings': [
4530, # C++ exception handler used, but unwind semantics are not enabled
4506, # no definition for inline function
],
}],
],
},
'targets': [
{
'target_name': 'fs_admin',
'defines': [
"NAPI_VERSION=<(napi_build_version)",
],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions' ],
'xcode_settings': { 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'CLANG_CXX_LIBRARY': 'libc++',
'MACOSX_DEPLOYMENT_TARGET': '10.7',
},
'msvs_settings': {
'VCCLCompilerTool': { 'ExceptionHandling': 1 },
},
'sources': [
'src/main.cc',
],
'include_dirs': [
'<!(node -p "require(\'node-addon-api\').include_dir")',
],
'conditions': [
['OS=="win"', {
'sources': [
'src/fs-admin-win.cc',
],
'libraries': [
'-lole32.lib',
'-lshell32.lib',
],
}],
['OS=="mac"', {
'sources': [
'src/fs-admin-darwin.cc',
],
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
}],
['OS=="linux"', {
'sources': [
'src/fs-admin-linux.cc',
],
}],
],
}
]
}
| {'target_defaults': {'win_delay_load_hook': 'false', 'conditions': [['OS=="win"', {'msvs_disabled_warnings': [4530, 4506]}]]}, 'targets': [{'target_name': 'fs_admin', 'defines': ['NAPI_VERSION=<(napi_build_version)'], 'cflags!': ['-fno-exceptions'], 'cflags_cc!': ['-fno-exceptions'], 'xcode_settings': {'GCC_ENABLE_CPP_EXCEPTIONS': 'YES', 'CLANG_CXX_LIBRARY': 'libc++', 'MACOSX_DEPLOYMENT_TARGET': '10.7'}, 'msvs_settings': {'VCCLCompilerTool': {'ExceptionHandling': 1}}, 'sources': ['src/main.cc'], 'include_dirs': ['<!(node -p "require(\'node-addon-api\').include_dir")'], 'conditions': [['OS=="win"', {'sources': ['src/fs-admin-win.cc'], 'libraries': ['-lole32.lib', '-lshell32.lib']}], ['OS=="mac"', {'sources': ['src/fs-admin-darwin.cc'], 'libraries': ['$(SDKROOT)/System/Library/Frameworks/Security.framework']}], ['OS=="linux"', {'sources': ['src/fs-admin-linux.cc']}]]}]} |
DEBUG = True
USE_TZ = True
SECRET_KEY = "dummy"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"rest_framework",
"django_filters",
"belt",
"tests.app",
]
SITE_ID = 1
ROOT_URLCONF = "tests.app.urls"
MIDDLEWARE = ()
REST_FRAMEWORK = {
"DEFAULT_FILTER_BACKENDS": ("django_filters.rest_framework.DjangoFilterBackend",)
}
| debug = True
use_tz = True
secret_key = 'dummy'
databases = {'default': {'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:'}}
installed_apps = ['django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sites', 'rest_framework', 'django_filters', 'belt', 'tests.app']
site_id = 1
root_urlconf = 'tests.app.urls'
middleware = ()
rest_framework = {'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',)} |
#!/usr/bin/env python
''' This module provides configuration options for OS project. No more magic numbers! '''
BLOCK_SIZE = 16 # words
WORD_SIZE = 4 # bytes
# length of RS in blocks
RESTRICTED_LENGTH = 1
# length of DS in blocks
DS_LENGTH = 6
# timer value
TIMER_VALUE = 10
# buffer size
BUFFER_SIZE = 16
# number of blocks in HD
HD_BLOCKS_SIZE = 500
# default priorities
ROOT_PRIORITY = 40
VM_PRIORITY = 50
LOADER_PRIORITY = 60
INTERRUPT_PRIORITY = 70
PRINT_PRIORITY = 70
# Process states
RUNNING_STATE = 'running'
READY_STATE = 'ready'
BLOCKED_STATE = 'blocked'
# Page tables
PAGE_TABLE_STARTING_BLOCK = 0
PAGE_TABLE_ENDING_BLOCK = 14
# Shared memory
SH_MEMEORY_STARTING_BLOCK = 15
SH_MEMORY_ENDING_BLOCK = 31
# blocks dedicated for user tasks are from
USER_STARTING_BLOCK = 32
USER_ENDING_BLOCK = 255
| """ This module provides configuration options for OS project. No more magic numbers! """
block_size = 16
word_size = 4
restricted_length = 1
ds_length = 6
timer_value = 10
buffer_size = 16
hd_blocks_size = 500
root_priority = 40
vm_priority = 50
loader_priority = 60
interrupt_priority = 70
print_priority = 70
running_state = 'running'
ready_state = 'ready'
blocked_state = 'blocked'
page_table_starting_block = 0
page_table_ending_block = 14
sh_memeory_starting_block = 15
sh_memory_ending_block = 31
user_starting_block = 32
user_ending_block = 255 |
class Solution:
def jump(self, nums: List[int]) -> int:
n = len(nums)
dp = [float('inf')] * n
dp[0] = 0
tail = 1
for i in range(n):
limit = min(n, i + nums[i] + 1)
for j in range(tail, limit):
dp[j] = min(dp[j], dp[i] + 1)
tail = limit - 1
return dp[-1]
| class Solution:
def jump(self, nums: List[int]) -> int:
n = len(nums)
dp = [float('inf')] * n
dp[0] = 0
tail = 1
for i in range(n):
limit = min(n, i + nums[i] + 1)
for j in range(tail, limit):
dp[j] = min(dp[j], dp[i] + 1)
tail = limit - 1
return dp[-1] |
class Solution:
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
if not height:
return 0
left = 0
right = len(height)-1
total_area = 0
if height[left] <= height[right]:
m = left
else:
m =right
while(left < right):
if height[left] <= height[right]:
# move m from left to right
m += 1
if height[m] >= height[left]:
                    # found a local concave shape
left = m # search the remainder part from [m,right]
                    m = left if height[left] <= height[right] else right # reset m as the min height between left and right
else:
                    # since the right side is at least as tall as the left bar,
                    # index m is guaranteed to trap height[left]-height[m] units of water
total_area += height[left]-height[m]
else:
# move m from right to left
m-=1
if height[m] >= height[right]:
                    # found a local concave shape
right = m
m = left if height[left] <= height[right] else right
else:
# same as left part above
total_area += height[right]-height[m]
return total_area
if __name__ == '__main__':
res = Solution().trap([])
print(res) | class Solution:
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
if not height:
return 0
left = 0
right = len(height) - 1
total_area = 0
if height[left] <= height[right]:
m = left
else:
m = right
while left < right:
if height[left] <= height[right]:
m += 1
if height[m] >= height[left]:
left = m
m = left if height[left] <= height[right] else right
else:
total_area += height[left] - height[m]
else:
m -= 1
if height[m] >= height[right]:
right = m
m = left if height[left] <= height[right] else right
else:
total_area += height[right] - height[m]
return total_area
if __name__ == '__main__':
    res = Solution().trap([])
print(res) |
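# The driver above only exercises the empty-list guard; on the classic example from the
# problem statement the same method returns 6:
print(Solution().trap([0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]))  # 6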
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
load("//antlir/bzl:maybe_export_file.bzl", "maybe_export_file")
load("//antlir/bzl:shape.bzl", "shape")
load(
"//antlir/bzl:target_tagger.bzl",
"image_source_as_target_tagged_shape",
"new_target_tagger",
"target_tagged_image_source_shape",
"target_tagger_to_feature",
)
tarball_t = shape.shape(
force_root_ownership = shape.field(bool, optional = True),
into_dir = shape.path(),
source = target_tagged_image_source_shape,
)
def image_tarball(source, dest, force_root_ownership = False):
"""
`image.tarball("files/xyz.tar", "/a/b")` extracts tarball located at `files/xyz.tar` to `/a/b` in the image --
- `source` is one of:
- an `image.source` (docs in `image_source.bzl`), or
- the path of a target outputting a tarball target path,
e.g. an `export_file` or a `genrule`
- `dest` is the destination of the unpacked tarball in the image.
This is an image-absolute path to a directory that must be created
by another `feature_new` item.
"""
target_tagger = new_target_tagger()
tarball = shape.new(
tarball_t,
force_root_ownership = force_root_ownership,
into_dir = dest,
source = image_source_as_target_tagged_shape(
target_tagger,
maybe_export_file(source),
),
)
return target_tagger_to_feature(
target_tagger,
items = struct(tarballs = [tarball]),
# The `fake_macro_library` docblock explains this self-dependency
extra_deps = ["//antlir/bzl/image_actions:tarball"],
)
| load('//antlir/bzl:maybe_export_file.bzl', 'maybe_export_file')
load('//antlir/bzl:shape.bzl', 'shape')
load('//antlir/bzl:target_tagger.bzl', 'image_source_as_target_tagged_shape', 'new_target_tagger', 'target_tagged_image_source_shape', 'target_tagger_to_feature')
tarball_t = shape.shape(force_root_ownership=shape.field(bool, optional=True), into_dir=shape.path(), source=target_tagged_image_source_shape)
def image_tarball(source, dest, force_root_ownership=False):
"""
`image.tarball("files/xyz.tar", "/a/b")` extracts tarball located at `files/xyz.tar` to `/a/b` in the image --
- `source` is one of:
- an `image.source` (docs in `image_source.bzl`), or
- the path of a target outputting a tarball target path,
e.g. an `export_file` or a `genrule`
- `dest` is the destination of the unpacked tarball in the image.
This is an image-absolute path to a directory that must be created
by another `feature_new` item.
"""
target_tagger = new_target_tagger()
tarball = shape.new(tarball_t, force_root_ownership=force_root_ownership, into_dir=dest, source=image_source_as_target_tagged_shape(target_tagger, maybe_export_file(source)))
return target_tagger_to_feature(target_tagger, items=struct(tarballs=[tarball]), extra_deps=['//antlir/bzl/image_actions:tarball']) |
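# A hypothetical call site for the macro above, mirroring the docstring's own example:
# `source` points at a tarball (image.source or target path) and `dest` must already be
# created by another feature item.
image_tarball("files/xyz.tar", "/a/b")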
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 3 19:02:33 2019
@author: sercangul
"""
def maxConsecutiveOnes(x):
# Initialize result
count = 0
# Count the number of iterations to
# reach x = 0.
while (x!=0):
# This operation reduces length
# of every sequence of 1s by one.
x = (x & (x << 1))
count=count+1
return count
if __name__ == '__main__':
n = int(input())
result = maxConsecutiveOnes(n)
print(result) | """
Created on Mon Jun 3 19:02:33 2019
@author: sercangul
"""
def max_consecutive_ones(x):
count = 0
while x != 0:
x = x & x << 1
count = count + 1
return count
if __name__ == '__main__':
n = int(input())
result = max_consecutive_ones(n)
print(result) |
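# Worked example of the x & (x << 1) trick used above, for x = 13 (binary 1101):
x = 0b1101
steps = 0
while x != 0:
    x &= x << 1   # 0b1101 -> 0b1000 -> 0b0000
    steps += 1
print(steps)      # 2, the length of the longest run of set bits in 13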
#! /usr/bin/python3
# Description: Data_Ghost, concealing data into spaces and tabs making it imperceptible to human eyes.
# Author: Ajay Dyavathi
# Github: Radical Ajay
class Ghost():
def __init__(self, file_name, output_format='txt'):
''' Converts ascii text to spaces and tabs '''
self.file_name = file_name
self.output_format = output_format
def ascii2bin(self, asc):
        ''' Converting ascii to binary '''
return ''.join('{:08b}'.format(ord(i)) for i in asc)
def bin2ascii(self, bid):
''' Converting binary to ascii '''
return ''.join(chr(int(bid[i:i + 8], 2)) for i in range(0, len(bid), 8))
def ghost(self, filename):
''' Ghosting data converting it to spaces and tabs '''
with open(filename, 'w') as out_f:
with open(self.file_name, 'r') as in_f:
for in_data in in_f.readlines():
bin_data = self.ascii2bin(in_data)
out_data = bin_data.replace('1', '\t')
out_data = out_data.replace('0', ' ')
out_f.write(out_data)
def unghost(self, in_filename, out_filename):
''' Unghosting data converting back from spaces and tabs to human-readable text '''
with open(out_filename, 'w') as out_f:
with open(in_filename, 'r') as in_f:
for line in in_f.readlines():
line = line.replace('\t', '1')
line = line.replace(' ', '0')
out_f.write(self.bin2ascii(line))
# USAGE:
# ghoster = Ghost('data.txt')
# ghoster.ghost('ghosted.txt')
# ghoster.unghost('ghosted.txt', 'unghosted.txt')
| class Ghost:
def __init__(self, file_name, output_format='txt'):
""" Converts ascii text to spaces and tabs """
self.file_name = file_name
self.output_format = output_format
    def ascii2bin(self, asc):
        """ Converting ascii to binary """
return ''.join(('{:08b}'.format(ord(i)) for i in asc))
def bin2ascii(self, bid):
""" Converting binary to ascii """
return ''.join((chr(int(bid[i:i + 8], 2)) for i in range(0, len(bid), 8)))
def ghost(self, filename):
""" Ghosting data converting it to spaces and tabs """
with open(filename, 'w') as out_f:
with open(self.file_name, 'r') as in_f:
for in_data in in_f.readlines():
bin_data = self.ascii2bin(in_data)
out_data = bin_data.replace('1', '\t')
out_data = out_data.replace('0', ' ')
out_f.write(out_data)
def unghost(self, in_filename, out_filename):
""" Unghosting data converting back from spaces and tabs to human-readable text """
with open(out_filename, 'w') as out_f:
with open(in_filename, 'r') as in_f:
for line in in_f.readlines():
line = line.replace('\t', '1')
line = line.replace(' ', '0')
out_f.write(self.bin2ascii(line)) |
class Solution:
def defangIPaddr(self, address: str) -> str:
i=0
j=0
strlist=list(address)
defang=[]
while i< len(strlist):
if strlist[i] == '.':
defang.append('[')
defang.append('.')
defang.append(']')
else:
defang.append(address[i])
i+=1
retstr=""
# return string
return (retstr.join(defang))
| class Solution:
def defang_i_paddr(self, address: str) -> str:
i = 0
j = 0
strlist = list(address)
defang = []
while i < len(strlist):
if strlist[i] == '.':
defang.append('[')
defang.append('.')
defang.append(']')
else:
defang.append(address[i])
i += 1
retstr = ''
return retstr.join(defang) |
class Node:
def __init__(self, path, libgraphql_type, location, name):
self.path = path
self.parent = None
self.children = []
self.libgraphql_type = libgraphql_type
self.location = location
self.name = name
def __repr__(self):
return "%s(%s)" % (self.libgraphql_type, self.name)
| class Node:
def __init__(self, path, libgraphql_type, location, name):
self.path = path
self.parent = None
self.children = []
self.libgraphql_type = libgraphql_type
self.location = location
self.name = name
def __repr__(self):
return '%s(%s)' % (self.libgraphql_type, self.name) |