repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments | score
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
lakshayg/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/moving_stats_test.py | 46 | 5107 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for computing moving-average statistics."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import moving_stats
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
rng = np.random.RandomState(0)
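# Note added for clarity (not part of the original TensorFlow source): the ops under
# test maintain exponential moving statistics of the form
#   mean_new = decay * mean_old + (1 - decay) * x
# and, for assign_log_moving_mean_exp,
#   log_mean_exp_new = log(decay * exp(log_mean_exp_old) + (1 - decay) * exp(x)),
# which is the reference update the tests below recompute with NumPy.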
class MovingReduceMeanVarianceTest(test.TestCase):
def test_assign_moving_mean_variance(self):
shape = [1, 2]
true_mean = np.array([[0., 3.]])
true_stddev = np.array([[1.1, 0.5]])
with self.test_session() as sess:
# Start "x" out with this mean.
mean_var = variables.Variable(array_ops.zeros_like(true_mean))
variance_var = variables.Variable(array_ops.ones_like(true_stddev))
x = random_ops.random_normal(shape, dtype=np.float64, seed=0)
x = true_stddev * x + true_mean
ema, emv = moving_stats.assign_moving_mean_variance(
mean_var, variance_var, x, decay=0.99)
self.assertEqual(ema.dtype.base_dtype, dtypes.float64)
self.assertEqual(emv.dtype.base_dtype, dtypes.float64)
# Run 2000 updates; moving averages should be near the true values.
variables.global_variables_initializer().run()
for _ in range(2000):
sess.run([ema, emv])
[mean_var_, variance_var_, ema_, emv_] = sess.run([
mean_var, variance_var, ema, emv])
# Test that variables are passed-through.
self.assertAllEqual(mean_var_, ema_)
self.assertAllEqual(variance_var_, emv_)
# Test that values are as expected.
self.assertAllClose(true_mean, ema_, rtol=0.005, atol=0.015)
self.assertAllClose(true_stddev**2., emv_, rtol=0.06, atol=0.)
# Change the mean, var then update some more. Moving averages should
# re-converge.
sess.run([
mean_var.assign(np.array([[-1., 2.]])),
variance_var.assign(np.array([[2., 1.]])),
])
for _ in range(2000):
sess.run([ema, emv])
[mean_var_, variance_var_, ema_, emv_] = sess.run([
mean_var, variance_var, ema, emv])
# Test that variables are passed-through.
self.assertAllEqual(mean_var_, ema_)
self.assertAllEqual(variance_var_, emv_)
# Test that values are as expected.
self.assertAllClose(true_mean, ema_, rtol=0.005, atol=0.015)
self.assertAllClose(true_stddev**2., emv_, rtol=0.1, atol=0.)
def test_moving_mean_variance(self):
shape = [1, 2]
true_mean = np.array([[0., 3.]])
true_stddev = np.array([[1.1, 0.5]])
with self.test_session() as sess:
# Start "x" out with this mean.
x = random_ops.random_normal(shape, dtype=np.float64, seed=0)
x = true_stddev * x + true_mean
ema, emv = moving_stats.moving_mean_variance(
x, decay=0.99)
self.assertEqual(ema.dtype.base_dtype, dtypes.float64)
self.assertEqual(emv.dtype.base_dtype, dtypes.float64)
# Run 2000 updates; moving averages should be near the true values.
variables.global_variables_initializer().run()
for _ in range(2000):
sess.run([ema, emv])
[ema_, emv_] = sess.run([ema, emv])
self.assertAllClose(true_mean, ema_, rtol=0.005, atol=0.015)
self.assertAllClose(true_stddev**2., emv_, rtol=0.06, atol=0.)
class MovingLogExponentialMovingMeanExpTest(test.TestCase):
def test_assign_log_moving_mean_exp(self):
shape = [1, 2]
true_mean = np.array([[0., 3.]])
true_stddev = np.array([[1.1, 0.5]])
decay = 0.99
with self.test_session() as sess:
# Start "x" out with this mean.
x = random_ops.random_normal(shape, dtype=np.float64, seed=0)
x = true_stddev * x + true_mean
log_mean_exp_var = variables.Variable(array_ops.zeros_like(true_mean))
variables.global_variables_initializer().run()
log_mean_exp = moving_stats.assign_log_moving_mean_exp(
log_mean_exp_var, x, decay=decay)
expected_ = np.zeros_like(true_mean)
for _ in range(2000):
x_, log_mean_exp_ = sess.run([x, log_mean_exp])
expected_ = np.log(decay * np.exp(expected_) + (1 - decay) * np.exp(x_))
self.assertAllClose(expected_, log_mean_exp_, rtol=1e-6, atol=1e-9)
if __name__ == "__main__":
test.main()
| apache-2.0 | 1,057,861,798,496,475,500 | 38.589147 | 80 | 0.647543 | false | 3.42063 | true | false | false | 0.01077 |
ledocc/hunter | maintenance/upload-cache-to-github.py | 1 | 15346 | #!/usr/bin/env python3
import argparse
import base64
import hashlib
import json
import os
import requests
import sys
import time
def sleep_time(attempt):
if attempt <= 0:
raise Exception('Unexpected')
if attempt == 1:
return 0
if attempt == 2:
return 15
if attempt == 3:
return 60
if attempt == 4:
return 90
if attempt == 5:
return 300
return 1200
def retry(func_in):
def func_out(*args, **kwargs):
retry_max = 10
i = 0
while True:
i = i + 1
try:
return func_in(*args, **kwargs)
except Exception as exc:
if i > retry_max:
raise exc
print('Operation failed. Exception:\n {}'.format(exc))
sec = sleep_time(i)
print('Retry #{} (of {}) after {} seconds'.format(i, retry_max, sec))
time.sleep(sec)
raise Exception('Unreachable')
return func_out
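# Illustrative usage of the decorator above (sketch only, not called by this script):
#
#   @retry
#   def flaky_operation():
#       ...  # re-invoked up to retry_max times, sleeping sleep_time(attempt)
#            # seconds between attempts; the last exception is re-raised.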
# http://stackoverflow.com/a/16696317/2288008
@retry
def download_file(url, local_file, auth, chunk_size=1024):
print('Downloading:\n {}\n -> {}'.format(url, local_file))
r = requests.get(url, stream=True, auth=auth)
if not r.ok:
raise Exception('Downloading failed')
with open(local_file, 'wb') as f:
for chunk in r.iter_content(chunk_size=chunk_size):
if chunk:
f.write(chunk)
class Github:
def __init__(self, username, password, repo_owner, repo):
self.repo_owner = repo_owner
self.repo = repo
self.auth = requests.auth.HTTPBasicAuth(username, password)
self.simple_request()
@retry
def simple_request(self):
print('Processing simple request')
r = requests.get('https://api.github.com', auth=self.auth)
if not r.ok:
sys.exit('Simple request failed. Check your password.')
limit = int(r.headers['X-RateLimit-Remaining'])
print('GitHub Limit: {}'.format(limit))
if limit == 0:
raise Exception('GitHub limit is 0')
print('Simple request pass')
@retry
def get_release_by_tag(self, tagname):
print('Get release-id by tag `{}`'.format(tagname))
# https://developer.github.com/v3/repos/releases/#get-a-release-by-tag-name
# GET /repos/:owner/:repo/releases/tags/:tag
url = 'https://api.github.com/repos/{}/{}/releases/tags/{}'.format(
self.repo_owner,
self.repo,
tagname
)
r = requests.get(url, auth=self.auth)
if not r.ok:
raise Exception('Get tag id failed. Requested url: {}'.format(url))
tag_id = r.json()['id']
print('Tag id is {}'.format(tag_id))
return tag_id
@retry
def find_asset_id_by_name(self, release_id, name):
# https://developer.github.com/v3/repos/releases/#list-assets-for-a-release
# GET /repos/:owner/:repo/releases/:id/assets
page_number = 1
keep_searching = True
while keep_searching:
url = 'https://api.github.com/repos/{}/{}/releases/{}/assets?page={}'.format(
self.repo_owner,
self.repo,
release_id,
page_number
)
print('Requesting URL: {}'.format(url))
r = requests.get(url, auth=self.auth)
if not r.ok:
raise Exception('Getting list of assets failed. Requested url: {}'.format(url))
json = r.json()
for x in json:
if name == x['name']:
return x['id']
if not json:
keep_searching = False
page_number = page_number + 1
return None
@retry
def delete_asset_by_id(self, asset_id, asset_name):
# https://developer.github.com/v3/repos/releases/#delete-a-release-asset
# DELETE /repos/:owner/:repo/releases/assets/:id
url = 'https://api.github.com/repos/{}/{}/releases/assets/{}'.format(
self.repo_owner,
self.repo,
asset_id
)
r = requests.delete(url, auth=self.auth)
if r.status_code == 204:
print('Asset removed: {}'.format(asset_name))
else:
raise Exception('Deletion of asset failed: {}'.format(asset_name))
def delete_asset_if_exists(self, release_id, asset_name):
asset_id = self.find_asset_id_by_name(release_id, asset_name)
if not asset_id:
print('Asset does not exist: {}'.format(asset_name))
return
self.delete_asset_by_id(asset_id, asset_name)
def upload_bzip_once(self, url, local_path):
headers = {'Content-Type': 'application/x-bzip2'}
file_to_upload = open(local_path, 'rb')
r = requests.post(url, data=file_to_upload, headers=headers, auth=self.auth)
if not r.ok:
raise Exception('Upload of file failed')
@retry
def upload_bzip(self, url, local_path, release_id, asset_name):
print('Uploading:\n {}\n -> {}'.format(local_path, url))
try:
self.upload_bzip_once(url, local_path)
except Exception as exc:
print('Exception caught while uploading, removing asset...')
self.delete_asset_if_exists(release_id, asset_name)
raise exc
def upload_raw_file(self, local_path):
tagname = 'cache'
release_id = self.get_release_by_tag(tagname)
# https://developer.github.com/v3/repos/releases/#upload-a-release-asset
# POST https://<upload_url>/repos/:owner/:repo/releases/:id/assets?name=foo.zip
asset_name = hashlib.sha1(open(local_path, 'rb').read()).hexdigest()
asset_name = asset_name + '.tar.bz2'
url = 'https://uploads.github.com/repos/{}/{}/releases/{}/assets?name={}'.format(
self.repo_owner,
self.repo,
release_id,
asset_name
)
self.upload_bzip(url, local_path, release_id, asset_name)
@retry
def create_new_file(self, local_path, github_path):
# https://developer.github.com/v3/repos/contents/#create-a-file
# PUT /repos/:owner/:repo/contents/:path
message = 'Uploading cache info\n\n'
message += 'Create file: {}\n\n'.format(github_path)
env_list = []
job_url = ''
if os.getenv('TRAVIS') == 'true':
# * https://docs.travis-ci.com/user/environment-variables/#Default-Environment-Variables
message += 'Travis:\n'
job_url = 'https://travis-ci.org/{}/jobs/{}'.format(
os.getenv('TRAVIS_REPO_SLUG'),
os.getenv('TRAVIS_JOB_ID')
)
env_list += [
'TRAVIS_BRANCH',
'TRAVIS_BUILD_ID',
'TRAVIS_BUILD_NUMBER',
'TRAVIS_JOB_ID',
'TRAVIS_JOB_NUMBER',
'TRAVIS_OS_NAME',
'TRAVIS_REPO_SLUG'
]
if os.getenv('APPVEYOR') == 'True':
# * http://www.appveyor.com/docs/environment-variables
message += 'AppVeyor:\n'
job_url = 'https://ci.appveyor.com/project/{}/{}/build/{}/job/{}'.format(
os.getenv('APPVEYOR_ACCOUNT_NAME'),
os.getenv('APPVEYOR_PROJECT_SLUG'),
os.getenv('APPVEYOR_BUILD_VERSION'),
os.getenv('APPVEYOR_JOB_ID')
)
env_list += [
'APPVEYOR_ACCOUNT_NAME',
'APPVEYOR_PROJECT_ID',
'APPVEYOR_PROJECT_NAME',
'APPVEYOR_PROJECT_SLUG',
'APPVEYOR_BUILD_ID',
'APPVEYOR_BUILD_NUMBER',
'APPVEYOR_BUILD_VERSION',
'APPVEYOR_JOB_ID',
'APPVEYOR_JOB_NAME',
'APPVEYOR_REPO_BRANCH'
]
# Store some info about build
for env_name in env_list:
env_value = os.getenv(env_name)
if env_value:
message += ' {}: {}\n'.format(env_name, env_value)
if job_url:
message += '\n Job URL: {}\n'.format(job_url)
url = 'https://api.github.com/repos/{}/{}/contents/{}'.format(
self.repo_owner,
self.repo,
github_path
)
content = base64.b64encode(open(local_path, 'rb').read()).decode()
put_data = {
'message': message,
'content': content
}
r = requests.put(url, data = json.dumps(put_data), auth=self.auth)
if not r.ok:
print('Put failed. Status code: {}'.format(r.status_code))
if r.status_code == 409:
raise Exception('Unavailable repository')
return r.ok
class CacheEntry:
def __init__(self, cache_done_path, cache_dir, temp_dir):
self.cache_dir = cache_dir
self.temp_dir = temp_dir
self.cache_raw = os.path.join(self.cache_dir, 'raw')
self.cache_meta = os.path.join(self.cache_dir, 'meta')
self.cache_done_path = cache_done_path
if not os.path.exists(cache_done_path):
raise Exception('File not exists: {}'.format(cache_done_path))
self.cache_done_dir = os.path.dirname(self.cache_done_path)
self.from_server = os.path.join(self.cache_done_dir, 'from.server')
self.cache_sha1 = os.path.join(self.cache_done_dir, 'cache.sha1')
self.internal_deps_id = os.path.split(self.cache_done_dir)[0]
self.type_id = os.path.split(self.internal_deps_id)[0]
self.args_id = os.path.split(self.type_id)[0]
self.archive_id = os.path.split(self.args_id)[0]
self.version = os.path.split(self.archive_id)[0]
self.component = os.path.split(self.version)[0]
if os.path.split(self.component)[1].startswith('__'):
self.package = os.path.split(self.component)[0]
else:
self.package = self.component
self.component = ''
self.toolchain_id = os.path.split(self.package)[0]
meta = os.path.split(self.toolchain_id)[0]
assert(meta == self.cache_meta)
def entry_from_server(self):
return os.path.exists(self.from_server)
def upload_raw(self, github):
sha1 = open(self.cache_sha1, 'r').read()
raw = os.path.join(self.cache_raw, sha1 + '.tar.bz2')
github.upload_raw_file(raw)
def upload_meta(self, github, cache_done):
self.upload_files_from_common_dir(github, self.cache_done_dir, cache_done)
self.upload_files_from_common_dir(github, self.internal_deps_id, cache_done)
self.upload_files_from_common_dir(github, self.type_id, cache_done)
self.upload_files_from_common_dir(github, self.args_id, cache_done)
self.upload_files_from_common_dir(github, self.archive_id, cache_done)
self.upload_files_from_common_dir(github, self.version, cache_done, check_is_empty=True)
if self.component != '':
self.upload_files_from_common_dir(github, self.component, cache_done, check_is_empty=True)
self.upload_files_from_common_dir(github, self.package, cache_done, check_is_empty=True)
self.upload_files_from_common_dir(github, self.toolchain_id, cache_done)
def upload_files_from_common_dir(self, github, dir_path, cache_done, check_is_empty=False):
to_upload = []
for i in os.listdir(dir_path):
if i == 'cmake.lock':
continue
if i == 'DONE':
continue
done_file = (i == 'CACHE.DONE') or (i == 'basic-deps.DONE')
if done_file and not cache_done:
continue
if not done_file and cache_done:
continue
i_fullpath = os.path.join(dir_path, i)
if os.path.isfile(i_fullpath):
to_upload.append(i_fullpath)
if not cache_done:
if check_is_empty and len(to_upload) != 0:
raise Exception('Expected no files in directory: {}'.format(dir_path))
if not check_is_empty and len(to_upload) == 0:
raise Exception('No files found in directory: {}'.format(dir_path))
for i in to_upload:
relative_path = i[len(self.cache_meta)+1:]
relative_unix_path = relative_path.replace('\\', '/') # convert windows path
expected_download_url = 'https://raw.githubusercontent.com/{}/{}/master/{}'.format(
github.repo_owner,
github.repo,
relative_unix_path
)
github_url = 'https://github.com/{}/{}/blob/master/{}'.format(
github.repo_owner,
github.repo,
relative_unix_path
)
print('Uploading file: {}'.format(relative_path))
ok = github.create_new_file(i, relative_unix_path)
if not ok:
print('Already exist')
temp_file = os.path.join(self.temp_dir, '__TEMP.FILE')
download_file(expected_download_url, temp_file, github.auth)
expected_content = open(i, 'rb').read()
downloaded_content = open(temp_file, 'rb').read()
expected_hash = hashlib.sha1(expected_content).hexdigest()
downloaded_hash = hashlib.sha1(downloaded_content).hexdigest()
os.remove(temp_file)
if expected_hash != downloaded_hash:
print('Hash mismatch:')
print(
' expected {} (content: {})'.format(
expected_hash, expected_content
)
)
print(
' downloaded {} (content: {})'.format(
downloaded_hash, downloaded_content
)
)
print('GitHub link: {}'.format(github_url))
raise Exception('Hash mismatch')
class Cache:
def __init__(self, cache_dir, temp_dir):
self.entries = self.create_entries(cache_dir, temp_dir)
self.remove_entries_from_server()
if not os.path.exists(temp_dir):
os.makedirs(temp_dir)
def create_entries(self, cache_dir, temp_dir):
print('Searching for CACHE.DONE files in directory:\n {}\n'.format(cache_dir))
entries = []
for root, dirs, files in os.walk(cache_dir):
for filename in files:
if filename == 'CACHE.DONE':
entries.append(CacheEntry(os.path.join(root, filename), cache_dir, temp_dir))
print('Found {} files:'.format(len(entries)))
for i in entries:
print(' {}'.format(i.cache_done_path))
print('')
return entries
def remove_entries_from_server(self):
new_entries = []
for i in self.entries:
if i.entry_from_server():
print('Remove entry (from server):\n {}'.format(i.cache_done_path))
else:
new_entries.append(i)
self.entries = new_entries
def upload_raw(self, github):
for i in self.entries:
i.upload_raw(github)
def upload_meta(self, github, cache_done):
for i in self.entries:
i.upload_meta(github, cache_done)
parser = argparse.ArgumentParser(
description='Script for uploading Hunter cache files to GitHub'
)
parser.add_argument(
'--username',
required=True,
help='Username'
)
parser.add_argument(
'--repo-owner',
required=True,
help='Repository owner'
)
parser.add_argument(
'--repo',
required=True,
help='Repository name'
)
parser.add_argument(
'--cache-dir',
required=True,
help='Hunter cache directory, e.g. /home/user/.hunter/_Base/Cache'
)
parser.add_argument(
'--temp-dir',
required=True,
help='Temporary directory where files will be downloaded for verification'
)
parser.add_argument(
'--skip-raw', action='store_true', help="Skip uploading of raw files"
)
args = parser.parse_args()
cache_dir = os.path.normpath(args.cache_dir)
if not os.path.isdir(cache_dir):
raise Exception('Not a directory: {}'.format(cache_dir))
if os.path.split(cache_dir)[1] != 'Cache':
raise Exception('Cache directory path should end with Cache: {}'.format(cache_dir))
cache = Cache(cache_dir, args.temp_dir)
password = os.getenv('GITHUB_USER_PASSWORD')
if password == '' or password is None:
raise Exception('Expected GITHUB_USER_PASSWORD environment variable')
github = Github(
username = args.username,
password = password,
repo_owner = args.repo_owner,
repo = args.repo
)
if args.skip_raw:
print('*** WARNING *** Skip uploading of raw files')
else:
cache.upload_raw(github)
cache.upload_meta(github, cache_done=False)
print('Uploading DONE files')
cache.upload_meta(github, cache_done=True) # Should be last
| bsd-2-clause | -5,787,685,296,679,103,000 | 30.706612 | 96 | 0.621856 | false | 3.345542 | false | false | false | 0.00997 |
zvolsky/platby | modules/export_csv.py | 2 | 9784 | #!/usr/bin/env python
# -*- coding: utf8 -*-
u'''
CSV export that lets Jirka's Společné aktivity system believe it is reading a CSV file from the bank.
Two types of records are exported:
"planned"
- a payment received by the bank with an unknown ss (perhaps temporarily; later we may hold these back too)
- a payment with an unknown ss made in cash on site is not supported yet (and probably never will be - the payer has to register)
- manually planned transfers - programmed as of 13.10. but not tested,
and we will not enable them unless there is real pressure to do so.
Should the option to plan an amount ever be finished, it must not change
the deposit. The deposit is changed only by the CSV export itself. The export is written
so that it checks the deposit balance at the moment of the export and, if there is not enough money,
it lowers the amount (down to a zero deposit) or stops the export (when the deposit is <= 0)
"owed"
- if there is a balance on the deposit and it turns out that Jirka's activities have a receivable,
the receivable is exported, reduced by the planned records exported just now
db.systab must contain (see the csv.py function init_systab):
kod: last_csv, hodnota: dd.mm.rrrr of the last export
kod: csv_czk , hodnota: nnnnnn.nn account balance
'''
url_zakaznici = 'http://www.spolecneaktivity.cz/administrace/komunity/980eb0fc-3a9d-43b6-8028-59bf28fbb67e/zakaznici'
import os
from datetime import datetime, date, timedelta, time
from time import sleep
from bs4 import BeautifulSoup
from spolecneaktivity_cz import sa_login, unformat_castka
from mz_wkasa_platby import Uc_sa
import vfp
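# Minimal sketch (added for illustration, not part of this module) of how the two
# systab rows described in the docstring above might be seeded with the web2py DAL;
# the real code lives in the csv.py controller's init_systab and may differ.
def _init_systab_sketch(db):
    if db(db.systab.kod == 'last_csv').count() == 0:
        db.systab.insert(kod='last_csv', hodnota='01.01.2013')  # date of the last export
    if db(db.systab.kod == 'csv_czk').count() == 0:
        db.systab.insert(kod='csv_czk', hodnota='0.00')         # account balance in CZK
    db.commit()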
def export_csv(db, app_folder):
rec_last_csv = db(db.systab.kod=='last_csv').select().first()
datum_od = datetime.strptime(rec_last_csv.hodnota, '%d.%m.%Y'
).date()+timedelta(1)
datum_do = date.today()-timedelta(1)
csv_nejpozdeji = datetime.combine(datum_do, time(23,59,59))
if datum_od>datum_do:
print "Od posledního generování musí uplynout alespoň jeden den."
return 0
vypis = ''
sumplus = summinus = 0
evidence = {} # key is auth_user.id, value is the total amount from the 379-11 records
pocet, vypis, sumplus, summinus = predej_planovane(
evidence, db, vypis, sumplus, summinus, csv_nejpozdeji)
pocet, vypis, sumplus, summinus = predej_dluzne(
evidence, db, vypis, sumplus, summinus, pocet, csv_nejpozdeji)
make_csv(db, vypis, sumplus, summinus, rec_last_csv, datum_od, datum_do,
app_folder)
return pocet
def predej_planovane(evidence, db, vypis, sumplus, summinus, csv_nejpozdeji):
'''based on the balance of the 379-11 account (planned for transfer to Jirka's system)'''
predat = db((db.pohyb.iddal==Uc_sa.pro_sa)&(db.pohyb.id_pokynu==None)
).select()
# because planning the transfer must not change the customer's deposit yet;
# it is changed only here, by the transfer itself
pocet = 0
for predat1 in predat:
vypis1, sumplus1, summinus1 = __predej1(
predat1, evidence, db, csv_nejpozdeji)
vypis += vypis1
sumplus += sumplus1
summinus += summinus1
pocet += 1
return pocet, vypis, sumplus, summinus
def __predej1(pohyb, evidence, db, csv_nejpozdeji):
if pohyb.idauth_user:
zakaznik = db(db.auth_user.id==pohyb.idauth_user).select(
db.auth_user.id, db.auth_user.ss).first()
ss = zakaznik.ss or pohyb.ss
evidence[zakaznik.id] = evidence.get(zakaznik.id, 0) + castka
else:
zakaznik = None
ss = pohyb.ss
if pohyb.iddal==Uc_sa.pro_sa: # transfer to SA planned in advance
if zakaznik:
castka = min(zakaznik.zaloha, pohyb.castka)
if castka<=0:
pohyb.update_record(castka=0, id_pokynu="nemá peníze")
return '', 0, 0 # cancelled for lack of money in the deposit
if castka!=pohyb.castka:
pohyb.update_record(castka=castka)
zakaznik.update_record(zaloha=zakaznik.zaloha-castka)
else:
castka = pohyb.castka
id_pohybu = db.pohyb.insert(
idauth_user=pohyb.idauth_user,
idma_dati=Uc_sa.pro_sa,
iddal=Uc_sa.oz_sa,
datum=datetime.now(),
castka=castka,
cislo_uctu=pohyb.cislo_uctu,
kod_banky=pohyb.kod_banky,
nazev_banky=pohyb.nazev_banky,
vs=pohyb.vs,
ss=ss,
ks=pohyb.ks,
id_pokynu=str(pohyb.id)
)
pohyb.update_record(id_pokynu=id_pohybu)
vypis1, sumplus1, summinus1 = __add_csv(pohyb, csv_nejpozdeji)
#db.commit() - the commit happens in the csv.py controller
return vypis1, sumplus1, summinus1
def predej_dluzne(evidence, db, vypis, sumplus, summinus, pocet,
csv_nejpozdeji):
#jirkovo = nacti_jirkovo_ze_souboru('jirkovo.html')
br = sa_login("Mirek Zv.", "miiirek1+1")
sleep(2)
jirkovo = br.open(url_zakaznici).read()
vfp.strtofile(jirkovo, os.path.join(os.getcwd(),
'applications', 'platby', 'downloads', 'zakaznici.html'))
# slight duplication with controllers/platby.py, where this file is parsed
# to find out the current deposit
soup = BeautifulSoup(jirkovo)
for zakaznik in soup.table('tr'):
sloupce = zakaznik('td')
if len(sloupce): # the first row (the header) has no <td> cells
planovano = unformat_castka(sloupce[-1].string)
neuhrazeno = unformat_castka(sloupce[-2].string)
zaloha = unformat_castka(sloupce[-4].string)
chybi = planovano + neuhrazeno - zaloha
if chybi>0:
symbol = str(sloupce[0].a.string).strip().lstrip('0')
wk_zakaznik = db(db.auth_user.ss==symbol).select().first()
if wk_zakaznik and wk_zakaznik.zaloha>0:
jeste_chybi = chybi - evidence.get(wk_zakaznik.id, 0)
# minus what we have just paid out to them in predej_planovane()
if jeste_chybi:
fl_zaloha = float(wk_zakaznik.zaloha)
popis = (u'z sa.cz poptával %s Kč' % jeste_chybi
) if (jeste_chybi>fl_zaloha) else ''
posleme_mu = min(jeste_chybi, fl_zaloha)
id_pohybu = db.pohyb.insert(
idauth_user=wk_zakaznik.id,
idma_dati=Uc_sa.oz,
iddal=Uc_sa.oz_sa,
datum=datetime.now(),
castka=posleme_mu,
ss=symbol,
popis=popis
)
wk_zakaznik.update_record(zaloha=fl_zaloha-posleme_mu)
pohyb = db(db.pohyb.id==id_pohybu).select().first()
vypis1, sumplus1, summinus1 = __add_csv(
pohyb, csv_nejpozdeji)
vypis += vypis1
sumplus += sumplus1
summinus += summinus1
#db.commit() - the commit happens in the csv.py controller
pocet += 1
return pocet, vypis, sumplus, summinus
def __add_csv(pohyb, csv_nejpozdeji):
'''writes a single transaction into the csv
'''
#0;06.09.2013;85,00;670100-2207318349;6210;;2550;425;PAVEL KUBIŠTA;Bezhotovostní příjem;;;BRE Bank S.A., organizační složka podniku;
vypis1 = (
'0;%(datum)s;%(castka)s;%(ucet)s;%(banka)s;%(ks)s;%(vs)s;%(ss)s;%(ss)s;%(bhp)s;;;banka;\n'
% dict(datum=min(pohyb.datum, csv_nejpozdeji)
.strftime('%d.%m.%Y'),
castka=('%0.2f' % pohyb.castka).replace('.',','),
ucet=pohyb.cislo_uctu or '',
banka=pohyb.kod_banky or '',
bhp=u'Bezhotovostní příjem'.encode('cp1250'),
ks=pohyb.ks or '',
vs=pohyb.vs or '',
ss=pohyb.ss or ''))
sumplus1 = float(pohyb.castka) if pohyb.castka>0 else 0.
summinus1 = float(pohyb.castka) if pohyb.castka<0 else 0.
return vypis1, sumplus1, summinus1
def make_csv(db, vypis, sumplus, summinus, rec_last_csv, datum_od, datum_do,
app_folder):
maska = vfp.filetostr(os.path.join(os.getcwd(),
'applications', 'platby', 'others', 'maska.csv'))
rec_csv_czk = db(db.systab.kod=='csv_czk').select().first()
vychozi = float(rec_csv_czk.hodnota)
koncova = vychozi + sumplus + summinus
vfp.strtofile(maska % dict(
nyni=datetime.now().strftime('%d.%m.%Y %H:%M:%S'),
od=_no_zeros(datum_od),
do=_no_zeros(datum_do),
vychozi=_form_castka(vychozi),
koncova=_form_castka(koncova),
prijmy=_form_castka(sumplus),
vydaje=_form_castka(summinus),
zaznamy=vypis,
suma=_form_castka(sumplus+summinus)
), os.path.join(app_folder, 'import_wk',
datum_od.strftime('%Y_%m%d')+datum_do.strftime('_%m%d')+'.csv'))
rec_csv_czk.update_record(hodnota=str(koncova))
rec_last_csv.update_record(hodnota=datum_do.strftime('%d.%m.%Y'))
#db.commit() - the commit happens in the csv.py controller
def _no_zeros(datum):
return datum.strftime('%d.%m.%Y').replace('.0','.').lstrip('0')
def _form_castka(castka):
return ('%0.2f' % castka).replace('.',',')
| agpl-3.0 | 134,352,567,931,373,810 | 44.650485 | 141 | 0.568262 | false | 2.605748 | false | false | false | 0.008741 |
klassenjs/geomoose-js | util/createSprite.py | 3 | 3696 | #!/usr/bin/python
#
# Copyright (c) 2009-2012, Dan "Ducky" Little & GeoMOOSE.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Yay, PIL!!
import Image
import sys
import os
# All of these images are assumed to be 20x20.
# It's fine if they are smaller because they'll just get more padding,
# larger than 20x20 and it'll get a little trickier.
imagesRoot = '../images/';
spriteFolders = ['silk/', 'geosilk/img/'];
try:
os.remove(imagesRoot+'all.png')
except:
pass
all_files = list()
for path in spriteFolders:
imgs = os.listdir(imagesRoot+path)
imgs = map(lambda x: imagesRoot+path+x, imgs)
all_files = all_files + imgs
images = list()
for f in all_files:
# this test should be better... but I'm lazy.
if((f.find('.png') > 0 or f.find('.gif') > 0) and f.find('-selected') < 0):
images.append(f)
images.sort()
sprite = Image.new('RGBA', (40,len(images)*30), (0,0,0,0))
i = 0
cssHeader = """
/* Notice:
*
* This file is generated (along with ../images/all.png)
* from the images in images/toolbar by createSprite.py
* in the ../util directory
*/
.sprite-control {
background-image: url('../images/all.png');
background-repeat: no-repeat;
height: 18px; /* nee, < 2.6 20px */
width: 20px;
display: inline-block;
cursor: pointer;
background-position: 0px -%dpx; /* This should default to the 'find' icon */
/* IE hacks for the sprites. */
*zoom: 1;
*display: inline;
}
"""
cssTemplate = """ .sprite-control-%s { background-position: 0px -%dpx; } """
cssSelectedTemplate = """ .sprite-control-%s-selected { background-position: -20px -%dpx !important; } """
cssText = "/*\n" + open('../LICENSE', 'r').read() + '*/\n\n' + cssHeader
height = (len(images)+1)*30+10
findPosition = 0
for image in images:
imagePath = image.split('/')
imageName = imagePath[-1].split('.')[0]
selectedImage = image
for ext in ['gif','png','jpg']:
selectedImage = selectedImage.replace('.'+ext,'-selected.'+ext)
if(not(os.path.isfile(selectedImage))):
selectedImage = image
icon = Image.open(image)
selected_icon = Image.open(selectedImage)
offsetLeft = (20 - icon.size[0]) / 2
offsetHeight = (20 - icon.size[1]) / 2
sprite.paste(icon, (offsetLeft, i*30+10+offsetHeight))
offsetLeft = 20 + (20 - selected_icon.size[0]) / 2
offsetHeight = (20 - selected_icon.size[1]) / 2
sprite.paste(selected_icon, (offsetLeft, i*30+10+offsetHeight))
i+=1
h = height-(height-((i-1)*30))+10
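# (clarifying note, added) the expression above simplifies to (i-1)*30 + 10, i.e. the
# vertical offset this icon was pasted at, so the CSS background-position matches the sprite.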
cssText += cssTemplate % (imageName , h)
cssText += cssSelectedTemplate % (imageName, h)
cssText += '\n'
if(imageName == 'find'):
findPosition = h
#print cssTemplate % (imageName , ((i+1)*30+10))
print cssText % findPosition
sprite.save(imagesRoot+'all.png')
| mit | -1,007,491,008,696,859,800 | 28.806452 | 106 | 0.693723 | false | 3.153584 | false | false | false | 0.018669 |
gunthercox/ChatterBot | chatterbot/logic/unit_conversion.py | 1 | 5824 | from chatterbot.logic import LogicAdapter
from chatterbot.conversation import Statement
from chatterbot.exceptions import OptionalDependencyImportError
from chatterbot import languages
from chatterbot import parsing
from mathparse import mathparse
import re
class UnitConversion(LogicAdapter):
"""
The UnitConversion logic adapter parse inputs to convert values
between several metric units.
For example:
User: 'How many meters are in one kilometer?'
Bot: '1000.0'
:kwargs:
* *language* (``object``) --
The language is set to ``chatterbot.languages.ENG`` for English by default.
"""
def __init__(self, chatbot, **kwargs):
super().__init__(chatbot, **kwargs)
try:
from pint import UnitRegistry
except ImportError:
message = (
'Unable to import "pint".\n'
'Please install "pint" before using the UnitConversion logic adapter:\n'
'pip3 install pint'
)
raise OptionalDependencyImportError(message)
self.language = kwargs.get('language', languages.ENG)
self.cache = {}
self.patterns = [
(
re.compile(r'''
(([Hh]ow\s+many)\s+
(?P<target>\S+)\s+ # meter, celsius, hours
((are)*\s*in)\s+
(?P<number>([+-]?\d+(?:\.\d+)?)|(a|an)|(%s[-\s]?)+)\s+
(?P<from>\S+)\s*) # meter, celsius, hours
''' % (parsing.numbers),
(re.VERBOSE | re.IGNORECASE)
),
lambda m: self.handle_matches(m)
),
(
re.compile(r'''
((?P<number>([+-]?\d+(?:\.\d+)?)|(%s[-\s]?)+)\s+
(?P<from>\S+)\s+ # meter, celsius, hours
(to)\s+
(?P<target>\S+)\s*) # meter, celsius, hours
''' % (parsing.numbers),
(re.VERBOSE | re.IGNORECASE)
),
lambda m: self.handle_matches(m)
),
(
re.compile(r'''
((?P<number>([+-]?\d+(?:\.\d+)?)|(a|an)|(%s[-\s]?)+)\s+
(?P<from>\S+)\s+ # meter, celsius, hours
(is|are)\s+
(how\s+many)*\s+
(?P<target>\S+)\s*) # meter, celsius, hours
''' % (parsing.numbers),
(re.VERBOSE | re.IGNORECASE)
),
lambda m: self.handle_matches(m)
)
]
self.unit_registry = UnitRegistry()
def get_unit(self, unit_variations):
"""
Get the first match unit metric object supported by pint library
given a variation of unit metric names (Ex:['HOUR', 'hour']).
:param unit_variations: A list of strings with names of units
:type unit_variations: list of str
"""
for unit in unit_variations:
try:
return getattr(self.unit_registry, unit)
except Exception:
continue
return None
def get_valid_units(self, from_unit, target_unit):
"""
Returns the first match `pint.unit.Unit` object for from_unit and
target_unit strings from a possible variation of metric unit names
supported by pint library.
:param from_unit: source metric unit
:type from_unit: str
:param target_unit: target metric unit
:type target_unit: str
"""
from_unit_variations = [from_unit.lower(), from_unit.upper()]
target_unit_variations = [target_unit.lower(), target_unit.upper()]
from_unit = self.get_unit(from_unit_variations)
target_unit = self.get_unit(target_unit_variations)
return from_unit, target_unit
def handle_matches(self, match):
"""
Returns a response statement from a matched input statement.
:param match: It is a valid matched pattern from the input statement
:type: `_sre.SRE_Match`
"""
response = Statement(text='')
from_parsed = match.group("from")
target_parsed = match.group("target")
n_statement = match.group("number")
if n_statement == 'a' or n_statement == 'an':
n_statement = '1.0'
n = mathparse.parse(n_statement, self.language.ISO_639.upper())
from_parsed, target_parsed = self.get_valid_units(from_parsed, target_parsed)
if from_parsed is None or target_parsed is None:
response.confidence = 0.0
else:
from_value = self.unit_registry.Quantity(float(n), from_parsed)
target_value = from_value.to(target_parsed)
response.confidence = 1.0
response.text = str(target_value.magnitude)
return response
def can_process(self, statement):
response = self.process(statement)
self.cache[statement.text] = response
return response.confidence == 1.0
def process(self, statement, additional_response_selection_parameters=None):
response = Statement(text='')
input_text = statement.text
try:
# Use the result cached by the process method if it exists
if input_text in self.cache:
response = self.cache[input_text]
self.cache = {}
return response
for pattern, func in self.patterns:
p = pattern.match(input_text)
if p is not None:
response = func(p)
if response.confidence == 1.0:
break
except Exception:
response.confidence = 0.0
finally:
return response
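# Usage sketch (added; assumes the standard ChatterBot setup, which is not shown here):
#
#   from chatterbot import ChatBot
#   bot = ChatBot('units', logic_adapters=['chatterbot.logic.UnitConversion'])
#   bot.get_response('How many meters are in two kilometers?')  # -> '2000.0'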
| bsd-3-clause | -8,994,046,293,773,082,000 | 34.512195 | 88 | 0.527129 | false | 4.217234 | false | false | false | 0.000687 |
python-provy/provy | tests/unit/more/centos/package/test_yum.py | 1 | 8675 | from datetime import datetime, timedelta
import sys
from mock import patch, MagicMock
from nose.tools import istest
from provy.more.centos import YumRole, PackageNotFound
from provy.more.centos.package import yum
from tests.unit.tools.helpers import ProvyTestCase
class YumRoleTest(ProvyTestCase):
def setUp(self):
super(YumRoleTest, self).setUp()
self.role = YumRole(prov=None, context={})
@istest
def installs_necessary_packages_to_provision(self):
with self.mock_role_methods('ensure_up_to_date', 'ensure_package_installed'):
self.role.provision()
self.role.ensure_up_to_date.assert_called_once_with()
self.role.ensure_package_installed.assert_called_once_with('curl')
@istest
def ensures_gpg_key_is_added(self):
with self.execute_mock():
self.role.ensure_gpg_key('http://some.repo')
self.role.execute.assert_called_once_with('curl http://some.repo | rpm --import -', sudo=True, stdout=False)
@istest
def checks_that_repository_exists_in_yum_repos(self):
with self.execute_mock() as execute:
execute.return_value = '''
some
repo
foo-bar
'''
result = self.role.has_source('foo-bar')
self.assertTrue(result)
execute.assert_called_once_with("cat /etc/yum.repos.d/CentOS-Base.repo", sudo=True, stdout=False)
@istest
def checks_that_repository_doesnt_exist_in_apt_source(self):
with self.execute_mock() as execute:
execute.return_value = 'some repo'
result = self.role.has_source('foo-bar')
self.assertFalse(result)
@istest
def ensures_a_source_string_is_added_to_the_repos(self):
source_line = 'foo-bar-repo'
with self.execute_mock() as execute, self.mock_role_method('has_source') as has_source:
has_source.return_value = False
self.assertTrue(self.role.ensure_yum_source(source_line))
self.assertTrue(has_source.called)
execute.assert_called_once_with('echo "{}" >> /etc/yum.repos.d/CentOS-Base.repo'.format(source_line), sudo=True, stdout=False)
@istest
def doesnt_add_source_if_it_already_exists(self):
source_line = 'foo-bar-repo'
with self.execute_mock() as execute, self.mock_role_method('has_source') as has_source:
has_source.return_value = True
self.assertFalse(self.role.ensure_yum_source(source_line))
self.assertFalse(execute.called)
@istest
def gets_update_date_file_as_a_property(self):
with self.mock_role_method('remote_temp_dir'):
self.role.remote_temp_dir.return_value = '/foo/bar'
self.assertEqual(self.role.update_date_file, '/foo/bar/last_yum_update')
@istest
def stores_update_date(self):
with self.mock_role_methods('update_date_file', 'execute'), patch.object(yum, 'datetime') as mock_datetime:
self.role.update_date_file = '/foo/bar'
when = datetime.strptime('2013-01-01', '%Y-%m-%d')
mock_datetime.now.return_value = when
self.role.store_update_date()
self.role.execute.assert_called_once_with('echo "01-01-13 00:00:00" > /foo/bar', stdout=False)
@istest
def gets_last_update_date(self):
with self.mock_role_methods('remote_exists', 'update_date_file', 'read_remote_file'):
self.role.update_date_file = '/foo/bar'
self.role.remote_exists.return_value = True
self.role.read_remote_file.return_value = '01-01-13 00:00:00'
result = self.role.get_last_update_date()
self.assertEqual(result, datetime.strptime('2013-01-01', '%Y-%m-%d'))
self.role.remote_exists.assert_called_once_with(self.role.update_date_file)
self.role.read_remote_file.assert_called_once_with(self.role.update_date_file)
@istest
def gets_none_as_last_update_if_there_was_no_update_yet(self):
with self.mock_role_methods('remote_exists', 'update_date_file', 'read_remote_file'):
self.role.update_date_file = '/foo/bar'
self.role.remote_exists.return_value = False
result = self.role.get_last_update_date()
self.assertIsNone(result)
self.assertFalse(self.role.read_remote_file.called)
@istest
def updates_yum_when_passed_time_limit(self):
with patch.object(yum, 'datetime') as mock_datetime, self.mock_role_methods('get_last_update_date', 'force_update'):
now = datetime.strptime('2013-01-01', '%Y-%m-%d')
then = now - timedelta(minutes=31)
mock_datetime.now.return_value = now
self.role.get_last_update_date.return_value = then
self.role.ensure_up_to_date()
self.role.get_last_update_date.assert_called_once_with()
self.role.force_update.assert_called_once_with()
@istest
def doesnt_update_if_not_passed_from_time_limit(self):
with patch.object(yum, 'datetime') as mock_datetime, self.mock_role_methods('get_last_update_date', 'force_update'):
now = datetime.strptime('2013-01-01', '%Y-%m-%d')
then = now - timedelta(minutes=29)
mock_datetime.now.return_value = now
self.role.get_last_update_date.return_value = then
self.role.ensure_up_to_date()
self.assertFalse(self.role.force_update.called)
@istest
def forces_an_update(self):
with self.mock_role_methods('execute', 'store_update_date'):
self.role.force_update()
self.assertTrue(self.role.context['yum-up-to-date'])
self.role.execute.assert_called_once_with('yum clean all', stdout=False, sudo=True)
self.role.store_update_date.assert_called_once_with()
@istest
def checks_that_a_package_is_installed(self):
with self.execute_mock() as execute:
execute.return_value = '''yes'''
self.assertTrue(self.role.is_package_installed('foo'))
execute.assert_called_once_with('rpm -qa foo', sudo=True, stdout=False)
@istest
def checks_that_a_package_is_not_installed(self):
with self.execute_mock() as execute:
execute.return_value = ''''''
self.assertFalse(self.role.is_package_installed('baz'))
execute.assert_called_once_with('rpm -qa baz', sudo=True, stdout=False)
@istest
def checks_that_a_package_exists(self):
with self.execute_mock() as execute:
self.assertTrue(self.role.package_exists('python'))
execute.assert_called_with('yum info -q python', stdout=False)
@istest
def checks_that_a_package_doesnt_exist(self):
with self.execute_mock() as execute:
execute.return_value = False
self.assertFalse(self.role.package_exists('phyton'))
execute.assert_called_with('yum info -q phyton', stdout=False)
@istest
def traps_sys_exit_when_checking_if_a_package_exists(self):
def exit(*args, **kwargs):
sys.exit(1)
execute = MagicMock(side_effect=exit)
with patch('provy.core.roles.Role.execute', execute):
self.assertFalse(self.role.package_exists('phyton'))
@istest
def checks_if_a_package_exists_before_installing(self):
with self.execute_mock() as execute, self.mock_role_methods('package_exists', 'is_package_installed') as (package_exists, is_package_installed):
is_package_installed.return_value = False
package_exists.return_value = True
result = self.role.ensure_package_installed('python')
self.assertTrue(result)
self.assertTrue(package_exists.called)
execute.assert_called_with('yum install -y python', stdout=False, sudo=True)
@istest
def fails_to_install_package_if_it_doesnt_exist(self):
with self.execute_mock(), self.mock_role_methods('package_exists', 'is_package_installed') as (package_exists, is_package_installed):
is_package_installed.return_value = False
package_exists.return_value = False
self.assertRaises(PackageNotFound, self.role.ensure_package_installed, 'phyton')
self.assertTrue(package_exists.called)
@istest
def doesnt_install_package_if_already_installed(self):
with self.mock_role_method('is_package_installed'):
self.role.is_package_installed.return_value = True
result = self.role.ensure_package_installed('python')
self.assertFalse(result)
| mit | -2,154,357,719,880,740,400 | 38.611872 | 152 | 0.635274 | false | 3.590646 | true | false | false | 0.002651 |
firmlyjin/brython | www/tests/unittests/test/test_genericpath.py | 26 | 12381 | """
Tests common to genericpath, macpath, ntpath and posixpath
"""
import genericpath
import os
import sys
import unittest
import warnings
from test import support
def safe_rmdir(dirname):
try:
os.rmdir(dirname)
except OSError:
pass
class GenericTest:
common_attributes = ['commonprefix', 'getsize', 'getatime', 'getctime',
'getmtime', 'exists', 'isdir', 'isfile']
attributes = []
def test_no_argument(self):
for attr in self.common_attributes + self.attributes:
with self.assertRaises(TypeError):
getattr(self.pathmodule, attr)()
raise self.fail("{}.{}() did not raise a TypeError"
.format(self.pathmodule.__name__, attr))
def test_commonprefix(self):
commonprefix = self.pathmodule.commonprefix
self.assertEqual(
commonprefix([]),
""
)
self.assertEqual(
commonprefix(["/home/swenson/spam", "/home/swen/spam"]),
"/home/swen"
)
self.assertEqual(
commonprefix(["/home/swen/spam", "/home/swen/eggs"]),
"/home/swen/"
)
self.assertEqual(
commonprefix(["/home/swen/spam", "/home/swen/spam"]),
"/home/swen/spam"
)
self.assertEqual(
commonprefix(["home:swenson:spam", "home:swen:spam"]),
"home:swen"
)
self.assertEqual(
commonprefix([":home:swen:spam", ":home:swen:eggs"]),
":home:swen:"
)
self.assertEqual(
commonprefix([":home:swen:spam", ":home:swen:spam"]),
":home:swen:spam"
)
self.assertEqual(
commonprefix([b"/home/swenson/spam", b"/home/swen/spam"]),
b"/home/swen"
)
self.assertEqual(
commonprefix([b"/home/swen/spam", b"/home/swen/eggs"]),
b"/home/swen/"
)
self.assertEqual(
commonprefix([b"/home/swen/spam", b"/home/swen/spam"]),
b"/home/swen/spam"
)
self.assertEqual(
commonprefix([b"home:swenson:spam", b"home:swen:spam"]),
b"home:swen"
)
self.assertEqual(
commonprefix([b":home:swen:spam", b":home:swen:eggs"]),
b":home:swen:"
)
self.assertEqual(
commonprefix([b":home:swen:spam", b":home:swen:spam"]),
b":home:swen:spam"
)
testlist = ['', 'abc', 'Xbcd', 'Xb', 'XY', 'abcd',
'aXc', 'abd', 'ab', 'aX', 'abcX']
for s1 in testlist:
for s2 in testlist:
p = commonprefix([s1, s2])
self.assertTrue(s1.startswith(p))
self.assertTrue(s2.startswith(p))
if s1 != s2:
n = len(p)
self.assertNotEqual(s1[n:n+1], s2[n:n+1])
def test_getsize(self):
f = open(support.TESTFN, "wb")
try:
f.write(b"foo")
f.close()
self.assertEqual(self.pathmodule.getsize(support.TESTFN), 3)
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_time(self):
f = open(support.TESTFN, "wb")
try:
f.write(b"foo")
f.close()
f = open(support.TESTFN, "ab")
f.write(b"bar")
f.close()
f = open(support.TESTFN, "rb")
d = f.read()
f.close()
self.assertEqual(d, b"foobar")
self.assertLessEqual(
self.pathmodule.getctime(support.TESTFN),
self.pathmodule.getmtime(support.TESTFN)
)
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_exists(self):
self.assertIs(self.pathmodule.exists(support.TESTFN), False)
f = open(support.TESTFN, "wb")
try:
f.write(b"foo")
f.close()
self.assertIs(self.pathmodule.exists(support.TESTFN), True)
if not self.pathmodule == genericpath:
self.assertIs(self.pathmodule.lexists(support.TESTFN),
True)
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_exists_fd(self):
r, w = os.pipe()
try:
self.assertTrue(self.pathmodule.exists(r))
finally:
os.close(r)
os.close(w)
self.assertFalse(self.pathmodule.exists(r))
def test_isdir(self):
self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
f = open(support.TESTFN, "wb")
try:
f.write(b"foo")
f.close()
self.assertIs(self.pathmodule.isdir(support.TESTFN), False)
os.remove(support.TESTFN)
os.mkdir(support.TESTFN)
self.assertIs(self.pathmodule.isdir(support.TESTFN), True)
os.rmdir(support.TESTFN)
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
safe_rmdir(support.TESTFN)
def test_isfile(self):
self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
f = open(support.TESTFN, "wb")
try:
f.write(b"foo")
f.close()
self.assertIs(self.pathmodule.isfile(support.TESTFN), True)
os.remove(support.TESTFN)
os.mkdir(support.TESTFN)
self.assertIs(self.pathmodule.isfile(support.TESTFN), False)
os.rmdir(support.TESTFN)
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
safe_rmdir(support.TESTFN)
class TestGenericTest(GenericTest, unittest.TestCase):
# Issue 16852: GenericTest can't inherit from unittest.TestCase
# for test discovery purposes; CommonTest inherits from GenericTest
# and is only meant to be inherited by others.
pathmodule = genericpath
# Following TestCase is not supposed to be run from test_genericpath.
# It is inherited by other test modules (macpath, ntpath, posixpath).
class CommonTest(GenericTest):
common_attributes = GenericTest.common_attributes + [
# Properties
'curdir', 'pardir', 'extsep', 'sep',
'pathsep', 'defpath', 'altsep', 'devnull',
# Methods
'normcase', 'splitdrive', 'expandvars', 'normpath', 'abspath',
'join', 'split', 'splitext', 'isabs', 'basename', 'dirname',
'lexists', 'islink', 'ismount', 'expanduser', 'normpath', 'realpath',
]
def test_normcase(self):
normcase = self.pathmodule.normcase
# check that normcase() is idempotent
for p in ["FoO/./BaR", b"FoO/./BaR"]:
p = normcase(p)
self.assertEqual(p, normcase(p))
self.assertEqual(normcase(''), '')
self.assertEqual(normcase(b''), b'')
# check that normcase raises a TypeError for invalid types
for path in (None, True, 0, 2.5, [], bytearray(b''), {'o','o'}):
self.assertRaises(TypeError, normcase, path)
def test_splitdrive(self):
# splitdrive for non-NT paths
splitdrive = self.pathmodule.splitdrive
self.assertEqual(splitdrive("/foo/bar"), ("", "/foo/bar"))
self.assertEqual(splitdrive("foo:bar"), ("", "foo:bar"))
self.assertEqual(splitdrive(":foo:bar"), ("", ":foo:bar"))
self.assertEqual(splitdrive(b"/foo/bar"), (b"", b"/foo/bar"))
self.assertEqual(splitdrive(b"foo:bar"), (b"", b"foo:bar"))
self.assertEqual(splitdrive(b":foo:bar"), (b"", b":foo:bar"))
def test_expandvars(self):
if self.pathmodule.__name__ == 'macpath':
self.skipTest('macpath.expandvars is a stub')
expandvars = self.pathmodule.expandvars
with support.EnvironmentVarGuard() as env:
env.clear()
env["foo"] = "bar"
env["{foo"] = "baz1"
env["{foo}"] = "baz2"
self.assertEqual(expandvars("foo"), "foo")
self.assertEqual(expandvars("$foo bar"), "bar bar")
self.assertEqual(expandvars("${foo}bar"), "barbar")
self.assertEqual(expandvars("$[foo]bar"), "$[foo]bar")
self.assertEqual(expandvars("$bar bar"), "$bar bar")
self.assertEqual(expandvars("$?bar"), "$?bar")
self.assertEqual(expandvars("${foo}bar"), "barbar")
self.assertEqual(expandvars("$foo}bar"), "bar}bar")
self.assertEqual(expandvars("${foo"), "${foo")
self.assertEqual(expandvars("${{foo}}"), "baz1}")
self.assertEqual(expandvars("$foo$foo"), "barbar")
self.assertEqual(expandvars("$bar$bar"), "$bar$bar")
self.assertEqual(expandvars(b"foo"), b"foo")
self.assertEqual(expandvars(b"$foo bar"), b"bar bar")
self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
self.assertEqual(expandvars(b"$[foo]bar"), b"$[foo]bar")
self.assertEqual(expandvars(b"$bar bar"), b"$bar bar")
self.assertEqual(expandvars(b"$?bar"), b"$?bar")
self.assertEqual(expandvars(b"${foo}bar"), b"barbar")
self.assertEqual(expandvars(b"$foo}bar"), b"bar}bar")
self.assertEqual(expandvars(b"${foo"), b"${foo")
self.assertEqual(expandvars(b"${{foo}}"), b"baz1}")
self.assertEqual(expandvars(b"$foo$foo"), b"barbar")
self.assertEqual(expandvars(b"$bar$bar"), b"$bar$bar")
def test_abspath(self):
self.assertIn("foo", self.pathmodule.abspath("foo"))
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.assertIn(b"foo", self.pathmodule.abspath(b"foo"))
# Abspath returns bytes when the arg is bytes
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
for path in (b'', b'foo', b'f\xf2\xf2', b'/foo', b'C:\\'):
self.assertIsInstance(self.pathmodule.abspath(path), bytes)
def test_realpath(self):
self.assertIn("foo", self.pathmodule.realpath("foo"))
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
self.assertIn(b"foo", self.pathmodule.realpath(b"foo"))
def test_normpath_issue5827(self):
# Make sure normpath preserves unicode
for path in ('', '.', '/', '\\', '///foo/.//bar//'):
self.assertIsInstance(self.pathmodule.normpath(path), str)
def test_abspath_issue3426(self):
# Check that abspath returns unicode when the arg is unicode
# with both ASCII and non-ASCII cwds.
abspath = self.pathmodule.abspath
for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
self.assertIsInstance(abspath(path), str)
unicwd = '\xe7w\xf0'
try:
os.fsencode(unicwd)
except (AttributeError, UnicodeEncodeError):
# FS encoding is probably ASCII
pass
else:
with support.temp_cwd(unicwd):
for path in ('', 'fuu', 'f\xf9\xf9', '/fuu', 'U:\\'):
self.assertIsInstance(abspath(path), str)
def test_nonascii_abspath(self):
if (support.TESTFN_UNDECODABLE
# Mac OS X denies the creation of a directory with an invalid
# UTF-8 name. Windows allows to create a directory with an
# arbitrary bytes name, but fails to enter this directory
# (when the bytes name is used).
and sys.platform not in ('win32', 'darwin')):
name = support.TESTFN_UNDECODABLE
elif support.TESTFN_NONASCII:
name = support.TESTFN_NONASCII
else:
self.skipTest("need support.TESTFN_NONASCII")
# Test non-ASCII, non-UTF8 bytes in the path.
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
with support.temp_cwd(name):
self.test_abspath()
if __name__=="__main__":
unittest.main()
| bsd-3-clause | -2,680,276,137,108,930,600 | 36.18018 | 77 | 0.552298 | false | 3.888505 | true | false | false | 0.000727 |
oma-deeplearning/deeplearning | python/CRBM.py | 1 | 1853 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
RBM w/ continuous-valued inputs (Linear Energy)
References :
- Y. Bengio, P. Lamblin, D. Popovici, H. Larochelle: Greedy Layer-Wise
Training of Deep Networks, Advances in Neural Information Processing
Systems 19, 2007
"""
import sys
import numpy
from RBM import RBM
from utils import *
class CRBM(RBM):
def propdown(self, h):
pre_activation = numpy.dot(h, self.W.T) + self.vbias
return pre_activation
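# Added note: with a linear energy, the conditional of a visible unit on [0, 1] given h
# is p(v|h) proportional to exp(a*v) with a = h.W^T + vbias; its mean is
# 1/(1 - exp(-a)) - 1/a, and a sample is drawn by inverting the CDF:
# v = log(1 - U*(1 - exp(a))) / a, as implemented in sample_v_given_h below.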
def sample_v_given_h(self, h0_sample):
a_h = self.propdown(h0_sample)
en = numpy.exp(-a_h)
ep = numpy.exp(a_h)
v1_mean = 1 / (1 - en) - 1 / a_h
U = numpy.array(self.numpy_rng.uniform(
low=0,
high=1,
size=v1_mean.shape))
v1_sample = numpy.log((1 - U * (1 - ep))) / a_h
return [v1_mean, v1_sample]
def test_crbm(learning_rate=0.1, k=1, training_epochs=1000):
data = numpy.array([[0.4, 0.5, 0.5, 0., 0., 0.],
[0.5, 0.3, 0.5, 0., 0., 0.],
[0.4, 0.5, 0.5, 0., 0., 0.],
[0., 0., 0.5, 0.3, 0.5, 0.],
[0., 0., 0.5, 0.4, 0.5, 0.],
[0., 0., 0.5, 0.5, 0.5, 0.]])
rng = numpy.random.RandomState(123)
# construct CRBM
rbm = CRBM(input=data, n_visible=6, n_hidden=5, numpy_rng=rng)
# train
for epoch in range(training_epochs):
rbm.contrastive_divergence(lr=learning_rate, k=k)
# cost = rbm.get_reconstruction_cross_entropy()
# print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost
# test
v = numpy.array([[0.5, 0.5, 0., 0., 0., 0.],
[0., 0., 0., 0.5, 0.5, 0.]])
print(rbm.reconstruct(v))
if __name__ == "__main__":
test_crbm()
| gpl-2.0 | -1,301,735,656,881,664,300 | 24.383562 | 74 | 0.504047 | false | 2.820396 | false | false | false | 0.002698 |
jbedorf/tensorflow | tensorflow/python/tools/freeze_graph_test.py | 3 | 13439 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the graph freezing tool."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
from tensorflow.core.example import example_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.tools import freeze_graph
from tensorflow.python.training import saver as saver_lib
class FreezeGraphTest(test_util.TensorFlowTestCase):
def _testFreezeGraph(self, saver_write_version):
checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
checkpoint_state_name = "checkpoint_state"
input_graph_name = "input_graph.pb"
output_graph_name = "output_graph.pb"
# We'll create an input graph that has a single variable containing 1.0,
# and that then multiplies it by 2.
with ops.Graph().as_default():
variable_node = variables.VariableV1(1.0, name="variable_node")
output_node = math_ops.multiply(variable_node, 2.0, name="output_node")
sess = session.Session()
init = variables.global_variables_initializer()
sess.run(init)
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
saver = saver_lib.Saver(write_version=saver_write_version)
checkpoint_path = saver.save(
sess,
checkpoint_prefix,
global_step=0,
latest_filename=checkpoint_state_name)
graph_io.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)
# We save out the graph to disk, and then call the const conversion
# routine.
input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
input_saver_def_path = ""
input_binary = False
output_node_names = "output_node"
restore_op_name = "save/restore_all"
filename_tensor_name = "save/Const:0"
output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
clear_devices = False
freeze_graph.freeze_graph(
input_graph_path,
input_saver_def_path,
input_binary,
checkpoint_path,
output_node_names,
restore_op_name,
filename_tensor_name,
output_graph_path,
clear_devices,
"",
"",
"",
checkpoint_version=saver_write_version)
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with ops.Graph().as_default():
output_graph_def = graph_pb2.GraphDef()
with open(output_graph_path, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = importer.import_graph_def(output_graph_def, name="")
self.assertEqual(4, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("VariableV2", node.op)
self.assertNotEqual("Variable", node.op)
with session.Session() as sess:
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
def _createTFExampleString(self, feature_name, feature_value):
"""Create a serialized tensorflow example."""
example = example_pb2.Example()
example.features.feature[feature_name].float_list.value.extend([
feature_value])
return example.SerializeToString()
def _writeDummySavedModel(self, path, feature_name):
"""Writes a classifier with two input features to the given path."""
with ops.Graph().as_default():
examples = array_ops.placeholder(dtypes.string, name="input_node")
feature_configs = {
feature_name: parsing_ops.FixedLenFeature(shape=[],
dtype=dtypes.float32),
}
features = parsing_ops.parse_example(examples, feature_configs)
feature = features[feature_name]
variable_node = variables.VariableV1(1.0, name="variable_node")
scores = math_ops.multiply(variable_node, feature, name="output_node")
class_feature = array_ops.fill(array_ops.shape(feature),
"class_%s" % feature_name)
classes = array_ops.transpose(class_feature)
with session.Session() as sess:
sess.run(variables.global_variables_initializer())
signature = (
signature_def_utils.classification_signature_def(
examples=examples,
classes=classes,
scores=scores,))
builder = saved_model_builder.SavedModelBuilder(path)
builder.add_meta_graph_and_variables(
sess,
[tag_constants.SERVING],
signature_def_map={
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
signature,
},)
builder.save(as_text=True)
@test_util.run_v1_only("b/120545219")
def testFreezeGraphV1(self):
self._testFreezeGraph(saver_pb2.SaverDef.V1)
@test_util.run_v1_only("b/120545219")
def testFreezeGraphV2(self):
self._testFreezeGraph(saver_pb2.SaverDef.V2)
def testFreezeMetaGraph(self):
tmp_dir = self.get_temp_dir()
checkpoint_prefix = os.path.join(tmp_dir, "meta_graph_checkpoint")
checkpoint_state_name = "checkpoint_state"
output_graph_filename = os.path.join(tmp_dir, "output_graph.pb")
with ops.Graph().as_default():
variable_node = variables.VariableV1(1.0, name="variable_node")
output_node = math_ops.multiply(variable_node, 2.0, name="output_node")
sess = session.Session()
init = variables.global_variables_initializer()
sess.run(init)
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
saver = saver_lib.Saver()
checkpoint_path = saver.save(
sess,
checkpoint_prefix,
global_step=0,
latest_filename=checkpoint_state_name)
input_saver_def_path = ""
input_binary = True
output_node_names = "output_node"
restore_op_name = "save/restore_all"
filename_tensor_name = "save/Const:0"
clear_devices = False
input_meta_graph = checkpoint_path + ".meta"
freeze_graph.freeze_graph(
"", input_saver_def_path, input_binary, checkpoint_path,
output_node_names, restore_op_name, filename_tensor_name,
output_graph_filename, clear_devices, "", "", "", input_meta_graph)
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with ops.Graph().as_default():
output_graph_def = graph_pb2.GraphDef()
with open(output_graph_filename, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = importer.import_graph_def(output_graph_def, name="")
self.assertEqual(4, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("VariableV2", node.op)
self.assertNotEqual("Variable", node.op)
with session.Session() as sess:
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
def testFreezeSavedModel(self):
tmp_dir = self.get_temp_dir()
saved_model_dir = os.path.join(tmp_dir, "saved_model_dir")
feature_name = "feature"
self._writeDummySavedModel(saved_model_dir, feature_name)
output_graph_filename = os.path.join(tmp_dir, "output_graph.pb")
input_saved_model_dir = saved_model_dir
output_node_names = "output_node"
input_binary = False
input_saver_def_path = False
restore_op_name = None
filename_tensor_name = None
clear_devices = False
input_meta_graph = False
checkpoint_path = None
input_graph_filename = None
saved_model_tags = tag_constants.SERVING
freeze_graph.freeze_graph(input_graph_filename, input_saver_def_path,
input_binary, checkpoint_path, output_node_names,
restore_op_name, filename_tensor_name,
output_graph_filename, clear_devices, "", "", "",
input_meta_graph, input_saved_model_dir,
saved_model_tags)
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with ops.Graph().as_default():
output_graph_def = graph_pb2.GraphDef()
with open(output_graph_filename, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = importer.import_graph_def(output_graph_def, name="")
self.assertEqual(8, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("VariableV2", node.op)
self.assertNotEqual("Variable", node.op)
feature_value = 2.0
example = self._createTFExampleString(feature_name, feature_value)
with session.Session() as sess:
input_node = sess.graph.get_tensor_by_name("input_node:0")
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node, feed_dict={input_node: [example]})
self.assertNear(feature_value, output, 0.00001)
def testSinglePartitionedVariable(self):
"""Ensures partitioned variables fail cleanly with freeze graph."""
checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
checkpoint_state_name = "checkpoint_state"
input_graph_name = "input_graph.pb"
output_graph_name = "output_graph.pb"
# Create a graph with partition variables. When weights are partitioned into
    # a single partition, the weights variable is followed by an identity ->
# identity (an additional identity node).
partitioner = partitioned_variables.fixed_size_partitioner(1)
with ops.Graph().as_default():
with variable_scope.variable_scope("part", partitioner=partitioner):
batch_size, height, width, depth = 5, 128, 128, 3
input1 = array_ops.zeros(
(batch_size, height, width, depth), name="input1")
input2 = array_ops.zeros(
(batch_size, height, width, depth), name="input2")
num_nodes = depth
filter1 = variable_scope.get_variable("filter", [num_nodes, num_nodes])
filter2 = array_ops.reshape(filter1, [1, 1, num_nodes, num_nodes])
conv = nn.conv2d(
input=input1, filter=filter2, strides=[1, 1, 1, 1], padding="SAME")
node = math_ops.add(conv, input2, name="test/add")
node = nn.relu6(node, name="test/relu6")
# Save graph and checkpoints.
sess = session.Session()
sess.run(variables.global_variables_initializer())
saver = saver_lib.Saver()
checkpoint_path = saver.save(
sess,
checkpoint_prefix,
global_step=0,
latest_filename=checkpoint_state_name)
graph_io.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)
# Ensure this graph has partition variables.
self.assertTrue([
tensor.name.split(":")[0]
for op in sess.graph.get_operations()
for tensor in op.values()
if re.search(r"/part_\d+/", tensor.name)
])
# Test freezing graph doesn't make it crash.
output_node_names = "save/restore_all"
output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
return_value = freeze_graph.freeze_graph_with_def_protos(
input_graph_def=sess.graph_def,
input_saver_def=None,
input_checkpoint=checkpoint_path,
output_node_names=output_node_names,
restore_op_name="save/restore_all", # default value
filename_tensor_name="save/Const:0", # default value
output_graph=output_graph_path,
clear_devices=False,
initializer_nodes="")
self.assertTrue(return_value, -1)
if __name__ == "__main__":
test.main()
| apache-2.0 | -5,676,305,373,818,004,000 | 39.357357 | 80 | 0.659424 | false | 3.769705 | true | false | false | 0.004911 |
microelly2/geodata | geodat/navigator.py | 1 | 27716 | '''navigation in 3D'''
# -*- coding: utf-8 -*-
#-------------------------------------------------
#-- event filter next germ + navigator
#--
#-- microelly 2016
#--
#-- GNU Lesser General Public License (LGPL)
#-------------------------------------------------
#http://doc.qt.io/qt-5/qt.html#Key-enum
#http://doc.qt.io/qt-5/qevent.html#Type-enum
#http://doc.qt.io/qt-5/qcolor.html#setNamedColor
#http://doc.qt.io/qt-5/richtext-html-subset.html
from geodat.say import *
import PySide
from PySide import QtGui,QtCore
import FreeCAD,FreeCADGui
#\cond
App=FreeCAD
Err=FreeCAD.Console.PrintError
Msg=FreeCAD.Console.PrintMessage
import FreeCADGui
from PySide import QtGui
from pivy import coin
import sys
from PySide import QtGui, QtCore
import os
#\endcond
import time,sys,traceback,math
from pivy import coin
'''
def sayexc(mess='',last=False):
exc_type, exc_value, exc_traceback = sys.exc_info()
ttt=repr(traceback.format_exception(exc_type, exc_value,exc_traceback))
lls=eval(ttt)
if last:
lls=[lls[-1]]
Err(mess + "\n" +"--> ".join(lls))
'''
# whenever the module is loaded stop an old eventserver
try:
stop()
except:
pass
## the debug window for runtime parameters
def myDebugWidget():
liste=QtGui.QWidget()
liste.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
layout=QtGui.QVBoxLayout()
liste.setLayout(layout)
liste.vmap={}
for k in ['key','xa','ya','za','xr','yr','zr','dxw','dyw','click','clickcount' ]:
line = QtGui.QLineEdit()
line.setText("("+k+")")
layout.addWidget(line)
liste.vmap[k]=line
for k in ['windows']:
line = QtGui.QTextEdit()
line.setText("("+k+")")
layout.addWidget(line)
liste.vmap[k]=line
bt= QtGui.QPushButton()
bt.setText("Ende")
bt.clicked.connect(stop)
layout.addWidget(bt)
liste.show()
return liste
##callback when a key is pressed
def on_key_press(ef,keystring):
print("on_key_press:", keystring)
if keystring=='Escape':
print("stoppe eventserver ...")
ef.output.hide()
stop()
return True
##callback when a key is released
def on_key_release(ef,keystring):
print("on_key_release:", keystring)
return True
## The EventFilter controls the Qt mouse and keyboard events
#
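# A minimal installation sketch (an assumption, mirroring what navi()/stop() below do):
#
#   ef=EventFilter()
#   QtGui.qApp.installEventFilter(ef)   # start intercepting keyboard/mouse events
#   ...
#   QtGui.qApp.removeEventFilter(ef)    # detach the filter again
#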
class EventFilter(QtCore.QObject):
#\cond
def __init__(self):
QtCore.QObject.__init__(self)
self.lastpos=None
self.on_key_press=on_key_press
self.on_key_release=on_key_release
self.on_move=on_move
self.on_clicks=on_clicks
self.on_windowslist=on_windowslist
self.keyTimeout=0.1
self.keyset=0
self.keyPressed2=False
self.output=myDebugWidget()
self.keymap={}
for t in dir(QtCore.Qt):
if t.startswith( 'Key_' ):
v=eval('QtCore.Qt.'+t)
self.keymap[v]=t[4:]
self.modmap={}
for t in dir(QtCore.Qt):
if t.endswith('Modifier'):
if t!= 'Modifier':
v=eval('QtCore.Qt.'+t)
self.modmap[v]=t[:-8]
#\endcond
## the event handler
#
def eventFilter(self, o, e):
# http://doc.qt.io/qt-5/qevent.html
z=str(e.type())
# not used events
if z == 'PySide.QtCore.QEvent.Type.ChildAdded' or \
z == 'PySide.QtCore.QEvent.Type.ChildRemoved'or \
z == 'PySide.QtCore.QEvent.Type.User' or \
z == 'PySide.QtCore.QEvent.Type.Paint' or \
z == 'PySide.QtCore.QEvent.Type.LayoutRequest' or\
z == 'PySide.QtCore.QEvent.Type.UpdateRequest' :
return QtGui.QWidget.eventFilter(self, o, e)
if z == 'PySide.QtCore.QEvent.Type.KeyPress':
if time.time()-self.keyset<self.keyTimeout:
return True
self.keyPressed2=True
self.keyset=time.time()
ks=''
for k in self.modmap:
if e.modifiers() & k:
ks += self.modmap[k] + '-'
if not self.keymap[e.key()] in ['Shift','Meta','Alt','Control','GroupSwitch']:
ks +=self.keymap[e.key()]
self.output.vmap['key'].setText(ks)
return self.on_key_press(self,ks)
# end of a single key pressed
if z == 'PySide.QtCore.QEvent.Type.KeyRelease':
if self.keyPressed2:
self.keyPressed2=False
self.keyset=0
ks=''
for k in self.modmap:
if e.modifiers() & k:
ks += self.modmap[k] + '-'
ks +=self.keymap[e.key()]
self.output.vmap['key'].setText(ks)
return self.on_key_release(self,ks)
# enter and leave a widget
if z == 'PySide.QtCore.QEvent.Type.Enter' or z == 'PySide.QtCore.QEvent.Type.Leave':
#FreeCAD.Console.PrintMessage("Enter Leave\n")
return True
if z == 'PySide.QtCore.QEvent.Type.HoverMove' :
if False:
FreeCAD.Console.PrintMessage("old Pos: ")
FreeCAD.Console.PrintMessage(e.oldPos())
FreeCAD.Console.PrintMessage(", new Pos: ")
FreeCAD.Console.PrintMessage(e.pos())
FreeCAD.Console.PrintMessage("\n")
self.lastpos=e.pos()
try: za=int(self.output.vmap['za'].text())
except: za=0
za2=za
self.output.vmap['xa'].setText(str(e.pos().x()))
self.output.vmap['ya'].setText(str(e.pos().y()))
#return self.on_move(self,[e.pos().x(),e.pos().y(),za2],[99,99,99])
return self.on_move(self,[e.pos().x(),e.pos().y(),za2],[e.pos().x(),e.pos().y(),0])
try:
if e.type() == QtCore.QEvent.ContextMenu and o.__class__ == QtGui.QWidget:
				# disable the right mouse button context menu here
# FreeCAD.Console.PrintMessage('!! cancel -------------------------------------context-----------\n')
return True
pass
# wheel rotation
if e.type()== QtCore.QEvent.Type.Wheel:
# http://doc.qt.io/qt-4.8/qwheelevent.html
self.output.vmap['xr'].setText(str(e.x()))
self.output.vmap['yr'].setText(str(e.y()))
self.output.vmap['zr'].setText(str(e.delta()))
self.output.vmap['xa'].setText(str(e.globalX()))
self.output.vmap['ya'].setText(str(e.globalY()))
try: za=int(self.output.vmap['za'].text())
except: za=0
za2=za+int(e.delta())
self.output.vmap['za'].setText(str(za2))
return self.on_move(self,[e.globalX(),e.globalY(),za2],[e.x(),e.y(),e.delta()] )
# mouse clicks
if e.type() == QtCore.QEvent.MouseButtonPress or \
e.type() == QtCore.QEvent.MouseButtonRelease or\
e.type() == QtCore.QEvent.MouseButtonDblClick:
windowlist=[]
myclass=o.__class__.__name__
try:
mytext=o.text()
except:
mytext="???"
if myclass=='QTabBar':
windowlist.append([myclass,str(o.tabText(o.currentIndex())),o.currentIndex()])
else:
windowlist.append([myclass,str(mytext)])
self.output.vmap['dxw'].setText(str(o.width()))
self.output.vmap['dyw'].setText(str(o.height()))
widget = QtGui.qApp.widgetAt(self.lastpos)
if widget:
while widget:
try:
p=widget
# Msg("widget "+ p.objectName()+"!\n")
if p.__class__.__name__ =='QMdiSubWindow':
widget=None
								# found
# Msg('\n')
label='???'
try:
# Msg( p.__class__.__name__ +" objectName:" + p.objectName()+ "\n" )
label2=p.objectName()
if label2!='': label=label2
except: pass
try:
# Msg( p.__class__.__name__ +" windowTitle" + p.windowTitle()+ "\n" )
label2=p.windowTitle()
if label2!='': label=label2
except: pass
try:
# Msg( p.__class__.__name__ +" tabTExt" + p.tabText()+ "\n" )
label2=p.tabText()
if label2!='': label=label2
except: pass
windowlist.append([p.__class__.__name__ ,str(label)])
p=widget.parent()
widget=p
except:
widget=None
stack=''
for t in windowlist:
stack += str(t)+"\n"
self.output.vmap['xr'].setText(str(e.pos().x()))
self.output.vmap['yr'].setText(str(e.pos().y()))
self.output.vmap['windows'].setText(stack)
self.windowlist=windowlist
self.on_windowslist(self,windowlist)
if e.type() == QtCore.QEvent.MouseButtonRelease:
self.output.vmap['clickcount'].setText('release')
return self.on_clicks(self,'Release',0)
return True
# double clicked
if e.type() == QtCore.QEvent.MouseButtonDblClick and e.button() == QtCore.Qt.LeftButton:
self.output.vmap['click'].setText('left')
self.output.vmap['clickcount'].setText('2')
return True
if e.type() == QtCore.QEvent.MouseButtonDblClick and e.button() == QtCore.Qt.RightButton:
self.output.vmap['click'].setText('right')
self.output.vmap['clickcount'].setText('2')
return True
if e.type() == QtCore.QEvent.MouseButtonDblClick and e.button() == QtCore.Qt.MiddleButton:
self.output.vmap['click'].setText('middle')
self.output.vmap['clickcount'].setText('2')
return True
# middle
if e.button() == QtCore.Qt.MidButton or e.button() == QtCore.Qt.MiddleButton:
self.output.vmap['click'].setText('middle')
self.output.vmap['clickcount'].setText('1')
				# intercept the context menu -> return True !
return True
if e.button() == QtCore.Qt.LeftButton:
FreeCAD.Console.PrintMessage('!Mouse one left\n')
self.output.vmap['click'].setText('left')
self.output.vmap['clickcount'].setText('1')
return self.on_clicks(self,'Left',1)
# return True
			# right mouse button when the context menu is deactivated
elif e.button() == QtCore.Qt.RightButton:
self.output.vmap['click'].setText('right')
self.output.vmap['clickcount'].setText('1')
				# intercept the context menu -> return True !
return self.on_clicks(self,'Right',1)
# return True
except:
sayexc()
return False
## stop and delete the EventFilter
#
def stop():
mw=QtGui.qApp
ef=FreeCAD.eventfilter
mw.removeEventFilter(ef)
#mw.setOverrideCursor(QtCore.Qt.SizeAllCursor)
mw.setOverrideCursor(QtCore.Qt.ArrowCursor)
# FreeCADGui.activateWorkbench("Geodat")
sg = FreeCADGui.ActiveDocument.ActiveView.getSceneGraph()
ef.output.deleteLater()
ef.navi.deleteLater()
sg.removeChild(ef.background)
def keypress(ef,keystring):
camera=FreeCAD.ActiveDocument.Wedge
if keystring=='X':
camera.Placement.Base.x += 10
if keystring=='Y':
camera.Placement.Base.y += 10
if keystring=='Z':
camera.Placement.Base.z += 10
ax=camera.Placement.Rotation.Axis
an=camera.Placement.Rotation.Angle
an=an* 180/math.pi
[y,p,r]=camera.Placement.Rotation.toEuler()
if keystring=='G':
y += 0.1
camera.Placement.Rotation=FreeCAD.Rotation(y,p,r)
if keystring=='H':
p += 0.1
camera.Placement.Rotation=FreeCAD.Rotation(y,p,r)
if keystring=='F':
r += 0.1
camera.Placement.Rotation=FreeCAD.Rotation(y,p,r)
if keystring=='C':
camera.Placement=FreeCAD.Placement()
FreeCAD.activeDocument().recompute()
if keystring=='Escape':
print("stoppe eventserver ...")
ef.output.hide()
stop()
return True
def on_keypress2(ef,keystring):
try:
camera=FreeCADGui.activeDocument().activeView().getCameraNode()
#		# helper point: camera position
# c=App.ActiveDocument.Vertex
#
# # Hud
# panel=App.ActiveDocument.Compound
#		# sphere in the HUD
# s=App.ActiveDocument.Sphere001
if ef.firstCall:
FreeCADGui.activeDocument().activeView().setCameraType("Perspective")
ef.firstCall=False
campos=FreeCAD.Vector( 0, 0, 0)
camera.position.setValue(campos)
nD=100
fD=12000000
camera.nearDistance.setValue(nD)
camera.farDistance.setValue(fD)
if keystring=='X' or keystring=='Insert':
ef.campos.x += 10
if keystring=='Y'or keystring=='Home' :
ef.campos.y += 10
if keystring=='Z'or keystring=='PageUp':
ef.campos.z += 10
if keystring=='Shift-X'or keystring=='Delete':
ef.campos.x -= 10
if keystring=='Shift-Y'or keystring=='End':
ef.campos.y -= 10
if keystring=='Shift-Z'or keystring=='PageDown':
ef.campos.z -= 10
if keystring=='F12':
ef.campos = FreeCAD.Vector( 0, 0, 0)
ef.laenge=0
ef.breite=0
ef.roll=0
if keystring=='Control-Left':
ef.roll += 10
if keystring=='Control-Right':
ef.roll -= 10
if keystring=='Control-Down':
ef.roll = 0
if ef.mode=='turn':
if keystring=='Up':
ef.breite += 1.0
if keystring=='Down':
ef.breite -= 1.0
if keystring=='Shift-Up' or keystring=='Shift-Down':
ef.breite=-ef.breite
if ef.laenge <=0:
ef.laenge += 180
else:
ef.laenge -= 180
if keystring=='Left':
ef.laenge -= 1.1
if keystring=='Right':
ef.laenge += 1.2
if keystring=='Shift-Left' or keystring=='Shift-Right':
if ef.laenge <=0:
ef.laenge += 180
else:
ef.laenge -= 180
elif ef.mode=='walk':
Msg('walk mode')
if keystring=='Left':
ef.direction -= 0.1
ef.laenge= -90+ef.direction*180/math.pi
if keystring=='Right':
ef.direction += 0.1
ef.laenge= -90+ef.direction*180/math.pi
if keystring=='Up':
ef.campos.x -= ef.speed*math.cos(ef.direction)
ef.campos.y += ef.speed*math.sin(ef.direction)
ef.campos.z += ef.speed*math.sin(ef.breite/180*math.pi)
if keystring=='Down':
ef.campos.x += ef.speed*math.cos(ef.direction)
ef.campos.y -= ef.speed*math.sin(ef.direction)
ef.campos.z -= ef.speed*math.sin(ef.breite/180*math.pi)
if keystring=='Return':
pass
elif ef.mode=='xyz':
Err('xyz mode')
if keystring=='Up':
ef.campos.z += ef.speed*math.cos(math.pi*ef.roll/180)
if keystring=='Down':
ef.campos.z -= ef.speed*math.cos(math.pi*ef.roll/180)
# if keystring=='Up':
# ef.campos.x += ef.speed*math.cos(ef.direction)
# ef.campos.y += ef.speed*math.sin(ef.direction)
# if keystring=='Down':
# ef.campos.x -= ef.speed*math.cos(ef.direction)
# ef.campos.y -= ef.speed*math.sin(ef.direction)
if keystring=='Left':
ef.campos.y += ef.speed*math.sin(0.0+ef.laenge/180*math.pi)
ef.campos.x -= ef.speed*math.cos(0.0+ef.laenge/180*math.pi)
if keystring=='Right':
ef.campos.y -= ef.speed*math.sin(0.0+ef.laenge/180*math.pi)
ef.campos.x += ef.speed*math.cos(0.0+ef.laenge/180*math.pi)
else:
Err("no known mode -- no action")
ef.compass.direction(ef.laenge)
ef.horizon.direction(ef.roll)
ef.horizon.setnick(ef.breite)
r=1000
pos3=FreeCAD.Vector(
r*math.sin(ef.laenge/180*math.pi)*math.cos(ef.breite/180*math.pi),
r*math.cos(ef.laenge/180*math.pi)*math.cos(ef.breite/180*math.pi),
r*math.sin(ef.breite/180*math.pi))
dir=FreeCAD.Vector(pos3)# .sub(ef.campos)
dir.normalize()
print(ef.direction)
print("ef.campos", ef.campos)
ef.map.setPos(ef.campos.x,ef.campos.y,ef.campos.z)
spos=FreeCAD.Vector(ef.campos)
d=200
prpos=FreeCAD.Vector(d*dir.x,d*dir.y,d*dir.z)
ppos=spos.add(prpos)
		# camera position
# c.Placement.Base=ef.campos
camera.position.setValue(ef.campos)
camera.pointAt(coin.SbVec3f(ppos),coin.SbVec3f(0,0.0+math.sin(math.pi*ef.roll/180),0.0+math.cos(math.pi*ef.roll/180)))
print("Roll ", ef.roll)
# #hud
# panel.Placement.Base=ppos
# panel.Placement.Rotation=FreeCAD.Rotation(ef.laenge,-ef.breite,0)
#	# rotation of the compass/horizon
# s.Placement.Rotation=FreeCAD.Rotation(-ef.laenge-90,0,ef.breite)
#
		# camera settings
#
if keystring=='F9':
a=camera.heightAngle.getValue()
a += 0.01
camera.heightAngle.setValue(a)
if keystring=='F10':
a=camera.heightAngle.getValue()
a -= 0.01
camera.heightAngle.setValue(a)
if keystring=='F11':
camera.heightAngle.setValue(0.785398185253)
if keystring=='F5':
nD=camera.nearDistance.getValue()
nD *=1.03
print("near Distance",nD)
camera.nearDistance.setValue(nD)
if keystring=='F6':
nD=camera.nearDistance.getValue()
nD /=1.03
if nD >0:
print("near Distance",nD)
camera.nearDistance.setValue(nD)
if keystring=='F2':
fn='/home/microelly2/FCB/b175_camera_controller/P1170438.JPG'
ef.tex.filename = fn
if keystring=='F3':
fn='/home/microelly2/FCB/b175_camera_controller/P1170039.JPG'
ef.tex.filename = fn
if keystring=='F4':
fn='/home/microelly2/FCB/b175_camera_controller/winter.jpg'
ef.tex.filename = fn
#
		# output data
#
if 1 or keystring=='F2':
t=FreeCAD.Vector(prpos)
try:
t.normalize()
except:
pass
campos2=(round(ef.campos[0]),round(ef.campos[1]),round(ef.campos[2]))
nD=camera.nearDistance.getValue()
a=camera.heightAngle.getValue()
out=''
out += "camera position " + str(campos2) +"\n"
out += "camera direction " + str([round(t.x,2),round(t.y,2),round(t.z,2)]) + "\n"
out += "speed " + str(ef.speed) +"\n"
out += "dir " + str(round(ef.direction*180/math.pi)) +"\n"
out += '\n'
out += "height Angle " + str(round(a/math.pi*180)) +'\n'
out += "focal length " + str(round(10/math.tan(a/2)))+"\n"
out += "near Distance " + str(round(nD)) + '\n'
print(out)
ef.navi.output.setText(out)
FreeCAD.ActiveDocument.recompute()
FreeCADGui.updateGui()
if keystring=='Escape':
print("stoppe eventserver ...")
stop()
sg = FreeCADGui.ActiveDocument.ActiveView.getSceneGraph()
ef.output.deleteLater()
ef.navi.deleteLater()
sg.removeChild(ef.background)
except:
sayexc()
stop()
return True
def on_move(ef,globalVector,localVector):
return True
def on_move2(ef,globalVector,localVector):
if ef.mouseMode:
d=3
if ef.v:
if ef.v[0]>globalVector[0]+d:
ef.on_key_press(ef,"Left")
elif ef.v[0]<globalVector[0]-d:
ef.on_key_press(ef,"Right")
if ef.v[1]>globalVector[1]+d:
ef.on_key_press(ef,"Up")
elif ef.v[1]<globalVector[1]-d:
ef.on_key_press(ef,"Down")
ef.v=globalVector
return True
def on_move3(ef,globalVector,localVector):
return True
## the old click callback
def on_clicks(ef,button,count):
print("on_mouse:", button, str(count))
return True
def on_clicks2(ef,button,count):
print("on_clicks2:", button, str(count))
if button=='Release':
ef.mouseMode=False
if button=='Left':
ef.mouseMode=True
ef.v=None
return True
## click callback for debug
def on_clicks3(ef,button,count):
print("on clicks 3",button)
print(ef.windowlist)
try:
if ef.windowlist[0][1]=='Testme':
print("call HUHU")
return False
except:
return True
## a widget to display the yaw direction inside a circle
class Compass(QtGui.QWidget):
#\cond
def __init__(self):
super(Compass, self).__init__()
self.rect= (0, 0, 100, 100)
self.arc=90
self.resize(150, 150)
#self.update()
#self.initUI()
def initUI(self):
self.setGeometry(300, 300, 350, 100)
self.setWindowTitle('Colors')
#self.show()
def paintEvent(self, e):
qp = QtGui.QPainter()
qp.begin(self)
self.drawRectangles(qp)
qp.end()
def drawRectangles(self, qp):
color = QtGui.QColor(0, 0, 0)
color.setNamedColor('#d4d4d4')
qp.setPen(color)
qp.setBrush(QtGui.QColor(100, 0, 0,50))
qp.drawEllipse(0, 0, 100, 100);
qp.save();
qp.translate(50,50);
qp.rotate(self.arc);
qp.setBrush(QtGui.QColor(255, 0, 0, 255))
qp.drawRect(0, -3, 50, 6);
qp.restore();
def direction(self,arc):
self.arc=arc-90
self.repaint()
#\endcond
## a widget to display the pitch of the view
class Horizon(QtGui.QWidget):
#\cond
def __init__(self):
super(Horizon, self).__init__()
self.rect= (0, 0, 100, 100)
self.arc=0
self.nick=0
self.resize(100, 100)
def initUI(self):
self.setGeometry(300, 300, 350, 100)
self.setWindowTitle('Colors')
def paintEvent(self, e):
qp = QtGui.QPainter()
qp.begin(self)
self.drawRectangles(qp)
qp.end()
def drawRectangles(self, qp):
color = QtGui.QColor(0, 0, 0)
color.setNamedColor('#d4d4d4')
qp.setBrush(QtGui.QColor(100, 100, 100, 255))
qp.drawEllipse(0, 0, 100, 100);
qp.setPen(color)
qp.setBrush(QtGui.QColor(220, 220, 255,200))
rect = QtCore.QRectF(0.0, 0.0, 100.0, 100.0)
startAngle = (90+self.arc-0.5*self.nick) * 16
spanAngle = (self.nick) * 16
qp.drawChord(rect, startAngle, spanAngle)
def direction(self,arc):
self.arc=arc
self.repaint()
def setnick(self,n):
self.nick=-n-180
self.repaint()
#\endcond
## a widget to display the xy position of the camera in the scene
class Map(QtGui.QWidget):
def __init__(self):
super(Map, self).__init__()
self.rect= (0, 0, 100, 100)
self.x=50
self.y=50
self.z=50
self.resize(150, 140)
#self.update()
#self.initUI()
def initUI(self):
self.setGeometry(300, 300, 350, 105)
self.setWindowTitle('Colors')
#self.show()
def paintEvent(self, e):
qp = QtGui.QPainter()
qp.begin(self)
self.drawRectangles(qp)
qp.end()
def drawRectangles(self, qp):
color = QtGui.QColor(0, 0, 0)
color.setNamedColor('#d4d4d4')
qp.setPen(color)
qp.setBrush(QtGui.QColor(100, 0, 0,50))
qp.drawRect(0, 0, 105, 105);
qp.save();
qp.translate(self.x,self.y);
qp.setBrush(QtGui.QColor(255, 0, 0, 255))
qp.drawRect(0, 0, 5, 5);
# qp.save();
qp.translate(-self.x,-self.y+self.z);
qp.setBrush(QtGui.QColor(255, 255, 0, 255))
qp.drawRect(0, 0, 10, 5);
qp.restore();
# qp.restore();
def setPos(self,x,y,z):
fak=50.0
self.z=-z/fak+50
self.x=x/fak+50
self.y=-y/fak+50
print("setpos",x,y)
self.repaint()
##creates and returns the navigator display widget
def myNavigatorWidget(ef):
liste=QtGui.QWidget()
liste.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
layout=QtGui.QVBoxLayout()
liste.setLayout(layout)
liste.vmap={}
# for k in ['key','xa','ya','za','xr','yr','zr','dxw','dyw','click','clickcount' ]:
# line = QtGui.QLineEdit()
# line.setText("("+k+")")
# layout.addWidget(line)
# liste.vmap[k]=line
# for k in ['windows']:
# line = QtGui.QTextEdit()
# line.setText("("+k+")")
# layout.addWidget(line)
# liste.vmap[k]=line
# liste.setGeometry(100, 100, 250, 1250)
liste2=QtGui.QWidget()
layout2=QtGui.QHBoxLayout()
liste2.setLayout(layout2)
layout.addWidget(liste2)
liste2.setMinimumHeight(130)
liste2.setMinimumWidth(360)
	# three indicators ...
# compass
ex = Compass()
layout2.addWidget(ex)
ex.direction(-50)
ef.compass=ex
# horizon
ex2 = Horizon()
ex2.setnick(100)
ex2.direction(20)
layout2.addWidget(ex2)
ef.horizon=ex2
# ex2.direction(50)
# speed
ex3 = Map()
layout2.addWidget(ex3)
ex3.setPos(20,40,20)
ef.map=ex3
ll= QtGui.QLabel()
ll.setText("Turn")
layout.addWidget(ll)
liste.modelabel=ll
bt= QtGui.QPushButton()
bt.setText("Walk Mode")
layout.addWidget(bt)
bt= QtGui.QPushButton()
bt.setText("Frontal Mode")
layout.addWidget(bt)
bt= QtGui.QPushButton()
bt.setText("Turn Mode")
layout.addWidget(bt)
line = QtGui.QTextEdit()
line.setText("yyy")
layout.addWidget(line)
liste.output=line
bt= QtGui.QPushButton()
bt.setText("Stop Navigation")
layout.addWidget(bt)
# bt= QtGui.QPushButton()
# bt.setText("Testme")
# layout.addWidget(bt)
# bt.clicked.connect(huhu)
bt= QtGui.QPushButton()
bt.setText("Background 1 Snowland")
layout.addWidget(bt)
bt.clicked.connect(lambda:background1(ef))
bt= QtGui.QPushButton()
bt.setText("Background 2 Duneland")
layout.addWidget(bt)
bt.clicked.connect(lambda:background2(ef))
bt= QtGui.QPushButton()
bt.setText("Background 3 Cologne")
layout.addWidget(bt)
bt.clicked.connect(lambda:background3(ef))
bt= QtGui.QPushButton()
bt.setText("Background 4 Transparence")
layout.addWidget(bt)
bt.clicked.connect(lambda:background4(ef))
liste.ef=ef
liste.show()
return liste
## background image winter
def background1(ef):
fn='/home/microelly2/FCB/b175_camera_controller/winter.jpg'
fn=os.path.dirname(__file__) +"/../pics/winter.jpg"
ef.tex.filename = fn
## background image dune
def background2(ef):
fn='/home/microelly2/FCB/b175_camera_controller/P1170437.JPG'
fn=os.path.dirname(__file__) +"/../pics//P1170437.JPG"
ef.tex.filename = fn
## background image city
def background3(ef):
fn='/home/microelly2/FCB/b175_camera_controller/P1170039.JPG'
fn=os.path.dirname(__file__) +"/../pics/P1170039.JPG"
ef.tex.filename = fn
## background partially transparent
def background4(ef):
fn='/home/microelly2/FCB/b175_camera_controller/transpa.png'
fn=os.path.dirname(__file__) +"/../pics/transpa.png"
ef.tex.filename = fn
def on_windowslist(ef,windowslist):
return True
## callback to set the mode or to do some other useful things
def on_windowslist2(ef,windowslist):
for t in windowslist:
if t==['QPushButton','Stop Navigation']:
stop()
ef.output.deleteLater()
ef.navi.deleteLater()
if t==['QPushButton','Walk Mode']:
print("Walk mode")
ef.mode="walk"
ef.navi.modelabel.setText("Walk")
if t==['QPushButton','Frontal Mode']:
print("Frontal mode")
ef.mode="xyz"
ef.navi.modelabel.setText("Frontal")
if t==['QPushButton','Turn Mode']:
print("Turn mode")
ef.mode="turn"
ef.navi.modelabel.setText("Turn")
return
## initialize and start the Eventfilter
def navi():
'''navigator startup'''
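	# Rough call sketch (an assumption, based on runtest() below): navi() builds the
	# EventFilter, the navigator widget and the sky-sphere background, installs the
	# filter on the Qt application object and returns it; Escape (handled in
	# on_keypress2) or the "Stop Navigation" button tears everything down via stop().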
mw=QtGui.qApp
#widget.setCursor(QtCore.Qt.SizeAllCursor)
	# hide the cursor
#mw.setOverrideCursor(QtCore.Qt.BlankCursor)
# FreeCADGui.activateWorkbench("NoneWorkbench")
mw.setOverrideCursor(QtCore.Qt.PointingHandCursor)
ef=EventFilter()
ef.laenge=0.0
ef.breite=0.0
ef.campos=FreeCAD.Vector( 0, 0, 20000)
# ef.output.hide()
ef.mouseMode=False
ef.firstCall=True
ef.mode="turn"
ef.navi=myNavigatorWidget(ef)
ef.speed=100
ef.direction=0.5*math.pi
ef.roll=0
#--------------
# get a jpg filename
# jpgfilename = QtGui.QFileDialog.getOpenFileName(QtGui.qApp.activeWindow(),'Open image file','*.jpg')
fn='/home/microelly2/FCB/b175_camera_controller/winter.jpg'
fn=os.path.dirname(__file__) +"/../pics/winter.jpg"
sg = FreeCADGui.ActiveDocument.ActiveView.getSceneGraph()
col = coin.SoBaseColor()
#col.rgb=(1,0,0)
trans = coin.SoTranslation()
trans.translation.setValue([0,0,0])
myCustomNode = coin.SoSeparator()
#myCustomNode.addChild(col)
if 0 or False:
cub = coin.SoCylinder()
cub.radius.setValue(3000)
cub.height.setValue(4000)
cub.parts.set("SIDES")
s=coin.SoRotationXYZ()
s.angle.setValue(1.5708)
s.axis.setValue(0)
myCustomNode.addChild(s)
s=coin.SoRotationXYZ()
s.angle.setValue(math.pi)
s.axis.setValue(1)
myCustomNode.addChild(s)
else:
cub = coin.SoSphere()
cub.radius.setValue(10000000)
s=coin.SoRotationXYZ()
s.angle.setValue(1.5708)
s.axis.setValue(0)
myCustomNode.addChild(s)
s=coin.SoRotationXYZ()
s.angle.setValue(math.pi)
s.axis.setValue(1)
myCustomNode.addChild(s)
if False:
l=coin.SoDirectionalLight()
l.direction.setValue(coin.SbVec3f(0,1,0))
l.color.setValue(coin.SbColor(0,0,1))
myCustomNode.addChild(l)
l=coin.SoDirectionalLight()
l.direction.setValue(coin.SbVec3f(0,-1,0))
l.color.setValue(coin.SbColor(0,1,1))
myCustomNode.addChild(l)
l=coin.SoDirectionalLight()
l.direction.setValue(coin.SbVec3f(0,0,1))
l.color.setValue(coin.SbColor(1,0,0))
myCustomNode.addChild(l)
l=coin.SoDirectionalLight()
l.direction.setValue(coin.SbVec3f(0,0,-1))
l.color.setValue(coin.SbColor(0.6,0.6,1))
myCustomNode.addChild(l)
l=coin.SoSpotLight()
l.direction.setValue(coin.SbVec3f(1,0,1))
l.color.setValue(coin.SbColor(0,1,0))
l.location.setValue(coin.SbVec3f(0,0,0))
# l.cutOffAngle.setValue(0.01)
# l.dropOffRate.setValue(1)
myCustomNode.addChild(l)
#myCustomNode.addChild(trans)
myCustomNode.addChild(cub)
sg.addChild(myCustomNode)
tex = coin.SoTexture2()
tex.filename = fn
myCustomNode.insertChild(tex,0)
#---------------
ef.background=myCustomNode
ef.tex=tex
FreeCAD.eventfilter=ef
mw.installEventFilter(ef)
FreeCAD.eventfilter.on_key_press=on_keypress2
FreeCAD.eventfilter.on_move=on_move3
FreeCAD.eventfilter.on_clicks=on_clicks3
FreeCAD.eventfilter.on_windowslist=on_windowslist2
on_keypress2(FreeCAD.eventfilter,'O')
view=FreeCADGui.activeDocument().activeView()
FreeCADGui.ActiveDocument.ActiveView.setAnimationEnabled(False)
mgr=view.getViewer().getSoRenderManager()
mgr.setAutoClipping(0)
FreeCAD.ActiveDocument.recompute()
FreeCADGui.updateGui()
return ef
def runtest():
navi()
ef=navi()
ef.navi.hide()
ef.output.hide()
def Navigator():
runtest()
| lgpl-3.0 | 6,674,549,925,316,580,000 | 22.749786 | 120 | 0.65933 | false | 2.614224 | false | false | false | 0.055455 |
kangkot/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/ctypes/test/test_struct_fields.py | 68 | 1507 | import unittest
from ctypes import *
class StructFieldsTestCase(unittest.TestCase):
# Structure/Union classes must get 'finalized' sooner or
# later, when one of these things happen:
#
# 1. _fields_ is set.
# 2. An instance is created.
# 3. The type is used as field of another Structure/Union.
# 4. The type is subclassed
#
# When they are finalized, assigning _fields_ is no longer allowed.
def test_1_A(self):
class X(Structure):
pass
self.failUnlessEqual(sizeof(X), 0) # not finalized
X._fields_ = [] # finalized
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_1_B(self):
class X(Structure):
_fields_ = [] # finalized
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_2(self):
class X(Structure):
pass
X()
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_3(self):
class X(Structure):
pass
class Y(Structure):
_fields_ = [("x", X)] # finalizes X
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
def test_4(self):
class X(Structure):
pass
class Y(X):
pass
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
Y._fields_ = []
self.assertRaises(AttributeError, setattr, X, "_fields_", [])
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 3,312,128,138,499,278,000 | 29.14 | 71 | 0.568016 | false | 4.095109 | true | false | false | 0.005309 |
tarthy6/dozer-thesis | py/plot.py | 3 | 43753 | # encoding: utf-8
# 2008 © Václav Šmilauer <[email protected]>
"""
Module containing utility functions for plotting inside woo. Most functionality is exposed through :obj:`woo.core.Plot`, however.
"""
## all exported names
__all__=['live','liveInterval','autozoom','legendAlpha','scientific','scatterMarkerKw']
import sys
PY3K=sys.version_info[0]==3
pilOk=False
try:
import PIL as Image
pilOk=True
except ImportError: pass
try:
import Image
pilOk=True
except ImportError: pass
if not pilOk: print 'WARN: PIL/Image module (python-imaging) not importable, embedding images into plots will give errors.'
# PY3K
if PY3K:
def _bytes(s): return bytes(s,'ascii')
else:
def _bytes(s): return s
import matplotlib,os,time,math,itertools,sys
# running in batch
#
# If GtkAgg is the default, X must be working, which is not the case
# with batches (DISPLAY is unset in such case) and importing pylab fails then.
#
# Agg does not require the GUI part and works without any DISPLAY active
# just fine.
#
# see http://www.mail-archive.com/[email protected]/msg04320.html
# and https://lists.launchpad.net/woo-users/msg03289.html
#
# IMPORTANT: this sets woo.runtime.hasDisplay
try: import woo.qt
except ImportError: pass
import woo.runtime, wooMain, woo.config
if wooMain.options.fakeDisplay: woo.runtime.hasDisplay=False
if 'qt4' not in woo.config.features: woo.runtime.hasDisplay=False
if woo.runtime.hasDisplay==None: # not yet set
raise RuntimeError('woo.plot imported before woo.runtime.hasDisplay is set. This should not really happen, please report.')
if not woo.runtime.hasDisplay:
#from matplotlib.backends.backend_agg import FigureCanvasAgg as WooFigureCanvas
matplotlib.use('Agg') ## pylab API
else:
matplotlib.use('Qt4Agg') # pylab API
#from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as WooFigureCanvas
from matplotlib.backends.backend_agg import FigureCanvasAgg as _HeadlessFigureCanvas
from minieigen import *
matplotlib.rc('axes',grid=True) # put grid in all figures
import pylab
# simulation-specific bits moved to woo.core.Plot
# so that they are saved and reloaded with Scene automatically
#
# those remain module-global objects
#
live=True if woo.runtime.hasDisplay else False
"Enable/disable live plot updating. Disabled without display (useless)."
liveInterval=.5
"Interval for the live plot updating, in seconds."
autozoom=True
"Enable/disable automatic plot rezooming after data update."
legendAlpha=.6
'Transparency of legend frames in plots'
scientific=True if hasattr(matplotlib.axes.Axes,'ticklabel_format') else False ## safe default for older matplotlib versions
"Use scientific notation for axes ticks."
current=-1
"Point that is being tracked with a scatter point. -1 is for the last point, set to *nan* to disable."
afterCurrentAlpha=.2
"Color alpha value for part of lines after :obj:`woo.plot.current`, between 0 (invisible) to 1 (full color)"
scatterMarkerKw=dict(verts=[(0.,0.),(-30.,10.),(-25,0),(-30.,-10.)],marker=None)
"Parameters for the current position marker"
annotateKw=dict(horizontalalignment='left',verticalalignment='upper right',fontsize=9)
"Parameters for annotation (current value) display"
lineKw=dict(linewidth=1.5,alpha=.8)
"Parameters for the normal line plot"
componentSeparator='_'
componentSuffixes={Vector2:{-1:'norm',0:'x',1:'y'},Vector3:{-1:'norm',0:'x',1:'y',2:'z'},Vector2i:{0:'x',1:'y'},Vector3i:{0:'x',1:'y',2:'z'},Vector6:{-1:'norm',0:'xx',1:'yy',2:'zz',3:'yz',4:'zx',5:'xy'},Matrix3:{(0,0):'xx',(1,1):'yy',(2,2):'zz',(0,1):'xy',(1,0):'yx',(0,2):'xz',(2,0):'zx',(1,2):'yz',(2,1):'zy'}}
# if a type with entry in componentSuffixes is given in addData, columns for individual components are synthesized using indices and suffixes given for each type; negative index means the norm, which is computed using the 'norm()' method (must be defined by the type)
# e.g. foo=Vector3r(1,2,3) will result in columns foo_x=1,foo_y=2,foo_z=3,foo_norm=3.741657...
def Scene_plot_reset(P):
"Reset all plot-related variables (data, plots, labels)"
P.data,P.plots,P.imgData={},{},{}
pylab.close('all')
def Scene_plot_resetData(P):
"Reset all plot data; keep plots and labels intact."
P.data={}
def Scene_plot_splitData(P):
"Make all plots discontinuous at this point (adds nan's to all data fields)"
P.addData({})
def Scene_plot_reverseData(P):
"""Reverse woo.core.Plot.data order.
Useful for tension-compression test, where the initial (zero) state is loaded and, to make data continuous, last part must *end* in the zero state.
"""
for k in P.data: P.data[k].reverse()
def addDataColumns(data,dd):
'''Add new columns with NaN data, without adding anything to other columns. Does nothing for columns that already exist'''
numSamples=len(data[data.keys()[0]]) if len(data)>0 else 0
for d in dd:
if d in data.keys(): continue
data[d]=[nan for i in range(numSamples)]
def Scene_plot_autoData(P,**kw):
"""Add data by evaluating contents of :obj:`woo.core.Plot.plots`. Expressions rasing exceptions will be handled gracefully, but warning is printed for each.
>>> from woo import plot; from woo.dem import *; from woo.core import *
>>> from pprint import pprint
>>> S=Scene(fields=[DemField(gravity=(0,0,-10))])
>>> S.plot.plots={'S.step':('S.time',None,'numParticles=len(S.dem.par)')}
>>> S.plot.autoData()
>>> pprint(S.plot.data)
{'S.step': [0], 'S.time': [0.0], 'numParticles': [0]}
Note that each item in :obj:`woo.core.Plot.plots` can be
* an expression to be evaluated (using the ``eval`` builtin);
* ``name=expression`` string, where ``name`` will appear as label in plots, and expression will be evaluated each time;
* a dictionary-like object -- current keys are labels of plots and current values are added to :obj:`woo.core.Plot.data`. The contents of the dictionary can change over time, in which case new lines will be created as necessary.
A simple simulation with plot can be written in the following way; note how the energy plot is specified.
>>> from woo import plot, utils
>>> S=Scene(fields=[DemField(gravity=(0,0,-10))])
>>> S.plot.plots={'i=S.step':('**S.energy','total energy=S.energy.total()',None,'rel. error=S.energy.relErr()')}
>>> # we create a simple simulation with one ball falling down
>>> S.dem.par.add(Sphere.make((0,0,0),1,mat=utils.defaultMaterial()))
0
>>> S.engines=[Leapfrog(damping=.4,reset=True),
... # get data required by plots at every step
... PyRunner(1,'S.plot.autoData()')
... ]
>>> S.trackEnergy=True
>>> S.run(3,True)
>>> pprint(S.plot.data) #doctest: +ELLIPSIS
{'grav': [0.0, 0.0, -20.357...],
'i': [0, 1, 2],
'kinetic': [0.0, 1.526..., 13.741...],
'nonviscDamp': [nan, nan, 8.143...],
'rel. error': [0.0, 1.0, 0.0361...],
'total energy': [0.0, 1.526..., 1.526...]}
.. plot::
import woo, woo.plot, woo.utils
from woo.dem import *
from woo.core import *
S=Scene(fields=[DemField(gravity=(0,0,-10))])
S.dem.par.add(Sphere.make((0,0,0),1));
S.engines=[Leapfrog(damping=.4,reset=True),PyRunner('S.plot.autoData()')]
S.plot.plots={'i=S.step':('**S.energy','total energy=S.energy.total()',None,'rel. error=S.energy.relErr()')}
S.trackEnergy=True
S.run(500,True)
S.plot.legendLoc=('lower left','upper right')
S.plot.plot()
"""
def colDictUpdate(col,dic,kw):
		'update *dic* with the value from col, which is an "expr" or "name=expr" string; all exceptions from ``eval`` are caught and a warning is printed without adding any data.'
name,expr=col.split('=',1) if '=' in col else (col,col)
try:
val=eval(expr,kw)
dic.update({name:val})
except:
import traceback
traceback.print_exc()
print 'WARN: ignoring exception raised while evaluating auto-column `'+expr+"'%s."%('' if name==expr else ' ('+name+')')
cols={}
S=P.scene
# data,imgData,plots=P.data,P.imgData,P.plots
kw.update(S=S)
kw.update(woo=woo)
for p in P.plots:
pp=P.plots[p]
colDictUpdate(p.strip(),cols,kw)
for y in tuplifyYAxis(P.plots[p]):
# imgplot specifier
if y==None: continue
yy=addPointTypeSpecifier(y,noSplit=True)[0]
yy1=yy.split('=')[-1]
# dict-like object
# if hasattr(yy,'keys'): cols.update(dict(yy))
# callable returning list sequence of expressions to evaluate
if yy1.startswith('**'):
try:
dd=eval(yy1[2:],{'S':S})
except:
import traceback
traceback.print_exc()
print 'WARN: ignoring exception raised while evaluating dictionary-returning expression "'+yy1[2:]+':'
for k,v in dd.items(): cols[k]=v
elif yy1.startswith('*'):
ee=eval(yy1[1:],{'S':S})
for e in ee: colDictUpdate(e,cols,{'S':S})
else: colDictUpdate(yy,cols,kw)
P.addData(cols)
def Scene_plot_addData(P,*d_in,**kw):
"""Add data from arguments name1=value1,name2=value2 to woo.plot.data.
(the old {'name1':value1,'name2':value2} is deprecated, but still supported)
New data will be padded with nan's, unspecified data will be nan (nan's don't appear in graphs).
This way, equal length of all data is assured so that they can be plotted one against any other.
>>> S=woo.master.scene
>>> from pprint import pprint
>>> S.plot.resetData()
>>> S.plot.addData(a=1)
>>> S.plot.addData(b=2)
>>> S.plot.addData(a=3,b=4)
>>> pprint(S.plot.data)
{'a': [1, nan, 3], 'b': [nan, 2, 4]}
Some sequence types can be given to addData; they will be saved in synthesized columns for individual components.
>>> S.plot.resetData()
>>> S.plot.addData(c=Vector3(5,6,7),d=Matrix3(8,9,10, 11,12,13, 14,15,16))
>>> pprint(S.plot.data) #doctest: +ELLIPSIS
{'c_norm': [10.488...],
'c_x': [5.0],
'c_y': [6.0],
'c_z': [7.0],
'd_xx': [8.0],
'd_xy': [9.0],
'd_xz': [10.0],
'd_yx': [11.0],
'd_yy': [12.0],
'd_yz': [13.0],
'd_zx': [14.0],
'd_zy': [15.0],
'd_zz': [16.0]}
"""
data,imgData=P.data,P.imgData
import numpy
if len(data)>0: numSamples=len(data[data.keys()[0]])
else: numSamples=0
# align with imgData, if there is more of them than data
if len(imgData)>0 and numSamples==0: numSamples=max(numSamples,len(imgData[imgData.keys()[0]]))
d=(d_in[0] if len(d_in)>0 else {})
d.update(**kw)
# handle types composed of multiple values (vectors, matrices)
dNames=d.keys()[:] # make copy, since dict cannot change size if iterated over directly
for name in dNames:
if type(d[name]) in componentSuffixes:
val=d[name]
suffixes=componentSuffixes[type(d[name])]
for ix in suffixes:
d[name+componentSeparator+suffixes[ix]]=(d[name][ix] if ix>=0 else d[name].norm())
del d[name]
elif hasattr(d[name],'__len__'):
raise ValueError('plot.addData given unhandled sequence type (is a '+type(d[name]).__name__+', must be number or '+'/'.join([k.__name__ for k in componentSuffixes])+')')
for name in d:
if not name in data.keys(): data[name]=[]
for name in data:
data[name]+=(numSamples-len(data[name]))*[nan]
data[name].append(d[name] if name in d else nan)
#print [(k,len(data[k])) for k in data.keys()]
#numpy.array([nan for i in range(numSamples)])
#numpy.append(data[name],[d[name]],1)
def Scene_plot_addImgData(P,**kw):
data,imgData=P.data,P.imgData
for k in kw:
if k not in imgData: imgData[k]=[]
# align imgData with data
if len(data.keys())>0 and len(imgData.keys())>0:
nData,nImgData=len(data[data.keys()[0]]),len(imgData[imgData.keys()[0]])
#if nImgData>nData-1: raise RuntimeError("imgData is already the same length as data?")
if nImgData<nData-1: # repeat last value
for k in imgData.keys():
lastValue=imgData[k][-1] if len(imgData[k])>0 else None
imgData[k]+=(nData-len(imgData[k])-1)*[lastValue]
elif nData<nImgData:
for k in data.keys():
lastValue=data[k][-1] if len(data[k])>0 else nan
data[k]+=(nImgData-nData)*[lastValue] # add one more, because we will append to imgData below
# add values from kw
newLen=(len(imgData[imgData.keys()[0]]) if imgData else 0)+1 # current length plus 1
for k in kw:
if k in imgData and len(imgData[k])>0: imgData[k]+=(newLen-len(imgData[k])-1)*[imgData[k][-1]]+[kw[k]] # repeat last element as necessary
else: imgData[k]=(newLen-1)*[None]+[kw[k]] # repeat None if no previous value
# align values which were not in kw by repeating the last value
for k in imgData:
if len(imgData[k])<newLen: imgData[k]+=(newLen-len(imgData[k]))*[imgData[k][-1]]
assert len(set([len(i) for i in imgData.values()]))<=1 # no data or all having the same value
# not public functions
def addPointTypeSpecifier(o,noSplit=False):
"""Add point type specifier to simple variable name; optionally take only the part before '=' from the first item."""
if type(o) in [tuple,list]:
if noSplit or not type(o[0])==str: return o
else: return (o[0].split('=',1)[0],)+tuple(o[1:])
else: return (o if (noSplit or not type(o)==str) else (o.split('=',1)[0]),'')
def tuplifyYAxis(pp):
"""convert one variable to a 1-tuple"""
if type(pp) in [tuple,list]: return pp
else: return (pp,)
def xlateLabel(l,labels):
"Return translated label; return l itself if not in the labels dict."
if l in labels.keys(): return labels[l]
else: return l
class LineRef:
"""Holds reference to plot line and to original data arrays (which change during the simulation),
and updates the actual line using those data upon request."""
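	# Sketch of the intended life cycle (inferred from createPlots/liveUpdate below):
	# createPlots() appends one LineRef per plotted curve to P.currLineRefs, keeping
	# references to the live data lists; liveUpdate() then periodically calls update()
	# on each LineRef so that the matplotlib artists follow the growing data.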
def __init__(self,line,scatter,annotation,line2,xdata,ydata,imgData=None,dataName=None):
self.line,self.scatter,self.annotation,self.line2,self.xdata,self.ydata,self.imgData,self.dataName=line,scatter,annotation,line2,xdata,ydata,imgData,dataName
def update(self):
if isinstance(self.line,matplotlib.image.AxesImage):
# image name
try:
if len(self.xdata)==0 and self.dataName: self.xdata=self.imgData[self.dataName] # empty list reference an empty singleton, not the list we want; adjust here
import Image
if self.xdata[current]==None: img=Image.new('RGBA',(1,1),(0,0,0,0))
else: img=Image.open(self.xdata[current])
self.line.set_data(img)
except IndexError: pass
else:
# regular data
import numpy
# current==-1 avoids copy slicing data in the else part
if current==None or current==-1 or afterCurrentAlpha==1:
self.line.set_xdata(self.xdata); self.line.set_ydata(self.ydata)
self.line2.set_xdata([]); self.line2.set_ydata([])
else:
try: # try if we can extend the first part by one so that lines are connected
self.xdata[:current+1]; preCurrEnd=current+1
except IndexError: preCurrEnd=current
preCurrEnd=current+(1 if len(self.xdata)>current else 0)
self.line.set_xdata(self.xdata[:preCurrEnd]); self.line.set_ydata(self.ydata[:preCurrEnd])
self.line2.set_xdata(self.xdata[current:]); self.line2.set_ydata(self.ydata[current:])
try:
x,y=self.xdata[current],self.ydata[current]
except IndexError: x,y=0,0
# this could be written in a nicer way, very likely
try:
pt=numpy.ndarray((2,),buffer=numpy.array([float(x),float(y)]))
if self.scatter:
self.scatter.set_offsets(pt)
# change rotation of the marker (possibly incorrect)
try:
dx,dy=self.xdata[current]-self.xdata[current-1],self.ydata[current]-self.ydata[current-1]
# smoothing from last n values, if possible
# FIXME: does not show arrow at all if less than window values
#try:
# window=10
# dx,dy=[numpy.average(numpy.diff(dta[current-window:current])) for dta in self.xdata,self.ydata]
#except IndexError: pass
# there must be an easier way to find on-screen derivative angle, ask on the matplotlib mailing list
axes=self.line.get_axes()
p=axes.patch; xx,yy=p.get_verts()[:,0],p.get_verts()[:,1]; size=max(xx)-min(xx),max(yy)-min(yy)
aspect=(size[1]/size[0])*(1./axes.get_data_ratio())
angle=math.atan(aspect*dy/dx)
if dx<0: angle-=math.pi
self.scatter.set_transform(matplotlib.transforms.Affine2D().rotate(angle))
except IndexError: pass
if self.annotation:
if math.isnan(x) or math.isnan(y):
if hasattr(self.annotation,'xyann'): self.annotation.xyann=(x,y)
else: self.annotation.xytext=(0,0)
self.annotation.set_text('') # make invisible, place anywhere
else:
#
if hasattr(self.annotation,'xyann'): self.annotation.xyann=(x,y) # newer MPL versions (>=1.4)
						else: self.annotation.xytext=(x,y) # older MPL versions
self.annotation.set_text(self.annotation.annotateFmt.format(xy=(float(x),float(y))))
except TypeError: pass # this happens at i386 with empty data, saying TypeError: buffer is too small for requested array
liveTimeStamp=0 # timestamp when live update was started, so that the old thread knows to stop if that changes
nan=float('nan')
def createPlots(P,subPlots=True,noShow=False,replace=True,scatterSize=60,wider=False):
'''Create plots based on current data;
:param subPlots: show all plots in one figure as subplots; otherwise, create multiple figures
:param noShow: use headless backend for plots, and do not show plots on the screen
	:param replace: if False, keep existing figures open and leave P.currLineRefs untouched; by default (True) existing figures are closed and P.currLineRefs is rebuilt
'''
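	# Minimal direct-call sketch (an assumption -- normally this function is driven by
	# the Plot object itself rather than called by hand):
	#   figs=createPlots(S.plot,subPlots=True,noShow=True,replace=False)
	# which returns the list of created matplotlib figures (see the return at the end).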
import logging
data,imgData,plots,labels,xylabels,legendLoc,axesWd,annotateFmt=P.data,P.imgData,P.plots,P.labels,P.xylabels,P.legendLoc,P.axesWd,P.annotateFmt
if replace:
if P.currLineRefs:
logging.info('Closing existing figures')
ff=set([l.line.get_axes().get_figure() for l in P.currLineRefs]) # get all current figures
for f in ff: pylab.close(f) # close those
P.currLineRefs=[]
figs=[]
if len(plots)==0: return # nothing to plot
if subPlots:
# compute number of rows and colums for plots we have
subCols=int(round(math.sqrt(len(plots)))); subRows=int(math.ceil(len(plots)*1./subCols))
if wider: subRows,subCols=subCols,subRows
# create a new figure; called once with subPlots, for each subplot without subPlots
def _newFig():
## pylab API
if not noShow: return pylab.figure() # this will go onto the screen; the pylab call sets up the windows as well
else: # with noShow
fig=matplotlib.figure.Figure()
canvas=_HeadlessFigureCanvas(fig) #
return fig
if subPlots: figs=[_newFig()]
for nPlot,p in enumerate(plots.keys()):
pStrip=p.strip().split('=',1)[0]
if not subPlots:
figs.append(_newFig())
axes=figs[-1].add_subplot(1,1,1)
		else: axes=figs[-1].add_subplot(subRows,subCols,nPlot+1) # nPlot is 1-based in mpl, for matlab compatibility
axes.grid(True)
if plots[p]==None: # image plot
if not pStrip in imgData.keys(): imgData[pStrip]=[]
# fake (empty) image if no data yet
import Image
if len(imgData[pStrip])==0 or imgData[pStrip][-1]==None: img=Image.new('RGBA',(1,1),(0,0,0,0))
else: img=Image.open(imgData[pStrip][-1])
img=axes.imshow(img,origin='upper')
if replace: P.currLineRefs.append(LineRef(line=img,scatter=None,annotation=None,line2=None,xdata=imgData[pStrip],ydata=None,imgData=imgData,dataName=pStrip))
axes.set_axis_off()
continue
plots_p=[addPointTypeSpecifier(o) for o in tuplifyYAxis(plots[p])]
plots_p_y1,plots_p_y2=[],[]; y1=True
missing=set() # missing data columns
if pStrip not in data.keys(): missing.add(pStrip.decode('utf-8','ignore'))
for d in plots_p:
if d[0]==None:
y1=False; continue
if not isinstance(d[0],(str,unicode)): raise ValueError('Plots specifiers must be strings (not %s)'%(type(d[0]).__name__))
if y1: plots_p_y1.append(d)
else: plots_p_y2.append(d)
try:
if (
d[0] not in data.keys()
# and not callable(d[0])
and not (isinstance(d[0],(str,unicode)) and (d[0].startswith('**') or d[0].startswith('*'))) # hack for callable as strings
# and not hasattr(d[0],'keys')
):
missing.add(d[0])
except UnicodeEncodeError:
import warnings
				warnings.warn('UnicodeEncodeError when processing data set '+repr(d[0]))
if missing:
if len(data.keys())==0 or len(data[data.keys()[0]])==0: # no data at all yet, do not add garbage NaNs
for m in missing: data[m]=[]
else:
addDataColumns(data,missing)
try:
print 'Missing columns in Scene.plot.data, added NaNs:',', '.join([m.encode('utf-8') for m in missing])
except UnicodeDecodeError:
					import warnings
					warnings.warn('UnicodeDecodeError reporting missing data columns -- harmless, just wondering...')
def createLines(pStrip,ySpecs,axes,isY1=True,y2Exists=False):
'''Create data lines from specifications; this code is common for y1 and y2 axes;
it handles y-data specified as callables/dicts passed as string (starting with '*'/'**'), which might create additional lines when updated with liveUpdate.
'''
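			# Examples of such specifiers (cf. the autoData() docstring above): a plain
			# ('kinetic','g--') tuple plots one existing column with an explicit style,
			# while ('**S.energy','') expands at runtime to one line per energy tracker key.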
# save the original specifications; they will be smuggled into the axes object
# the live updated will run yNameFuncs to see if there are new lines to be added
# and will add them if necessary
yNameFuncs=set()
yNames=set()
ySpecs2=[]
for ys in ySpecs:
if not isinstance(ys[0],(str,unicode)): raise ValueError('Plot specifications must be strings (not a %s).'%type(ys[0]))
if ys[0].startswith('**') or ys[0].startswith('*'):
evEx=eval(ys[0][(2 if ys[0].startswith('**') else 1):],{'S':P.scene})
yNameFuncs.add(evEx) # add callable or dictionary
# XXX: what is ys[1]? Previously, there was no line specifier there for dicts at least
# print evEx,type(evEx), evEx.__iter__(),type(evEx.__iter__())
ySpecs2+=[(ret,ys[1]) for ret in evEx] # traverse list or dict keys
else: ySpecs2.append(ys)
if len(ySpecs2)==0:
print 'woo.plot: creating fake plot, since there are no y-data yet'
line,=axes.plot([nan],[nan])
line2,=axes.plot([nan],[nan])
if replace: P.currLineRefs.append(LineRef(line=line,scatter=None,annotation=None,line2=line2,xdata=[nan],ydata=[nan]))
# set different color series for y1 and y2 so that they are recognizable
if matplotlib.rcParams.has_key('axes.color_cycle'): matplotlib.rcParams['axes.color_cycle']='b,g,r,c,m,y,k' if not isY1 else 'm,y,k,b,g,r,c'
for d in ySpecs2:
yNames.add(d)
# should have been handled above already
#if pStrip not in data:
# print 'Missing column %s in Scene.plot.data, added NaN.'%pString
# addDataColumns(data,[pStrip])
if d[0] not in data:
print 'Missing column %s in Scene.plot.data, added NaN.'%d[0]
addDataColumns(data,[d[0]])
line,=axes.plot(data[pStrip],data[d[0]],d[1],label=xlateLabel(d[0],P.labels),**lineKw)
lineKwWithoutAlpha=dict([(k,v) for k,v in lineKw.items() if k!='alpha'])
line2,=axes.plot([],[],d[1],color=line.get_color(),alpha=afterCurrentAlpha,**lineKwWithoutAlpha)
# use (0,0) if there are no data yet
scatterPt=[0,0] if len(data[pStrip])==0 else (data[pStrip][current],data[d[0]][current])
scatterPtPos=[scatterPt[0] if not math.isnan(scatterPt[0]) else 0,scatterPt[1] if not math.isnan(scatterPt[1]) else 0]
# if current value is NaN, use zero instead
scatter=axes.scatter(scatterPtPos[0],scatterPtPos[1],s=scatterSize,color=line.get_color(),**scatterMarkerKw)
if annotateFmt:
if math.isnan(scatterPtPos[0]) or math.isnan(scatterPtPos[1]): text=''
else: text=annotateFmt.format(xy=scatterPt)
annotation=axes.annotate(text,xy=scatterPtPos,color=line.get_color(),**annotateKw)
annotation.annotateFmt=annotateFmt
else: annotation=None
if replace: P.currLineRefs.append(LineRef(line=line,scatter=scatter,annotation=annotation,line2=line2,xdata=data[pStrip],ydata=data[d[0]]))
axes=line.get_axes()
labelLoc=(legendLoc[0 if isY1 else 1] if y2Exists>0 else 'best')
l=axes.legend(loc=labelLoc)
if l:
l.get_frame().set_alpha(legendAlpha)
if hasattr(l,'draggable'): l.draggable(True)
if scientific:
axes.ticklabel_format(style='sci',scilimits=(0,0),axis='both')
# fixes scientific exponent placement for y2: https://sourceforge.net/mailarchive/forum.php?thread_name=20101223174750.GD28779%40ykcyc&forum_name=matplotlib-users
if not isY1: axes.yaxis.set_offset_position('right')
if isY1:
axes.set_ylabel((', '.join([xlateLabel(_p[0],P.labels) for _p in ySpecs2])) if p not in xylabels or not xylabels[p][1] else xylabels[p][1])
axes.set_xlabel(xlateLabel(pStrip,P.labels) if (p not in xylabels or not xylabels[p][0]) else xylabels[p][0])
else:
axes.set_ylabel((', '.join([xlateLabel(_p[0],P.labels) for _p in ySpecs2])) if (p not in xylabels or len(xylabels[p])<3 or not xylabels[p][2]) else xylabels[p][2])
# if there are callable/dict ySpecs, save them inside the axes object, so that the live updater can use those
if yNameFuncs:
axes.wooYNames,axes.wooYFuncs,axes.wooXName,axes.wooLabelLoc=yNames,yNameFuncs,pStrip,labelLoc # prepend woo to avoid clashes
if 0:
# fix missing 'show' method; this has been fixed in matplotlib already, but we need to backport that
# see https://github.com/matplotlib/matplotlib/commit/15fd0ae587a57cb1d7b69546eb359085315148c8
# don't do that for headless backend, error there is fine
fig=axes.get_figure()
if not hasattr(fig,'show'):
mgr=getattr(fig.canvas,'manager')
if mgr: fig.show=lambda *args: mgr.window.show()
createLines(pStrip,plots_p_y1,axes=axes,isY1=True,y2Exists=len(plots_p_y2)>0)
if axesWd>0:
axes.axhline(linewidth=axesWd,color='k')
axes.axvline(linewidth=axesWd,color='k')
# create y2 lines, if any
if len(plots_p_y2)>0:
axes=axes.twinx() # create the y2 axis
createLines(pStrip,plots_p_y2,axes,isY1=False,y2Exists=True)
### scene is not directly accessible from here, do it like this:
S=woo.master.scene
if S.plot==P:
if 'title' in S.tags: axes.set_title(S.tags['title'])
return figs
def liveUpdate(P,timestamp):
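	'''Background-thread loop that keeps all current line references up to date; it returns as soon as live plotting is switched off or a newer liveUpdate call (carrying a fresher timestamp) takes over.'''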
global liveTimeStamp
liveTimeStamp=timestamp
import sys
while True:
if not live or liveTimeStamp!=timestamp:
return
figs,axes,linesData=set(),set(),set()
data=P.data
for l in P.currLineRefs:
l.update()
figs.add(l.line.get_figure())
axes.add(l.line.get_axes())
linesData.add(id(l.ydata))
# find callables in y specifiers, create new lines if necessary
for ax in axes:
			if not hasattr(ax,'wooYFuncs') or not ax.wooYFuncs: continue # not defined or empty
yy=set();
for f in ax.wooYFuncs:
if callable(f): yy.update(f())
elif hasattr(f,'keys'):
yy.update(f.keys())
else: raise ValueError("Internal error: ax.wooYFuncs items must be callables or dictionary-like objects and nothing else.")
#print 'callables y names:',yy
news=yy-ax.wooYNames
if not news: continue
for new in news:
ax.wooYNames.add(new)
if new in data.keys() and id(data[new]) in linesData: continue # do not add when reloaded and the old lines are already there
print 'woo.plot: creating new line for',new
if not new in data.keys(): data[new]=len(data[ax.wooXName])*[nan] # create data entry if necessary
#print 'data',len(data[ax.wooXName]),len(data[new]),data[ax.wooXName],data[new]
line,=ax.plot(data[ax.wooXName],data[new],label=xlateLabel(new,P.labels)) # no line specifier
line2,=ax.plot([],[],color=line.get_color(),alpha=afterCurrentAlpha)
scatterPt=(0 if len(data[ax.wooXName])==0 or math.isnan(data[ax.wooXName][current]) else data[ax.wooXName][current]),(0 if len(data[new])==0 or math.isnan(data[new][current]) else data[new][current])
scatter=ax.scatter(scatterPt[0],scatterPt[1],s=60,color=line.get_color(),**scatterMarkerKw)
if P.annotateFmt:
annotation=ax.annotate(P.annotateFmt.format(xy=scatterPt),xy=scatterPt,color=line.get_color(),**annotateKw)
annotation.annotateFmt=P.annotateFmt
else: annotation=None
P.currLineRefs.append(LineRef(line=line,scatter=scatter,annotation=annotation,line2=line2,xdata=data[ax.wooXName],ydata=data[new]))
ax.set_ylabel(ax.get_ylabel()+(', ' if ax.get_ylabel() else '')+xlateLabel(new,P.labels))
# it is possible that the legend has not yet been created
l=ax.legend(loc=ax.wooLabelLoc)
if l:
l.get_frame().set_alpha(legendAlpha)
if hasattr(l,'draggable'): l.draggable(True)
if autozoom:
for ax in axes:
try:
ax.relim() # recompute axes limits
ax.autoscale_view()
				except RuntimeError: pass # happens if data are being updated and do not have the same dimension at that very moment
for fig in figs:
#sys.stderr.write('*')
try:
fig.canvas.draw()
except RuntimeError: pass # happens here too
#sys.stderr.write('(')
time.sleep(liveInterval)
#sys.stderr.write(')')
def savePlotSequence(P,fileBase,stride=1,imgRatio=(5,7),title=None,titleFrames=20,lastFrames=30):
'''Save sequence of plots, each plot corresponding to one line in history. It is especially meant to be used for :obj:`woo.utils.makeVideo`.
:param stride: only consider every stride-th line of history (default creates one frame per each line)
:param title: Create title frame, where lines of title are separated with newlines (``\\n``) and optional subtitle is separated from title by double newline.
:param int titleFrames: Create this number of frames with title (by repeating its filename), determines how long the title will stand in the movie.
:param int lastFrames: Repeat the last frame this number of times, so that the movie does not end abruptly.
:return: List of filenames with consecutive frames.
'''
data,imgData,plots=P.data,P.imgData,P.plots
fig=createPlots(P,noShow=True,replace=True,subPlots=True,scatterSize=60,wider=True)[0]
sqrtFigs=math.sqrt(len(plots))
fig.set_size_inches(8*sqrtFigs,5*sqrtFigs) # better readable
fig.subplots_adjust(left=.05,right=.95,bottom=.05,top=.95) # make it more compact
if len(plots)==1 and plots[plots.keys()[0]]==None: # only pure snapshot is there
fig.set_size_inches(5,5)
fig.subplots_adjust(left=0,right=1,bottom=0,top=1)
#if not data.keys(): raise ValueError("plot.data is empty.")
pltLen=max(len(data[data.keys()[0]]) if data else 0,len(imgData[imgData.keys()[0]]) if imgData else 0)
if pltLen==0: raise ValueError("Both plot.data and plot.imgData are empty.")
global current
ret=[]
print 'Saving %d plot frames, it can take a while...'%(pltLen)
for i,n in enumerate(range(0,pltLen,stride)):
current=n
for l in P.currLineRefs: l.update()
out=fileBase+'-%03d.png'%i
fig.savefig(out)
ret.append(out)
sys.stderr.write('[%d]'%i)
if len(ret)==0: raise RuntimeError("No images created?!")
if title:
import Image
titleImgName=fileBase+'-title.png'
createTitleFrame(titleImgName,Image.open(ret[-1]).size,title)
ret=titleFrames*[titleImgName]+ret
if lastFrames>1: ret+=(lastFrames-1)*[ret[-1]]
return ret
def createTitleFrame(out,size,title,bgColor=(.8,.6,.8),fgColor='#405090',logo=None,logoPos=(20,20)):
'''Create figure with title and save to file.
:param out: file to save the result to; format is anything supported by matplotlib.
:param size: figure size (for pixel output formats), tuple of (width,height)
:param str title: title and subtitle; lines are separated by single newlines (``\n``) and subtitle (if any) is separated from the title by two consecutive newlines (``\n\n``). Oversize lines are scaled to fit the width, line spacing fits all lines.
:param color fgColor: Font color, any `color format that Matplotlib understands <http://matplotlib.org/api/colors_api.html>`__.
:param color bgColor: Background color.
	:param logo: filename or file-like object to be read via `matplotlib.pyplot.imread <http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.imread>`__.
:param logoPos: position where to place the logo.
'''
import matplotlib, matplotlib.figure, matplotlib.mathtext
# http://stackoverflow.com/a/13714720/761090
dpi=100 # does not matter as font is specified in inches
fig=matplotlib.figure.Figure(figsize=(size[0]/dpi,size[1]/dpi),dpi=dpi,facecolor=bgColor)
canvas=_HeadlessFigureCanvas(fig)
#fig.set_facecolor('blue'); fig.patch.set_color('blue'); fig.patch.set_facecolor('blue'); fig.patch.set_alpha(None)
titSub=title.split('\n\n')
	if len(titSub)==1: title,subtitle=titSub[0],''
	else: title,subtitle=titSub[0],'\n'.join(titSub[1:])
lines=[(t,True) for t in title.split('\n')]+([(t,False) for t in subtitle.split('\n')] if subtitle else [])
nLines=len(lines); fontSizes=size[1]/10.,size[1]/16.
def writeLine(text,vertPos,fontsize):
rgba,depth=matplotlib.mathtext.MathTextParser('Bitmap').to_rgba(text,fontsize=fontsize,dpi=fig.get_dpi(),color=fgColor)
textsize=rgba.shape[1],rgba.shape[0]
if textsize[0]>size[0]:
rgba,depth=matplotlib.mathtext.MathTextParser('Bitmap').to_rgba(text,fontsize=fontsize*size[0]/textsize[0],dpi=fig.get_dpi(),color=fgColor)
textsize=rgba.shape[1],rgba.shape[0]
fig.figimage(rgba.astype(float)/255.,xo=(size[0]-textsize[0])/2.,yo=vertPos-depth)
nTitle,nSubtitle=len(title.split('\n')),len(subtitle.split('\n')) if subtitle else 0
nLines=nTitle+nSubtitle
ht=size[1]; y0=ht-2*fontSizes[0]; yStep=(ht-2.5*fontSizes[0])/(nTitle+.6*nSubtitle+(.5 if nSubtitle else 0))
	def lineYOffset(lineno):
		# the extra .5*yStep is the gap between title and subtitle
		return nTitle*yStep+.5*yStep+(lineno-nTitle)*.6*yStep if lineno>=nTitle else lineno*yStep
if logo:
		import matplotlib.pylab as pylab
logoData=pylab.imread(logo)
fig.figimage(logoData,xo=logoPos[0],yo=logoPos[1],origin='upper')
for i,(l,isTitle) in enumerate(lines):
writeLine(l,y0-lineYOffset(i),fontSizes[0 if isTitle else 1])
# http://stackoverflow.com/a/4805178/761090 - savefig default overrides facecolor set previously
fig.savefig(out,facecolor=fig.get_facecolor())
def Scene_plot_plot(P,noShow=False,subPlots=True):
"""Do the actual plot, which is either shown on screen (and nothing is returned: if *noShow* is ``False``) or, if *noShow* is ``True``, returned list of matplotlib's Figure objects.
You can use
>>> import woo,woo.core,os
>>> S=woo.core.Scene()
>>> S.plot.plots={'foo':('bar',)}
>>> S.plot.addData(foo=1,bar=2)
>>> somePdf=woo.master.tmpFilename()+'.pdf'
>>> S.plot.plot(noShow=True)[0].savefig(somePdf)
>>> os.path.exists(somePdf)
True
to save the figure to file automatically.
"""
figs=createPlots(P,subPlots=subPlots,noShow=noShow,replace=(False if noShow else True))
# figs=set([l.line.get_axes().get_figure() for l in P.currLineRefs])
if not figs:
import warnings
warnings.warn('Nothing to plot.')
return
if not hasattr(list(figs)[0],'show') and not noShow:
import warnings
warnings.warn('plot.plot not showing figure (matplotlib using headless backend?)')
noShow=True
if not noShow:
if not woo.runtime.hasDisplay: return # would error out with some backends, such as Agg used in batches
if 1:
if live:
import threading
t=threading.Thread(target=liveUpdate,args=(P,time.time()))
t.daemon=True
t.start()
# pylab.show() # this blocks for some reason; call show on figures directly
for f in figs:
f.show()
# should have fixed https://bugs.launchpad.net/woo/+bug/606220, but does not work apparently
if 0:
import matplotlib.backend_bases
if 'CloseEvent' in dir(matplotlib.backend_bases):
def closeFigureCallback(event):
ff=event.canvas.figure
# remove closed axes from our update list
P.currLineRefs=[l for l in P.currLineRefs if l.line.get_axes().get_figure()!=ff]
f.canvas.mpl_connect('close_event',closeFigureCallback)
# else:
# figs=list(set([l.line.get_axes().get_figure() for l in P.currLineRefs]))
return figs
def Scene_plot_saveDataTxt(P,fileName,vars=None):
"""Save plot data into a (optionally compressed) text file. The first line contains a comment (starting with ``#``) giving variable name for each of the columns. This format is suitable for being loaded for further processing (outside woo) with ``numpy.genfromtxt`` function, which recognizes those variable names (creating numpy array with named entries) and handles decompression transparently.
>>> import woo, woo.core
>>> from pprint import pprint
>>> S=woo.core.Scene()
>>> S.plot.addData(a=1,b=11,c=21,d=31) # add some data here
>>> S.plot.addData(a=2,b=12,c=22,d=32)
>>> pprint(S.plot.data)
{'a': [1, 2], 'b': [11, 12], 'c': [21, 22], 'd': [31, 32]}
>>> txt=woo.master.tmpFilename()+'.txt.bz2'
>>> S.plot.saveDataTxt(txt,vars=('a','b','c'))
>>> import numpy
>>> d=numpy.genfromtxt(txt,dtype=None,names=True)
>>> d['a']
array([1, 2])
>>> d['b']
array([11, 12])
:param fileName: file to save data to; if it ends with ``.bz2`` / ``.gz``, the file will be compressed using bzip2 / gzip.
:param vars: Sequence (tuple/list/set) of variable names to be saved. If ``None`` (default), all variables in :obj:`woo.core.Plot` are saved.
"""
import bz2,gzip
data=P.data
if not vars:
vars=data.keys(); vars.sort()
fileName=P.scene.expandTags(fileName)
if fileName.endswith('.bz2'): f=bz2.BZ2File(fileName,'wb')
elif fileName.endswith('.gz'): f=gzip.GzipFile(fileName,'wb')
else: f=open(fileName,'wb')
f.write(_bytes("# "+"\t".join(vars)+"\n"))
for i in range(len(data[vars[0]])):
f.write(_bytes("\t".join([str(data[var][i]) for var in vars])+"\n"))
f.close()
def savePylab(baseName,timestamp=False,title=None):
'''This function is not finished, do not use it.'''
import time
if len(data.keys())==0: raise RuntimeError("No data for plotting were saved.")
if timestamp: baseName+=_mkTimestamp()
baseNameNoPath=baseName.split('/')[-1]
saveDataTxt(fileName=baseName+'.data.bz2')
if len(plots)==0: raise RuntimeError("No plots to save, only data saved.")
py=file(baseName+'.py','w')
py.write('#!/usr/bin/env python\n# encoding: utf-8\n# created '+time.asctime()+' ('+time.strftime('%Y%m%d_%H:%M')+')\n#\nimport pylab, numpy\n')
py.write("data=numpy.genfromtxt('%s.data.bz2',dtype=None,names=True)\n"%baseName)
subCols=int(round(math.sqrt(len(plots)))); subRows=int(math.ceil(len(plots)*1./subCols))
for nPlot,p in enumerate(plots.keys()):
pStrip=p.strip().split('=',1)[0]
		if plots[p]==None: continue # image plot, which is not exported
if len(plots)==1: py.write('pylab.figure()\n')
		else: py.write('pylab.subplot(%d,%d,%d)\n'%(subRows,subCols,nPlot+1))
def _mkTimestamp():
import time
return time.strftime('_%Y%m%d_%H:%M')
def Scene_plot_saveGnuplot(P,baseName,term='wxt',extension=None,timestamp=False,comment=None,title=None,varData=False,timeStamp=True):
"""Save data added with :obj:`woo.plot.addData` into (compressed) file and create .gnuplot file that attempts to mimick plots specified with :obj:`woo.plot.plots`.
:param baseName: used for creating baseName.gnuplot (command file for gnuplot), associated ``baseName.data.bz2`` (data) and output files (if applicable) in the form ``baseName.[plot number].extension``
	:param term: specify the gnuplot terminal; defaults to ``wxt``, in which case gnuplot will draw persistent windows to screen and terminate; other useful terminals are ``png``, ``cairopdf`` and so on
:param extension: extension for ``baseName`` defaults to terminal name; fine for png for example; if you use ``cairopdf``, you should also say ``extension='pdf'`` however
:param bool timestamp: append numeric time to the basename
:param bool varData: whether file to plot will be declared as variable or be in-place in the plot expression
:param comment: a user comment (may be multiline) that will be embedded in the control file
:return: name of the gnuplot file created.
"""
data,imgData,plots,labels,xylabels=P.data,P.imgData,P.plots,P.labels,P.xylabels
if len(data.keys())==0: raise RuntimeError("No data for plotting were saved.")
if timestamp: baseName+=_mkTimestamp()
baseNameNoPath=baseName.split('/')[-1]
vars=data.keys(); vars.sort()
P.saveDataTxt(fileName=baseName+'.data.bz2',vars=vars)
fPlot=file(baseName+".gnuplot",'w')
fPlot.write('#!/usr/bin/env gnuplot\n#\n')
if timeStamp: fPlot.write('# created '+time.asctime()+' ('+time.strftime('%Y%m%d_%H:%M')+')\n#\n')
if comment: fPlot.write('# '+comment.replace('\n','\n# ')+'#\n')
dataFile='"< bzcat %s.data.bz2"'%(baseNameNoPath)
if varData:
fPlot.write('dataFile=%s'%dataFile); dataFile='dataFile'
if not extension: extension=term
i=0
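	# plot counter: numbers the gnuplot terminals and the per-plot output files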
for p in plots:
pStrip=p.strip().split('=',1)[0]
		if plots[p]==None: continue ## this plot is an image plot, which is not applicable to gnuplot
plots_p=[addPointTypeSpecifier(o) for o in tuplifyYAxis(plots[p])]
if term in ['wxt','x11']: fPlot.write("set term %s %d persist\n"%(term,i))
else: fPlot.write("set term %s; set output '%s.%d.%s'\n"%(term,baseNameNoPath,i,extension))
fPlot.write("set xlabel '%s'\n"%xlateLabel(p,labels))
fPlot.write("set grid\n")
fPlot.write("set datafile missing 'nan'\n")
if title: fPlot.write("set title '%s'\n"%title)
y1=True; plots_y1,plots_y2=[],[]
		# replace callable/dict-like data specifiers by their results, if that particular data exists
plots_p2=[]
for pp in plots_p:
if pp[0]==None: plots_p2.append((pp[0],pp[1]))
elif pp[0].startswith('**'):
try:
dd=eval(pp[0][2:],{'S':P.scene})
plots_p2+=[(ppp,'') for ppp in dd.keys() if ppp in data.keys()]
except:
import traceback
traceback.print_exc()
print 'WARN: ignoring exception raised while evaluating expression "'+pp[0][2:]+'".'
elif pp[0].startswith('*'):
plots_p2+=[(e,'') for e in eval(pp[0][1:],{'S':P.scene}) if e in data.keys()]
else: plots_p2.append((pp[0],pp[1]))
plots_p=plots_p2
#plots_p=sum([([(pp,'') for pp in p[0]() if pp in data.keys()] if callable(p[0]) else [(p[0],p[1])] ) for p in plots_p],[])
for d in plots_p:
if d[0]==None:
y1=False; continue
if y1: plots_y1.append(d)
else: plots_y2.append(d)
fPlot.write("set ylabel '%s'\n"%(','.join([xlateLabel(_p[0],labels) for _p in plots_y1])))
if len(plots_y2)>0:
fPlot.write("set y2label '%s'\n"%(','.join([xlateLabel(_p[0],labels) for _p in plots_y2])))
fPlot.write("set y2tics\n")
ppp=[]
def _mkLine(varX,varY,i):
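			# one gnuplot plot item: 1-based column indices are looked up in vars; i==1 marks a y2-axis line (adds 'axes x1y2' and the arrow decoration)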
return " %s using %d:%d title '%s%s(%s)%s' with lines%s"%(dataFile,vars.index(varX)+1,vars.index(varY)+1,'← ' if i==0 else'',xlateLabel(varY,labels),xlateLabel(varX,labels),' →' if i==1 else '',' axes x1y2' if i==1 else '')
for pp in plots_y1: ppp.append(_mkLine(pStrip,pp[0],0))
for pp in plots_y2: ppp.append(_mkLine(pStrip,pp[0],1))
fPlot.write("plot "+",".join(ppp)+"\n")
i+=1
fPlot.close()
return baseName+'.gnuplot'
def _deprecPlotFunc(old,func,new=None,takesScene=False,*args,**kw):
"Wrapper for deprecated functions, example below."
import warnings
if not new: new=old
warnings.warn('Function plot.%s is deprecated, use %s.%s instead.'%(old,('Scene' if takesScene else 'Scene.plot'),new),stacklevel=3,category=DeprecationWarning)
S=woo.master.scene
if takesScene: return func(S,*args,**kw)
else: return func(S.plot,*args,**kw)
#
# DEPRECATED functions, will be removed at some point!
#
def reset(): _deprecPlotFunc('reset',Scene_plot_reset)
def resetData(): _deprecPlotFunc('resetData',Scene_plot_resetData)
def splitData(): _deprecPlotFunc('splitData',Scene_plot_splitData)
def reverseData(): _deprecPlotFunc('reverseData',Scene_plot_reverseData)
def addAutoData(): _deprecPlotFunc('addAutoData',Scene_plot_autoData,new='autoData')
def addData(): _deprecPlotFunc('addData',Scene_plot_addData)
def addImgData(): _deprecPlotFunc('addImgData',Scene_plot_addImgData)
def saveGnuplot(): _deprecPlotFunc('saveGnuplot',Scene_plot_saveGnuplot)
def saveDataTxt(): _deprecPlotFunc('saveDataTxt',Scene_plot_saveDataTxt)
def plot(): _deprecPlotFunc('plot',Scene_plot_plot)
# called at startup from from woo._monkey.plot
def defMonkeyMethods():
import woo.core
woo.core.Plot.reset=Scene_plot_reset
woo.core.Plot.resetData=Scene_plot_resetData
woo.core.Plot.splitData=Scene_plot_splitData
woo.core.Plot.reverseData=Scene_plot_reverseData
woo.core.Plot.autoData=Scene_plot_autoData
woo.core.Plot.addData=Scene_plot_addData
woo.core.Plot.addImgData=Scene_plot_addImgData
woo.core.Plot.saveGnuplot=Scene_plot_saveGnuplot
woo.core.Plot.saveDataTxt=Scene_plot_saveDataTxt
woo.core.Plot.plot=Scene_plot_plot
defMonkeyMethods()
| gpl-2.0 | -1,342,440,483,409,564,000 | 45.341102 | 397 | 0.698692 | false | 2.904781 | false | false | false | 0.046793 |
jeremiak/regulations-site | regulations/views/diff.py | 1 | 8631 | #vim: set encoding=utf-8
from regulations.generator import generator
from regulations.generator.html_builder import HTMLBuilder
from regulations.generator.layers.toc_applier import TableOfContentsLayer
from regulations.generator.node_types import EMPTYPART, REGTEXT
from regulations.generator.section_url import SectionUrl
from regulations.generator.toc import fetch_toc
from regulations.views import error_handling, utils
from regulations.views.chrome import ChromeView
from regulations.views.navigation import choose_next_section
from regulations.views.navigation import choose_previous_section
from regulations.views.partial import PartialView
from django.core.urlresolvers import reverse
def get_appliers(label_id, older, newer):
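    """Return the layer appliers plus the diff applier for the given section and
    version pair; raise MissingContentException if the diff cannot be found."""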
diff = generator.get_diff_applier(label_id, older, newer)
if diff is None:
raise error_handling.MissingContentException()
appliers = utils.handle_diff_layers(
'graphics,paragraph,keyterms,defined',
label_id,
older,
newer)
appliers += (diff,)
return appliers
class PartialSectionDiffView(PartialView):
""" A diff view of a partial section. """
template_name = 'regulations/regulation-content.html'
def get(self, request, *args, **kwargs):
""" Override GET so that we can catch and propagate any errors. """
try:
return super(PartialSectionDiffView, self).get(request, *args,
**kwargs)
except error_handling.MissingContentException, e:
return error_handling.handle_generic_404(request)
def footer_nav(self, label, toc, old_version, new_version, from_version):
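        """Build previous/next navigation entries for the footer, linking to the
        diff views of the sections adjacent to ``label`` in the table of contents."""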
nav = {}
for idx, toc_entry in enumerate(toc):
if toc_entry['section_id'] != label:
continue
p_sect = choose_previous_section(idx, toc)
n_sect = choose_next_section(idx, toc)
if p_sect:
nav['previous'] = p_sect
nav['previous']['url'] = reverse_chrome_diff_view(
p_sect['section_id'], old_version,
new_version, from_version)
if n_sect:
nav['next'] = n_sect
nav['next']['url'] = reverse_chrome_diff_view(
n_sect['section_id'], old_version,
new_version, from_version)
return nav
def get_context_data(self, **kwargs):
# We don't want to run the content data of PartialView -- it assumes
# we will be applying layers
context = super(PartialView, self).get_context_data(**kwargs)
label_id = context['label_id']
older = context['version']
newer = context['newer_version']
tree = generator.get_tree_paragraph(label_id, older)
if tree is None:
#TODO We need a more complicated check here to see if the diffs
#add the requested section. If not -> 404
tree = {}
appliers = get_appliers(label_id, older, newer)
builder = HTMLBuilder(*appliers)
builder.tree = tree
builder.generate_html()
child_of_root = builder.tree
if builder.tree['node_type'] == REGTEXT:
child_of_root = {
'node_type': EMPTYPART,
'children': [builder.tree]}
context['tree'] = {'children': [child_of_root]}
context['markup_page_type'] = 'diff'
regpart = label_id.split('-')[0]
old_toc = fetch_toc(regpart, older)
diff = generator.get_diff_json(regpart, older, newer)
from_version = self.request.GET.get('from_version', older)
context['TOC'] = diff_toc(older, newer, old_toc, diff, from_version)
context['navigation'] = self.footer_nav(label_id, context['TOC'],
older, newer, from_version)
return context
class ChromeSectionDiffView(ChromeView):
"""Search results with chrome"""
template_name = 'regulations/diff-chrome.html'
partial_class = PartialSectionDiffView
has_sidebar = False
def check_tree(self, context):
pass # The tree may or may not exist in the particular version
def add_diff_content(self, context):
context['from_version'] = self.request.GET.get(
'from_version', context['version'])
context['left_version'] = context['version']
context['right_version'] = \
context['main_content_context']['newer_version']
from_version = self.request.GET.get('from_version', context['version'])
context['TOC'] = context['main_content_context']['TOC']
# Add reference to the first subterp, so we know how to redirect
toc = fetch_toc(context['label_id'].split('-')[0], from_version)
for entry in toc:
if entry.get('is_supplement') and entry.get('sub_toc'):
el = entry['sub_toc'][0]
el['url'] = SectionUrl().of(
el['index'], from_version,
self.partial_class.sectional_links)
context['first_subterp'] = el
return context
def add_main_content(self, context):
super(ChromeSectionDiffView, self).add_main_content(context)
return self.add_diff_content(context)
def reverse_chrome_diff_view(sect_id, left_ver, right_ver, from_version):
""" Reverse the URL for a chromed diff view. """
diff_url = reverse(
'chrome_section_diff_view',
args=(sect_id, left_ver, right_ver))
diff_url += '?from_version=%s' % from_version
return diff_url
def extract_sections(toc):
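    """Flatten the table of contents: replace each Subpart entry with the
    sections it contains, keeping other entries as they are."""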
compiled_toc = []
for i in toc:
if 'Subpart' in i['index']:
compiled_toc.extend(i['sub_toc'])
else:
compiled_toc.append(i)
return compiled_toc
def diff_toc(older_version, newer_version, old_toc, diff, from_version):
#We work around Subparts in the TOC for now.
compiled_toc = extract_sections(old_toc)
for node in (v['node'] for v in diff.values() if v['op'] == 'added'):
if len(node['label']) == 2 and node['title']:
element = {
'label': node['title'],
'index': node['label'],
'section_id': '-'.join(node['label']),
'op': 'added'
}
data = {'index': node['label'], 'title': node['title']}
TableOfContentsLayer.section(element, data)
TableOfContentsLayer.appendix_supplement(element, data)
compiled_toc.append(element)
modified, deleted = modified_deleted_sections(diff)
for el in compiled_toc:
if not 'Subpart' in el['index'] and not 'Subjgrp' in el['index']:
el['url'] = reverse_chrome_diff_view(
el['section_id'], older_version, newer_version, from_version)
# Deleted first, lest deletions in paragraphs affect the section
if tuple(el['index']) in deleted and 'op' not in el:
el['op'] = 'deleted'
if tuple(el['index']) in modified and 'op' not in el:
el['op'] = 'modified'
return sort_toc(compiled_toc)
def sort_toc(toc):
""" Sort the Table of Contents elements. """
def normalize(element):
""" Return a sorting order for a TOC element, primarily based
on the index, and the type of content. """
# The general order of a regulation is: regulation text sections,
# appendices, and then the interpretations.
normalized = []
if element.get('is_section'):
normalized.append(0)
elif element.get('is_appendix'):
normalized.append(1)
elif element.get('is_supplement'):
normalized.append(2)
for part in element['index']:
if part.isdigit():
normalized.append(int(part))
else:
normalized.append(part)
return normalized
return sorted(toc, key=lambda el: tuple(normalize(el)))
def modified_deleted_sections(diff):
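    """Split the diff into two sets of section labels: those containing
    modifications and those that were deleted outright."""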
modified, deleted = set(), set()
for label, diff_value in diff.iteritems():
label = tuple(label.split('-'))
if 'Interp' in label:
section_label = (label[0], 'Interp')
else:
section_label = tuple(label[:2])
# Whole section was deleted
if diff_value['op'] == 'deleted' and label == section_label:
deleted.add(section_label)
# Whole section added/modified or paragraph added/deleted/modified
else:
modified.add(section_label)
return modified, deleted
| cc0-1.0 | -5,635,030,999,713,222,000 | 35.417722 | 79 | 0.597613 | false | 4.031294 | false | false | false | 0.000579 |
nkalodimas/invenio | modules/bibformat/lib/elements/bfe_field.py | 28 | 6253 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibFormat element - Prints a custom field
"""
__revision__ = "$Id$"
from invenio.bibformat_utils import parse_tag
def format_element(bfo, tag, limit, instances_separator=" ",
subfields_separator=" ", extension="", output_pattern=""):
"""
Prints the given field of a record.
If tag is in range [001, 010], this element assumes
that it accesses a control field. Else it considers it
accesses a data field.
    <p>For example, consider the following metadata:
<pre>
100__ $$aCalatroni, S$$uCERN
245__ $$aStatus of the EP Simulations and Facilities for the SPL
700__ $$aFerreira, L$$uCERN
700__ $$aMacatrao, M$$uCERN
700__ $$aSkala, A$$uCERN
700__ $$aSosin, M$$uCERN
700__ $$ade Waele, R$$uCERN
700__ $$aWithofs, Y$$uKHLim, Diepenbeek
</pre>
The following calls to bfe_field would print:
<pre>
<BFE_FIELD tag="700" instances_separator="<br/>" subfields_separator=" - ">
Ferreira, L - CERN
Macatrao, M - CERN
Skala, A - CERN
Sosin, M - CERN
de Waele, R - CERN
Withofs, Y - KHLim, Diepenbeek
</pre>
</p>
<p>For more advanced formatting, the <code>output_pattern</code>
parameter can be used to output the subfields of each instance in
    the specified way. For example, consider the following metadata:
<pre>
775__ $$b15. Aufl.$$c1995-1996$$nv.1$$pGrundlagen und Werkstoffe$$w317999
775__ $$b12. Aufl.$$c1963$$w278898
775__ $$b14. Aufl.$$c1983$$w107899
775__ $$b13. Aufl.$$c1974$$w99635
</pre>
with the following <code>output_pattern</code>:
<pre>
<a href="/record/%(w)s">%(b)s (%(c)s) %(n)s %(p)s</a>
</pre>
would print:<br/>
<a href="/record/317999">15. Aufl. (1995-1996) v.1 Grundlagen und Werkstoffe</a><br/>
<a href="/record/278898">12. Aufl. (1963) </a><br/>
<a href="/record/107899">14. Aufl. (1983) </a><br/>
<a href="/record/99635">13. Aufl. (1974) </a>
<br/>(<code>instances_separator="<br/>"</code> set for
readability)<br/> The output pattern must follow <a
href="http://docs.python.org/library/stdtypes.html#string-formatting-operations">Python
string formatting</a> syntax. The format must use parenthesized
notation to map to the subfield code. This currently restricts the
support of <code>output_pattern</code> to non-repeatable
subfields</p>
@param tag: the tag code of the field that is to be printed
@param instances_separator: a separator between instances of field
@param subfields_separator: a separator between subfields of an instance
@param limit: the maximum number of values to display.
@param extension: a text printed at the end if 'limit' has been exceeded
@param output_pattern: when specified, prints the subfields of each instance according to pattern specified as parameter (following Python string formatting convention)
"""
# Check if data or control field
p_tag = parse_tag(tag)
if p_tag[0].isdigit() and int(p_tag[0]) in range(0, 11):
return bfo.control_field(tag)
elif p_tag[0].isdigit():
# Get values without subcode.
# We will filter unneeded subcode later
if p_tag[1] == '':
p_tag[1] = '_'
if p_tag[2] == '':
p_tag[2] = '_'
values = bfo.fields(p_tag[0]+p_tag[1]+p_tag[2]) # Values will
# always be a
# list of
# dicts
else:
return ''
x = 0
instances_out = [] # Retain each instance output
for instance in values:
filtered_values = [value for (subcode, value) in instance.iteritems()
if p_tag[3] == '' or p_tag[3] == '%' \
or p_tag[3] == subcode]
if len(filtered_values) > 0:
# We have found some corresponding subcode(s)
if limit.isdigit() and x + len(filtered_values) >= int(limit):
# We are going to exceed the limit
                filtered_values = filtered_values[:int(limit)-x] # take only as many as still fit within the limit
if len(filtered_values) > 0: # do not append empty list!
if output_pattern:
try:
instances_out.append(output_pattern % DictNoKeyError(instance))
except:
pass
else:
instances_out.append(subfields_separator.join(filtered_values))
x += len(filtered_values) # record that so we know limit has been exceeded
break # No need to go further
else:
if output_pattern:
try:
instances_out.append(output_pattern % DictNoKeyError(instance))
except:
pass
else:
instances_out.append(subfields_separator.join(filtered_values))
x += len(filtered_values)
ext_out = ''
if limit.isdigit() and x > int(limit):
ext_out = extension
return instances_separator.join(instances_out) + ext_out
class DictNoKeyError(dict):
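    """Dictionary returning an empty string instead of raising KeyError for
    missing keys; used when filling output_pattern with subfield values."""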
def __getitem__(self, key):
if dict.__contains__(self, key):
val = dict.__getitem__(self, key)
else:
val = ''
return val
| gpl-2.0 | -1,610,546,076,519,092,200 | 39.341935 | 172 | 0.592036 | false | 3.669601 | false | false | false | 0.006077 |
verekia/hackarena | hackarena/server.py | 1 | 4323 | from tornado.ioloop import IOLoop
from tornado.web import RequestHandler, Application, url, StaticFileHandler
from tornado.options import define, options
from sockjs.tornado import SockJSRouter, SockJSConnection
import json
class WebSocketHandler(SockJSConnection):
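    # Maps room name -> {session string -> connection}. Every new connection
    # starts in the 'lobby' room and is moved once it sends a ROOM message.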
clients = {
'lobby': {},
}
def on_open(self, info):
# Required because the last part of the 3-part session string varies on on_close
# str(self.session): 1416760865.178006 bmv6q4zu 1416760865
# self.sessionString: 1416760865.178006 bmv6q4zu
# self.temporaryName: bmv6q4zu
self.sessionString = getSessionString(str(self.session))
self.temporaryName = generateRandomName(str(self.session))
self.clients['lobby'][self.sessionString] = self
def on_close(self):
del self.clients[self.room][getSessionString(str(self.session))]
self.refresh_users()
def refresh_users(self):
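        # broadcast the current list of user names in this room to everyone in it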
room_users = [self.getName(value) for key, value in self.clients[self.room].items()]
self.broadcast([value for key, value in self.clients[self.room].items()], createMessage('USERS', room_users))
def getName(self, obj=None):
if obj:
return obj.chosenName if hasattr(obj, 'chosenName') else obj.temporaryName
else:
return self.chosenName if hasattr(self, 'chosenName') else self.temporaryName
def broadcast_to_all(self, message):
self.broadcast([value for key, value in self.clients[self.room].items()], message)
def on_message(self, message):
try:
data = json.loads(message)
except:
self.send(createMessage('SIMPLE_MESSAGE', 'Unsupported message type.'))
print 'Received unsupported message type'
return
##############################################
# #
# Backend Events Handling, core logic #
# #
##############################################
if data['type'] == 'ROOM':
self.room = data['content']
if self.clients['lobby'][self.sessionString]:
del self.clients['lobby'][self.sessionString]
try:
self.clients[self.room][self.sessionString] = self
except:
self.clients[self.room] = {}
self.clients[self.room][self.sessionString] = self
self.send(createMessage('ENTERED_ROOM', {'roomName':self.room, 'temporaryName': self.temporaryName}))
self.broadcast_to_all(createMessage('OTHER_ENTERED_ROOM', self.getName()))
self.refresh_users()
elif data['type'] == 'NAME':
old_name = self.getName()
self.chosenName = data['content']
self.broadcast_to_all(createMessage('USER_RENAME', {'previousName': old_name, 'newName': self.chosenName}))
self.refresh_users()
elif data['type'] == 'USER_MESSAGE':
self.broadcast_to_all(createMessage('USER_MESSAGE', {'username': self.getName(), 'message': data['content']}))
else:
self.send(createMessage('MESSAGE', 'Unsupported message type.'))
print 'Received unsupported message type'
##############################################
# #
# App Setup #
# #
##############################################
define('port', default=8888, help="run on the given port", type=int)
define('address', default='192.168.X.X', help="run on the address", type=str)
class IndexHandler(RequestHandler):
def get(self, room):
self.render("index.html")
def make_app():
sock_router = SockJSRouter(WebSocketHandler, '/websocket')
return Application(
sock_router.urls +
[
(r'/assets/(.*)', StaticFileHandler, {'path': 'assets'}),
(r'/static/(.*)', StaticFileHandler, {'path': 'static'}),
url(r'/(.*)', IndexHandler),
]
)
def main():
app = make_app()
app.listen(options.port) #, options.address)
IOLoop.current().start()
if __name__ == '__main__':
options.parse_config_file('server.conf')
main()
| mit | 4,482,904,039,003,858,400 | 34.434426 | 122 | 0.557715 | false | 4.21345 | false | false | false | 0.005552 |
bbc/kamaelia | Sketches/MPS/BugReports/FixTests/Kamaelia/Examples/SimpleGraphicalApps/Ticker/Ticker.py | 6 | 1294 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Kamaelia.UI.Pygame.Ticker import Ticker
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor
Pipeline( ReadFileAdaptor("Ulysses",readmode="line",steptime=0.5),
Ticker(background_colour=(128,48,128),
render_left = 1,
render_top = 1,
render_right = 600,
render_bottom = 200,
position = (100, 300),
)
).run()
| apache-2.0 | -8,735,597,551,677,132,000 | 37.058824 | 78 | 0.690881 | false | 3.707736 | false | false | false | 0.013138 |
TheKK/Shedskin | tests/99.py | 6 | 6253 |
# (c) Mark Dufour, Haifang Ni
# --- [email protected]
empty, black, white = 0, 1, -1 # [int], [int], [int]
board = [[empty for x in range(8)] for y in range(8)] # [list(list(int))]
board[3][3] = board[4][4] = white # [int]
board[3][4] = board[4][3] = black # [int]
player, depth = {white: 'human', black: 'lalaoth'}, 3 # [dict(int, str)], [int]
def possible_move(board, x, y, color): # board: [list(list(int))], x: [int], y: [int], color: [int]
if board[x][y] != empty: # [int]
return False # [int]
for direction in [(1, 1), (-1, 1), (0, 1), (1, -1), (-1, -1), (0, -1), (1, 0), (-1, 0)]: # [list(tuple2(int, int))]
if flip_in_direction(board, x, y, direction, color): # [int]
return True # [int]
return False # [int]
def flip_in_direction(board, x, y, direction, color): # board: [list(list(int))], x: [int], y: [int], direction: [tuple2(int, int)], color: [int]
other_color = False # [int]
while True: # [int]
x, y = x+direction[0], y+direction[1] # [int], [int]
if x not in range(8) or y not in range(8): # [int]
return False # [int]
square = board[x][y] # [int]
if square == empty: return False # [int]
if square != color: other_color = True # [int]
else: return other_color # [int]
def flip_stones(board, move, color): # board: [list(list(int))], move: [tuple2(int, int)], color: [int]*
global flips
flips += 1 # [int]
for direction in [(1, 1), (-1, 1), (0, 1), (1, -1), (-1, -1), (0, -1), (1, 0), (-1, 0)]: # [list(tuple2(int, int))]
if flip_in_direction(board, move[0], move[1], direction, color): # [int]
x, y = move[0]+direction[0], move[1]+direction[1] # [int], [int]
while board[x][y] != color: # [int]
board[x][y] = color # [int]
x, y = x+direction[0], y+direction[1] # [int], [int]
board[move[0]][move[1]] = color # [int]
#def print_board(board, turn): # board: [], turn: []
# for line in board: # []
# print ' '.join([{white: 'O', black: 'X', empty: '.'}[square] for square in line]) # []
# print 'turn:', player[turn] # [], []
# print 'black:', stone_count(board, black), 'white:', stone_count(board, white) # [], [], [], []
def possible_moves(board, color): # board: [list(list(int))], color: [int]
return [(x,y) for x in range(8) for y in range(8) if possible_move(board, x, y, color)] # [list(tuple2(int, int))]
#def coordinates(move): # move: []
# return (int(move[1])-1, 'abcdefgh'.index(move[0])) # []
def stone_count(board, color): # board: [list(list(int))], color: [int]
return sum([len([square for square in line if square == color]) for line in board]) # [list(int)]
#def human_move(move): # move: []
# return 'abcdefgh'[move[0]]+str(move[1]+1) # []
def best_move(board, color, first, step=1): # board: [list(list(int))], color: [int]*, first: [int], step: [int]
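    # depth-limited search: each candidate move is scored by the mobility (number of possible moves) it leaves for the 'first' player; corner moves get the maximal score 64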
max_move, max_mobility, max_score = None, 0, 0 # [none], [int], [int]
#print 'possible', possible_moves(board, color) # [str], [list(tuple2(int, int))]
for move in possible_moves(board, color): # [list(tuple2(int, int))]
#print 'board before' # [str]
#print_board(board, color) # []
#print 'move', move # [str], [tuple2(int, int)]
if move in [(0,0),(0,7),(7,0),(7,7)]: # [list(tuple2(int, int))]
mobility, score = 64, 64 # [int], [int]
if color != first: # [int]
mobility = 64-mobility # [int]
else:
testboard = [[square for square in line] for line in board] # [list(list(int))]
flip_stones(testboard, move, color) # []
#print_board(testboard, color) # []
if step < depth: # [int]
#print 'deeper' # [str]
next_move, mobility = best_move(testboard, -color, first, step+1) # [tuple2(tuple2(int, int), int)]
else:
#print 'mobility' # [str]
mobility = len(possible_moves(testboard, first)) # [int]
score = mobility # [int]
if color != first: # [int]
score = 64-score # [int]
if score >= max_score: # []
max_move, max_mobility, max_score = move, mobility, score # [tuple2(int, int)], [int], [int]
#print 'done' # [str]
return max_move, max_mobility # [tuple2(tuple2(int, int), int)]
flips = 0 # [int]
steps = 0 # [int]
turn = black # [int]
while possible_moves(board, black) or possible_moves(board, white): # [list(tuple2(int, int))]
if possible_moves(board, turn): # [list(tuple2(int, int))]
#print_board(board, turn) # []
#print 'flips', flips # [str], [int]
# steps += 1 # [int]
# if steps > 5: # [int]
# break
#if turn == black: # [int]
move, mobility = best_move(board, turn, turn) # [tuple2(tuple2(int, int), int)]
#else:
# move = coordinates(raw_input()) # [tuple2(int, int)]
if not possible_move(board, move[0], move[1], turn): # [int]
print 'impossible!' # [str]
turn = -turn # [int]
else:
flip_stones(board, move, turn) # []
turn = -turn # [int]
#print_board(board, turn)
print 'flips', flips # [str], [int]
if stone_count(board, black) == stone_count(board, white): # [int]
print 'draw!' # [str]
else:
if stone_count(board, black) > stone_count(board, white): print player[black], 'wins!' # [str], [str]
else: print player[white], 'wins!' # [str], [str]
| gpl-3.0 | 4,341,224,447,258,199,600 | 50.677686 | 145 | 0.471774 | false | 3.211608 | false | false | false | 0.015193 |
GRIFFINCollaboration/beamCompanionExplorer | data/parseMass.py | 1 | 1034 | import json
def generateMassTable():
'''
generate a mass table for the beam companion explorer.
writes and returns the mass table, a list of dicts keyed as [Z]['A'].
'''
# handy variables
lineLength = 124
headerLength = 39
massTable = []
fid = open("mass.mas12", "r")
# fast forward through header
for i in range(headerLength):
fid.readline()
# unpack each line
while True:
#decode fixed-width columns
record = fid.readline()
if record == '':
break
N = int(record[6:9])
Z = int(record[11:14])
A = N + Z
mass = record[96:110].replace(' ', '')
mass = mass.replace('#', '.')
mass = float(mass)/1000000.
#pack N, Z, mass into dictionary for beam companion explorer:
while len(massTable)-1 < Z:
massTable.append({})
massTable[Z][str(A)] = mass
outputTable = open('mass.dict', 'w')
outputTable.write(json.dumps(massTable))
return massTable | mit | 5,964,059,097,637,266,000 | 23.642857 | 73 | 0.564797 | false | 3.931559 | false | false | false | 0.003868 |